blob_id stringlengths 40-40 | directory_id stringlengths 40-40 | path stringlengths 3-616 | content_id stringlengths 40-40 | detected_licenses listlengths 0-112 | license_type stringclasses 2 values | repo_name stringlengths 5-115 | snapshot_id stringlengths 40-40 | revision_id stringlengths 40-40 | branch_name stringclasses 777 values | visit_date timestamp[us] 2015-08-06 10:31:46 to 2023-09-06 10:44:38 | revision_date timestamp[us] 1970-01-01 02:38:32 to 2037-05-03 13:00:00 | committer_date timestamp[us] 1970-01-01 02:38:32 to 2023-09-06 01:08:06 | github_id int64 4.92k-681M (nullable) | star_events_count int64 0-209k | fork_events_count int64 0-110k | gha_license_id stringclasses 22 values | gha_event_created_at timestamp[us] 2012-06-04 01:52:49 to 2023-09-14 21:59:50 (nullable) | gha_created_at timestamp[us] 2008-05-22 07:58:19 to 2023-08-21 12:35:19 (nullable) | gha_language stringclasses 149 values | src_encoding stringclasses 26 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 3-10.2M | extension stringclasses 188 values | content stringlengths 3-10.2M | authors listlengths 1-1 | author_id stringlengths 1-132 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
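The rows that follow are easiest to work with programmatically. A minimal sketch, assuming the Hugging Face `datasets` library and a hypothetical dataset path (`org/python-source-files` is a placeholder, not the real identifier), that streams a few rows and prints their metadata:

```python
# Minimal sketch: stream rows of a dataset with the schema above.
# "org/python-source-files" is a placeholder dataset path.
from itertools import islice

from datasets import load_dataset

ds = load_dataset("org/python-source-files", split="train", streaming=True)
for row in islice(ds, 3):
    # Each row carries repo/file metadata plus the full source file in `content`.
    print(row["repo_name"], row["path"], row["length_bytes"])
```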
e58223496a4579245283ccdb1478dd5d10774f3b | 81d635211686b1bc87af5892bd9e0fb95cc2ddb8 | /adwords api/googleads-python-lib-master/examples/dfp/v201511/report_service/run_sales_report.py | b3ab57226822fd2a475aea1e43470dcd44e9a4d3 | [
"Apache-2.0"
]
| permissive | analyticsbot/Python-Code---Part-2 | de2f0581258b6c8b8808b4ef2884fe7e323876f0 | 12bdcfdef4472bcedc77ae61707c25a4a09cba8a | refs/heads/master | 2021-06-04T05:10:33.185766 | 2016-08-31T13:45:45 | 2016-08-31T13:45:45 | 66,679,512 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,017 | py | #!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example runs a report equal to the "Sales by salespersons report."
"""
import tempfile
# Import appropriate modules from the client library.
from googleads import dfp
from googleads import errors
def main(client):
# Initialize a DataDownloader.
report_downloader = client.GetDataDownloader(version='v201511')
# Create report job.
report_job = {
'reportQuery': {
'dimensions': ['SALESPERSON_ID', 'SALESPERSON_NAME'],
'columns': ['AD_SERVER_IMPRESSIONS', 'AD_SERVER_CPM_AND_CPC_REVENUE',
'AD_SERVER_WITHOUT_CPD_AVERAGE_ECPM'],
'dateRangeType': 'LAST_MONTH'
}
}
try:
# Run the report and wait for it to finish.
report_job_id = report_downloader.WaitForReport(report_job)
  except errors.DfpReportError as e:
    print 'Failed to generate report. Error was: %s' % e
    return
# Change to your preferred export format.
export_format = 'CSV_DUMP'
report_file = tempfile.NamedTemporaryFile(suffix='.csv.gz', delete=False)
# Download report data.
report_downloader.DownloadReportToFile(
report_job_id, export_format, report_file)
report_file.close()
# Display results.
print 'Report job with id \'%s\' downloaded to:\n%s' % (
report_job_id, report_file.name)
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client)
| [
"[email protected]"
]
| |
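The example above saves its report as a gzipped CSV (the `CSV_DUMP` export format) and prints the temp-file path. A minimal sketch of reading that file back with the standard-library `gzip` and `csv` modules; the path shown is a placeholder for whatever the script printed:

```python
# Minimal sketch: read back the gzipped CSV report the example downloads.
import csv
import gzip

report_path = "/tmp/tmpXXXXXX.csv.gz"  # placeholder: use the path the script printed
with gzip.open(report_path, mode="rt", newline="") as fh:
    for record in csv.reader(fh):
        print(record)
```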
befe9d9fe8a23849fe477c8c46be22c914bcb4d9 | 2d0bada349646b801a69c542407279cc7bc25013 | /src/vai_quantizer/vai_q_pytorch/example/pruning/torch/cifar10_mynet_pruning.py | 755d8ac60ac2669fec756a4529d43f399c7326a7 | [
"Apache-2.0",
"BSD-2-Clause",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"BSD-3-Clause-Open-MPI",
"LicenseRef-scancode-free-unknown",
"Libtool-exception",
"GCC-exception-3.1",
"LicenseRef-scancode-mit-old-style",
"OFL-1.1",
"JSON",
"LGPL-2.1-only",
"LGPL-2.0-or-later",
"ICU",
"LicenseRef-scancode-other-permissive",
"GPL-2.0-or-later",
"GPL-3.0-only",
"LicenseRef-scancode-issl-2018",
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-unicode",
"LGPL-3.0-only",
"LicenseRef-scancode-warranty-disclaimer",
"GPL-3.0-or-later",
"Zlib",
"BSD-Source-Code",
"ClArtistic",
"LicenseRef-scancode-unknown-license-reference",
"ISC",
"NCSA",
"LicenseRef-scancode-proprietary-license",
"GPL-2.0-only",
"CC-BY-4.0",
"FSFULLR",
"Minpack",
"Unlicense",
"BSL-1.0",
"NAIST-2003",
"LicenseRef-scancode-protobuf",
"LicenseRef-scancode-public-domain",
"Libpng",
"Spencer-94",
"Intel",
"GPL-1.0-or-later",
"MPL-2.0"
]
| permissive | Xilinx/Vitis-AI | 31e664f7adff0958bb7d149883ab9c231efb3541 | f74ddc6ed086ba949b791626638717e21505dba2 | refs/heads/master | 2023-08-31T02:44:51.029166 | 2023-07-27T06:50:28 | 2023-07-27T06:50:28 | 215,649,623 | 1,283 | 683 | Apache-2.0 | 2023-08-17T09:24:55 | 2019-10-16T21:41:54 | Python | UTF-8 | Python | false | false | 10,659 | py | import argparse
import os
import time
import torch
import torch.nn as nn
import torchvision.datasets as datasets
import torchvision.transforms as transforms
from pytorch_nndct import get_pruning_runner
parser = argparse.ArgumentParser()
parser.add_argument(
'--method', type=str, default='iterative', help='iterative/one_step')
parser.add_argument(
'--lr', type=float, default=1e-3, help='Initial learning rate')
parser.add_argument(
'--sparsity', type=float, default=0.5, help='Sparsity ratio')
parser.add_argument(
'--ana_subset_len',
type=int,
default=2000,
help='Subset length for evaluating model in analysis, using the whole validation dataset if it is not set'
)
parser.add_argument(
    '--num_subnet', type=int, default=20, help='Total number of subnets')
parser.add_argument('--epoches', type=int, default=1, help='Train epoch')
parser.add_argument(
'--pretrained',
type=str,
default='mynet.pth',
help='Pretrained model filepath')
parser.add_argument(
'--data_dir',
type=str,
default='./cifar10',
help='Dataset directory')
parser.add_argument(
'--num_workers',
type=int,
default=64,
help='Number of workers used in dataloading')
parser.add_argument('--batch_size', type=int, default=128, help='Batch size')
parser.add_argument(
'--weight_decay', type=float, default=1e-4, help='Weight decay')
parser.add_argument('--momentum', type=float, default=0.9, help='Momentum')
args, _ = parser.parse_known_args()
class MyNet(nn.Module):
def __init__(self):
super(MyNet, self).__init__()
self.conv1 = nn.Conv2d(3, 32, 3)
self.bn1 = nn.BatchNorm2d(32)
self.relu1 = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32, 128, 3, stride=2)
self.bn2 = nn.BatchNorm2d(128)
self.relu2 = nn.ReLU(inplace=True)
self.conv3 = nn.Conv2d(128, 256, 3)
self.bn3 = nn.BatchNorm2d(256)
self.relu3 = nn.ReLU(inplace=True)
self.conv4 = nn.Conv2d(256, 512, 3)
self.bn4 = nn.BatchNorm2d(512)
self.relu4 = nn.ReLU(inplace=True)
self.avgpool1 = nn.AdaptiveAvgPool2d((1,1))
self.fc1 = nn.Linear(512, 32)
self.fc = nn.Linear(32, 10)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu1(x)
x = self.conv2(x)
x = self.bn2(x)
x = self.relu2(x)
x = self.conv3(x)
x = self.bn3(x)
x = self.relu3(x)
x = self.conv4(x)
x = self.bn4(x)
x = self.relu4(x)
x = self.avgpool1(x)
x = torch.flatten(x, 1)
x = self.fc1(x)
x = self.fc(x)
return x
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self, name, fmt=':f'):
self.name = name
self.fmt = fmt
self.reset()
def reset(self):
self.val = 0
self.avg = 0
self.sum = 0
self.count = 0
def update(self, val, n=1):
self.val = val
self.sum += val * n
self.count += n
self.avg = self.sum / self.count
def __str__(self):
fmtstr = '{name} {val' + self.fmt + '} ({avg' + self.fmt + '})'
return fmtstr.format(**self.__dict__)
class ProgressMeter(object):
def __init__(self, num_batches, meters, prefix=""):
self.batch_fmtstr = self._get_batch_fmtstr(num_batches)
self.meters = meters
self.prefix = prefix
def display(self, batch):
entries = [self.prefix + self.batch_fmtstr.format(batch)]
entries += [str(meter) for meter in self.meters]
print('\t'.join(entries))
def _get_batch_fmtstr(self, num_batches):
num_digits = len(str(num_batches // 1))
fmt = '{:' + str(num_digits) + 'd}'
return '[' + fmt + '/' + fmt.format(num_batches) + ']'
def accuracy(output, target, topk=(1,)):
"""Computes the accuracy over the k top predictions
for the specified values of k"""
with torch.no_grad():
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].flatten().float().sum(0, keepdim=True)
res.append(correct_k.mul_(100.0 / batch_size))
return res
def adjust_learning_rate(optimizer, epoch, lr):
"""Sets the learning rate to the initial LR decayed by every 2 epochs"""
lr = lr * (0.1**(epoch // 2))
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def train(train_loader, model, criterion, optimizer, epoch):
batch_time = AverageMeter('Time', ':6.3f')
data_time = AverageMeter('Data', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Acc@1', ':6.2f')
top5 = AverageMeter('Acc@5', ':6.2f')
progress = ProgressMeter(
len(train_loader), [batch_time, data_time, losses, top1, top5],
prefix="Epoch: [{}]".format(epoch))
# switch to train mode
model.train()
end = time.time()
for i, (images, target) in enumerate(train_loader):
# measure data loading time
data_time.update(time.time() - end)
model = model.cuda()
images = images.cuda()
target = target.cuda()
# compute output
output = model(images)
loss = criterion(output, target)
# measure accuracy and record loss
acc1, acc5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), images.size(0))
top1.update(acc1[0], images.size(0))
top5.update(acc5[0], images.size(0))
# compute gradient and do SGD step
optimizer.zero_grad()
loss.backward()
optimizer.step()
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % 10 == 0:
progress.display(i)
def eval_fn(model, dataloader):
top1 = AverageMeter('Acc@1', ':6.2f')
model.eval()
with torch.no_grad():
for i, (images, targets) in enumerate(dataloader):
images = images.cuda()
targets = targets.cuda()
outputs = model(images)
acc1, _ = accuracy(outputs, targets, topk=(1, 5))
top1.update(acc1[0], images.size(0))
return top1.avg
def calibration_fn(model, dataloader, number_forward=100):
model.train()
print("Adaptive BN atart...")
with torch.no_grad():
for index, (images, target) in enumerate(dataloader):
images = images.cuda()
model(images)
if index > number_forward:
break
print("Adaptive BN end...")
def evaluate(dataloader, model, criterion):
batch_time = AverageMeter('Time', ':6.3f')
losses = AverageMeter('Loss', ':.4e')
top1 = AverageMeter('Acc@1', ':6.2f')
top5 = AverageMeter('Acc@5', ':6.2f')
progress = ProgressMeter(
len(val_loader), [batch_time, losses, top1, top5], prefix='Test: ')
# switch to evaluate mode
model.eval()
with torch.no_grad():
end = time.time()
for i, (images, target) in enumerate(dataloader):
model = model.cuda()
images = images.cuda(non_blocking=True)
target = target.cuda(non_blocking=True)
# compute output
output = model(images)
loss = criterion(output, target)
# measure accuracy and record loss
acc1, acc5 = accuracy(output, target, topk=(1, 5))
losses.update(loss.item(), images.size(0))
top1.update(acc1[0], images.size(0))
top5.update(acc5[0], images.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % 50 == 0:
progress.display(i)
# TODO: this should also be done with the ProgressMeter
print(' * Acc@1 {top1.avg:.3f} Acc@5 {top5.avg:.3f}'.format(
top1=top1, top5=top5))
return top1.avg, top5.avg
def load_weights(model, model_path):
checkpoint = torch.load(model_path)
model.load_state_dict(checkpoint)
return model
def get_dataloader():
transform = transforms.Compose(
[transforms.ToTensor(),
transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5))])
train_dataset = datasets.CIFAR10(root=args.data_dir, train=True, download=True, transform=transform)
train_loader = torch.utils.data.DataLoader(train_dataset, batch_size=args.batch_size, shuffle=True, num_workers=args.num_workers)
val_dataset = datasets.CIFAR10(root=args.data_dir, train=False, download=True, transform=transform)
val_loader = torch.utils.data.DataLoader(val_dataset, batch_size=args.batch_size, shuffle=False, num_workers=args.num_workers)
ana_dataset = torch.utils.data.Subset(val_dataset,
list(range(args.ana_subset_len)))
ana_loader = torch.utils.data.DataLoader(
ana_dataset,
batch_size=args.batch_size,
shuffle=False,
num_workers=args.num_workers,
pin_memory=True)
return train_loader, val_loader, ana_loader
if __name__ == '__main__':
model = MyNet()
model.cuda()
train_loader, val_loader, ana_loader = get_dataloader()
criterion = torch.nn.CrossEntropyLoss().cuda()
if os.path.exists(args.pretrained):
ckpt = torch.load(args.pretrained)
model.load_state_dict(ckpt)
acc1, acc5 = evaluate(val_loader, model, criterion)
else:
optimizer = torch.optim.Adam(
model.parameters(), args.lr, weight_decay=args.weight_decay)
best_acc1 = 0
for epoch in range(args.epoches):
#adjust_learning_rate(optimizer, epoch, args.lr)
train(train_loader, model, criterion, optimizer, epoch)
acc1, acc5 = evaluate(val_loader, model, criterion)
if acc1 > best_acc1:
best_acc1 = acc1
torch.save(model.state_dict(), args.pretrained)
input_signature = torch.randn([1, 3, 32, 32], dtype=torch.float32)
input_signature = input_signature.cuda()
pruning_runner = get_pruning_runner(model, input_signature, args.method)
if args.method == 'iterative':
pruning_runner.ana(eval_fn, args=(val_loader,), gpus=['0'])
model = pruning_runner.prune(removal_ratio=args.sparsity, mode='sparse')
elif args.method == 'one_step':
pruning_runner.search(
gpus=['0'],
calibration_fn=calibration_fn,
calib_args=(val_loader,),
eval_fn=eval_fn,
eval_args=(val_loader,),
num_subnet=args.num_subnet,
removal_ratio=args.sparsity)
# index=None: select optimal subnet automatically
model = pruning_runner.prune(removal_ratio=args.sparsity, index=None, mode='slim')
optimizer = torch.optim.Adam(
model.parameters(), args.lr, weight_decay=args.weight_decay)
best_acc1 = 0
for epoch in range(args.epoches):
train(train_loader, model, criterion, optimizer, epoch)
acc1, acc5 = evaluate(val_loader, model, criterion)
# remember best acc@1 and save checkpoint
is_best = acc1 > best_acc1
best_acc1 = max(acc1, best_acc1)
if is_best:
torch.save(model.state_dict(), 'mynet_pruned.pth')
| [
"[email protected]"
]
| |
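The `accuracy` helper above takes raw logits, keeps the `maxk` highest-scoring classes per sample, and reports the percentage of samples whose label appears among the top `k`. A minimal sketch exercising it with random stand-in data, assuming the `accuracy` function defined in this file is in scope:

```python
# Minimal sketch: call the top-k accuracy helper on dummy data.
import torch

logits = torch.randn(8, 10)           # stand-in for model output: batch of 8, 10 classes
labels = torch.randint(0, 10, (8,))   # stand-in ground-truth labels
top1, top5 = accuracy(logits, labels, topk=(1, 5))
print(top1.item(), top5.item())       # percentages over the batch
```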
260bf1b93bcd27bc0cb7694536a4ba9c217b4072 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/104/usersdata/228/50746/submittedfiles/av1_2.py | bae0b9ed8cdec01e64f282f927ac05793347dc28 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 470 | py | # -*- coding: utf-8 -*-
import math
n=int(input('enter the dimension of the chocolate bar:'))
x1=int(input('enter the x coordinate of figure 1:'))
y1=int(input('enter the y coordinate of figure 1:'))
x2=int(input('enter the x coordinate of figure 2:'))
y2=int(input('enter the y coordinate of figure 2:'))
if x1<=(n/2) and y1<=(n/2) and x2>=(n/2) and y2>=(n/2) or x2<=(n/2) and y2<=(n/2) and x1>=(n/2) and y1>=(n/2):
print('S')
else:
print('N')
| [
"[email protected]"
]
| |
099905020c576120fb6232a703a9de83c5c8002b | a7ded5d3d19a98e61a44189cffe3703f7938e0db | /xero_python/payrollau/models/timesheet.py | 493b8954d2de30837bd208ee908e652e279ab3d5 | [
"MIT"
]
| permissive | liseekeralbert/xero-python | dfd1076344f763d74f81f701e32600cf88bcc7b2 | d27ab1894ecd84d2a9af0ca91583593756b21ab3 | refs/heads/master | 2022-12-16T07:41:14.331308 | 2020-09-18T17:12:35 | 2020-09-18T17:12:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,740 | py | # coding: utf-8
"""
Xero Payroll AU
This is the Xero Payroll API for orgs in Australia region. # noqa: E501
OpenAPI spec version: 2.3.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class Timesheet(BaseModel):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"employee_id": "str",
"start_date": "date[ms-format]",
"end_date": "date[ms-format]",
"status": "TimesheetStatus",
"hours": "float",
"timesheet_id": "str",
"timesheet_lines": "list[TimesheetLine]",
"updated_date_utc": "datetime[ms-format]",
"validation_errors": "list[ValidationError]",
}
attribute_map = {
"employee_id": "EmployeeID",
"start_date": "StartDate",
"end_date": "EndDate",
"status": "Status",
"hours": "Hours",
"timesheet_id": "TimesheetID",
"timesheet_lines": "TimesheetLines",
"updated_date_utc": "UpdatedDateUTC",
"validation_errors": "ValidationErrors",
}
def __init__(
self,
employee_id=None,
start_date=None,
end_date=None,
status=None,
hours=None,
timesheet_id=None,
timesheet_lines=None,
updated_date_utc=None,
validation_errors=None,
): # noqa: E501
"""Timesheet - a model defined in OpenAPI""" # noqa: E501
self._employee_id = None
self._start_date = None
self._end_date = None
self._status = None
self._hours = None
self._timesheet_id = None
self._timesheet_lines = None
self._updated_date_utc = None
self._validation_errors = None
self.discriminator = None
self.employee_id = employee_id
self.start_date = start_date
self.end_date = end_date
if status is not None:
self.status = status
if hours is not None:
self.hours = hours
if timesheet_id is not None:
self.timesheet_id = timesheet_id
if timesheet_lines is not None:
self.timesheet_lines = timesheet_lines
if updated_date_utc is not None:
self.updated_date_utc = updated_date_utc
if validation_errors is not None:
self.validation_errors = validation_errors
@property
def employee_id(self):
"""Gets the employee_id of this Timesheet. # noqa: E501
The Xero identifier for an employee # noqa: E501
:return: The employee_id of this Timesheet. # noqa: E501
:rtype: str
"""
return self._employee_id
@employee_id.setter
def employee_id(self, employee_id):
"""Sets the employee_id of this Timesheet.
The Xero identifier for an employee # noqa: E501
:param employee_id: The employee_id of this Timesheet. # noqa: E501
:type: str
"""
if employee_id is None:
raise ValueError(
"Invalid value for `employee_id`, must not be `None`"
) # noqa: E501
self._employee_id = employee_id
@property
def start_date(self):
"""Gets the start_date of this Timesheet. # noqa: E501
Period start date (YYYY-MM-DD) # noqa: E501
:return: The start_date of this Timesheet. # noqa: E501
:rtype: date
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""Sets the start_date of this Timesheet.
Period start date (YYYY-MM-DD) # noqa: E501
:param start_date: The start_date of this Timesheet. # noqa: E501
:type: date
"""
if start_date is None:
raise ValueError(
"Invalid value for `start_date`, must not be `None`"
) # noqa: E501
self._start_date = start_date
@property
def end_date(self):
"""Gets the end_date of this Timesheet. # noqa: E501
Period end date (YYYY-MM-DD) # noqa: E501
:return: The end_date of this Timesheet. # noqa: E501
:rtype: date
"""
return self._end_date
@end_date.setter
def end_date(self, end_date):
"""Sets the end_date of this Timesheet.
Period end date (YYYY-MM-DD) # noqa: E501
:param end_date: The end_date of this Timesheet. # noqa: E501
:type: date
"""
if end_date is None:
raise ValueError(
"Invalid value for `end_date`, must not be `None`"
) # noqa: E501
self._end_date = end_date
@property
def status(self):
"""Gets the status of this Timesheet. # noqa: E501
:return: The status of this Timesheet. # noqa: E501
:rtype: TimesheetStatus
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this Timesheet.
:param status: The status of this Timesheet. # noqa: E501
:type: TimesheetStatus
"""
self._status = status
@property
def hours(self):
"""Gets the hours of this Timesheet. # noqa: E501
Timesheet total hours # noqa: E501
:return: The hours of this Timesheet. # noqa: E501
:rtype: float
"""
return self._hours
@hours.setter
def hours(self, hours):
"""Sets the hours of this Timesheet.
Timesheet total hours # noqa: E501
:param hours: The hours of this Timesheet. # noqa: E501
:type: float
"""
self._hours = hours
@property
def timesheet_id(self):
"""Gets the timesheet_id of this Timesheet. # noqa: E501
The Xero identifier for a Payroll Timesheet # noqa: E501
:return: The timesheet_id of this Timesheet. # noqa: E501
:rtype: str
"""
return self._timesheet_id
@timesheet_id.setter
def timesheet_id(self, timesheet_id):
"""Sets the timesheet_id of this Timesheet.
The Xero identifier for a Payroll Timesheet # noqa: E501
:param timesheet_id: The timesheet_id of this Timesheet. # noqa: E501
:type: str
"""
self._timesheet_id = timesheet_id
@property
def timesheet_lines(self):
"""Gets the timesheet_lines of this Timesheet. # noqa: E501
:return: The timesheet_lines of this Timesheet. # noqa: E501
:rtype: list[TimesheetLine]
"""
return self._timesheet_lines
@timesheet_lines.setter
def timesheet_lines(self, timesheet_lines):
"""Sets the timesheet_lines of this Timesheet.
:param timesheet_lines: The timesheet_lines of this Timesheet. # noqa: E501
:type: list[TimesheetLine]
"""
self._timesheet_lines = timesheet_lines
@property
def updated_date_utc(self):
"""Gets the updated_date_utc of this Timesheet. # noqa: E501
Last modified timestamp # noqa: E501
:return: The updated_date_utc of this Timesheet. # noqa: E501
:rtype: datetime
"""
return self._updated_date_utc
@updated_date_utc.setter
def updated_date_utc(self, updated_date_utc):
"""Sets the updated_date_utc of this Timesheet.
Last modified timestamp # noqa: E501
:param updated_date_utc: The updated_date_utc of this Timesheet. # noqa: E501
:type: datetime
"""
self._updated_date_utc = updated_date_utc
@property
def validation_errors(self):
"""Gets the validation_errors of this Timesheet. # noqa: E501
Displays array of validation error messages from the API # noqa: E501
:return: The validation_errors of this Timesheet. # noqa: E501
:rtype: list[ValidationError]
"""
return self._validation_errors
@validation_errors.setter
def validation_errors(self, validation_errors):
"""Sets the validation_errors of this Timesheet.
Displays array of validation error messages from the API # noqa: E501
:param validation_errors: The validation_errors of this Timesheet. # noqa: E501
:type: list[ValidationError]
"""
self._validation_errors = validation_errors
| [
"[email protected]"
]
| |
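The generated setters above raise `ValueError` when a required field (`employee_id`, `start_date`, `end_date`) is `None`, so the model enforces them at construction time. A minimal sketch with placeholder values, assuming the `Timesheet` class defined above is in scope:

```python
# Minimal sketch: construct a Timesheet; the three required fields must be non-None.
import datetime

ts = Timesheet(
    employee_id="00000000-0000-0000-0000-000000000000",  # placeholder Xero employee ID
    start_date=datetime.date(2020, 1, 1),
    end_date=datetime.date(2020, 1, 14),
)
print(ts.hours)  # None until explicitly set

try:
    Timesheet(employee_id=None, start_date=None, end_date=None)
except ValueError as exc:
    print(exc)  # "Invalid value for `employee_id`, must not be `None`"
```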
26a9cb5f30a3cf4c4bf5cd521eeab51868367484 | eddb9243d65ff6b0c51502c877e91c034ca5b349 | /desktop_new.py | 422713fc6c35f00901b3136bbb7e29a4dbabe62e | []
| no_license | cristianmusic7/Donkey-car-image-recognition | 1a90b1261f78d986171fab5d13b6f75ceba11c63 | e08afc1c250cc563a8a776785c56cb15ef9c3e58 | refs/heads/master | 2020-09-03T00:38:46.930267 | 2019-11-04T03:47:28 | 2019-11-04T03:47:28 | 219,342,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,299 | py | # desktop.py
import asyncio
import aiohttp
import cv2
import json
import numpy as np
import argparse
from imutils.video import FPS
import imutils
from rtcbot import RTCConnection, Gamepad, CVDisplay
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-p", "--prototxt", required=True,
help="path to Caffe 'deploy' prototxt file")
ap.add_argument("-m", "--model", required=True,
help="path to Caffe pre-trained model")
ap.add_argument("-c", "--confidence", type=float, default=0.2,
help="minimum probability to filter weak detections")
args = vars(ap.parse_args())
# load our serialized model from disk
print("[INFO] loading model...")
net = cv2.dnn.readNetFromCaffe(args["prototxt"], args["model"])
print("[INFO] loading model done")
# initialize the list of class labels MobileNet SSD was trained to
# detect, then generate a set of bounding box colors for each class
CLASSES = ["background", "aeroplane", "bicycle", "bird", "boat",
"bottle", "bus", "car", "cat", "chair", "cow", "diningtable",
"dog", "horse", "motorbike", "person", "pottedplant", "sheep",
"sofa", "train", "tvmonitor"]
CONSIDER = set(["dog", "person", "car"])
objCount = {obj: 0 for obj in CONSIDER}
disp = CVDisplay()
#g = Gamepad()
conn = RTCConnection()
fps = FPS().start()
async def fpsCheck():
while True:
await asyncio.sleep(5)
if fps:
fps.stop()
print("[INFO] elasped time: {:.2f}".format(fps.elapsed()))
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))
@conn.video.subscribe
def onFrame(frame):
# Show a 4x larger image so that it is easy to see
#resized = cv2.resize(frame, (frame.shape[1], frame.shape[0]))
#resized = imutils.resize(frame, width=400)
fps.update()
frame = imutils.resize(frame, width=400)
(h, w) = frame.shape[:2]
blob = cv2.dnn.blobFromImage(cv2.resize(frame, (300, 300)),0.007843, (300, 300), 127.5)
# pass the blob through the network and obtain the detections and
# predictions
net.setInput(blob)
detections = net.forward()
# reset the object count for each object in the CONSIDER set
objCount = {obj: 0 for obj in CONSIDER}
# loop over the detections
for i in np.arange(0, detections.shape[2]):
# extract the confidence (i.e., probability) associated with
# the prediction
confidence = detections[0, 0, i, 2]
# filter out weak detections by ensuring the confidence is
# greater than the minimum confidence
if confidence > args["confidence"]:
# extract the index of the class label from the
# detections
idx = int(detections[0, 0, i, 1])
# check to see if the predicted class is in the set of
# classes that need to be considered
if CLASSES[idx] in CONSIDER:
# increment the count of the particular object
# detected in the frame
objCount[CLASSES[idx]] += 1
# compute the (x, y)-coordinates of the bounding box
# for the object
box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
(startX, startY, endX, endY) = box.astype("int")
# draw the bounding box around the detected object on
# the frame
cv2.rectangle(frame, (startX, startY), (endX, endY),
(255, 0, 0), 2)
# draw the object count on the frame
label = ", ".join("{}: {}".format(obj, count) for (obj, count) in objCount.items())
cv2.putText(frame, label, (10, h - 20),
cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255,0), 2)
#cv2.imshow("Home pet location monitor ({})".format(i), frame)
disp.put_nowait(frame)
async def connect():
localDescription = await conn.getLocalDescription()
async with aiohttp.ClientSession() as session:
async with session.post(
"http://192.168.0.3:8080/connect", data=json.dumps(localDescription)
) as resp:
response = await resp.json()
await conn.setRemoteDescription(response)
# Start sending gamepad controls
#g.subscribe(conn)
asyncio.ensure_future(fpsCheck())
asyncio.ensure_future(connect())
try:
asyncio.get_event_loop().run_forever()
finally:
fps.stop()
conn.close()
disp.close()
#g.close()
| [
"[email protected]"
]
| |
bac6cf0ef69a3305e112f61db28295b9b5055f98 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/CodeJamData/15/02/18.py | 7fff1f128ae1ac791095e13e767772b235e64379 | []
| no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | import sys
import math
t = int(sys.stdin.readline().strip())
for i in range(t):
d = int(sys.stdin.readline().strip())
p = [int(k) for k in sys.stdin.readline().strip().split()]
minm = max(p)
j = 1
while j < minm:
m = 0
for k in range(d):
m += (p[k] + j - 1) / j - 1
minm = min(m + j, minm)
j += 1
print "Case #%d: %d" % (i + 1, minm)
| [
"[email protected]"
]
| |
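The loop above searches over a target stack height `j`: a plate of `p` pancakes needs `ceil(p/j) - 1` special minutes to be split into stacks of at most `j`, after which eating takes `j` more minutes, and the answer is the minimum of `splits + j`. A minimal Python 3 restatement of the same search (the original relies on Python 2 integer division):

```python
# Minimal Python 3 restatement of the search over target stack heights.
import math

def min_minutes(plates):
    best = max(plates)  # j = max(plates): eat directly, no splits
    for j in range(1, max(plates)):
        splits = sum(math.ceil(p / j) - 1 for p in plates)
        best = min(best, splits + j)
    return best

print(min_minutes([3]))     # 3: eating directly is optimal
print(min_minutes([4, 4]))  # 4: eat directly, or split both into 2+2 first
```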
b97cc92c79952f82fe2dcbc3881bef415eabfae2 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/CISCO-DMN-DSG-PRODUCTION-MIB.py | b5ed607f3ceec754b230a08f00d28aa2ed45f9dd | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 5,400 | py | #
# PySNMP MIB module CISCO-DMN-DSG-PRODUCTION-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-DMN-DSG-PRODUCTION-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:55:07 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
ObjectIdentifier, OctetString, Integer = mibBuilder.importSymbols("ASN1", "ObjectIdentifier", "OctetString", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ValueSizeConstraint, SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ValueSizeConstraint", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection")
ciscoDSGUtilities, = mibBuilder.importSymbols("CISCO-DMN-DSG-ROOT-MIB", "ciscoDSGUtilities")
ObjectGroup, NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "NotificationGroup", "ModuleCompliance")
MibScalar, MibTable, MibTableRow, MibTableColumn, iso, Integer32, Counter32, TimeTicks, Counter64, IpAddress, Bits, ModuleIdentity, NotificationType, Unsigned32, MibIdentifier, Gauge32, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "iso", "Integer32", "Counter32", "TimeTicks", "Counter64", "IpAddress", "Bits", "ModuleIdentity", "NotificationType", "Unsigned32", "MibIdentifier", "Gauge32", "ObjectIdentity")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
ciscoDSGProd = ModuleIdentity((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21))
ciscoDSGProd.setRevisions(('2010-08-24 07:00', '2009-12-20 12:00',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoDSGProd.setRevisionsDescriptions(('V01.00.01 2010-08-24 Added itmes : prodBoardModelNum, prodBoardModelName, prodBoardCatalogNum, prodBoardCustomerCode and prodBoardFpgaType.', 'V01.00.00 2009-12-20 Initial Version.',))
if mibBuilder.loadTexts: ciscoDSGProd.setLastUpdated('201008240700Z')
if mibBuilder.loadTexts: ciscoDSGProd.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoDSGProd.setContactInfo('Cisco Systems, Inc. Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553 NETS E-mail: [email protected]')
if mibBuilder.loadTexts: ciscoDSGProd.setDescription('Cisco Production MIB.')
prodInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1))
prodHostName = MibScalar((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 14))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: prodHostName.setStatus('current')
if mibBuilder.loadTexts: prodHostName.setDescription('User Configurable hostname of this Unit.')
prodTrackingId = MibScalar((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 12))).setMaxAccess("readonly")
if mibBuilder.loadTexts: prodTrackingId.setStatus('current')
if mibBuilder.loadTexts: prodTrackingId.setDescription('Unit Tracking ID.')
prodModelNo = MibScalar((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 49))).setMaxAccess("readonly")
if mibBuilder.loadTexts: prodModelNo.setStatus('current')
if mibBuilder.loadTexts: prodModelNo.setDescription('Model Number.')
prodModelName = MibScalar((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 49))).setMaxAccess("readonly")
if mibBuilder.loadTexts: prodModelName.setStatus('current')
if mibBuilder.loadTexts: prodModelName.setDescription('Model Name.')
prodCatalogNo = MibScalar((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 49))).setMaxAccess("readonly")
if mibBuilder.loadTexts: prodCatalogNo.setStatus('current')
if mibBuilder.loadTexts: prodCatalogNo.setDescription('Catalogue Number.')
prodCustomerCode = MibScalar((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 49))).setMaxAccess("readonly")
if mibBuilder.loadTexts: prodCustomerCode.setStatus('current')
if mibBuilder.loadTexts: prodCustomerCode.setDescription('Customer Code.')
prodLimitVer = MibScalar((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 49))).setMaxAccess("readonly")
if mibBuilder.loadTexts: prodLimitVer.setStatus('current')
if mibBuilder.loadTexts: prodLimitVer.setDescription('Oldest Compatible Application Version.')
prodFPGADesignation = MibScalar((1, 3, 6, 1, 4, 1, 1429, 2, 2, 5, 21, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 0))).setMaxAccess("readonly")
if mibBuilder.loadTexts: prodFPGADesignation.setStatus('current')
if mibBuilder.loadTexts: prodFPGADesignation.setDescription('Production FPGA Designation.')
mibBuilder.exportSymbols("CISCO-DMN-DSG-PRODUCTION-MIB", prodFPGADesignation=prodFPGADesignation, prodHostName=prodHostName, PYSNMP_MODULE_ID=ciscoDSGProd, prodInfo=prodInfo, prodTrackingId=prodTrackingId, prodLimitVer=prodLimitVer, prodModelName=prodModelName, prodCatalogNo=prodCatalogNo, prodCustomerCode=prodCustomerCode, prodModelNo=prodModelNo, ciscoDSGProd=ciscoDSGProd)
| [
"[email protected]"
]
| |
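The module above registers `prodHostName` under OID `1.3.6.1.4.1.1429.2.2.5.21.1.1`. A minimal sketch of reading it from a device with pysnmp; the address and community string are placeholders:

```python
# Minimal sketch: SNMP GET of prodHostName (instance .0).
# "192.0.2.10" and "public" are placeholders.
from pysnmp.hlapi import (
    CommunityData, ContextData, ObjectIdentity, ObjectType,
    SnmpEngine, UdpTransportTarget, getCmd,
)

error_ind, error_stat, _, var_binds = next(getCmd(
    SnmpEngine(),
    CommunityData("public"),
    UdpTransportTarget(("192.0.2.10", 161)),
    ContextData(),
    ObjectType(ObjectIdentity("1.3.6.1.4.1.1429.2.2.5.21.1.1.0")),
))
if not error_ind and not error_stat:
    for name, value in var_binds:
        print(name, "=", value)
```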
b01ad9c8a93447aaab97d72aebb1b10d25c1222b | 1b862f34c125ce200244dd79e4fda4b5b605ce2e | /.history/images_20210218100216.py | 7b30a0f4a984693ddd55e1ad78ab2b6c5d15a53b | []
| no_license | edwino26/CoreImages | 26085a49cf1cb79442ae563a88354b2fdceace87 | 6bf6e68cac8ab36c87b1e6ea702bfe6882b0f40e | refs/heads/master | 2023-06-22T12:53:37.344895 | 2021-07-21T04:31:44 | 2021-07-21T04:31:44 | 309,553,247 | 0 | 4 | null | 2021-04-29T23:23:15 | 2020-11-03T02:45:07 | Lasso | UTF-8 | Python | false | false | 2,677 | py |
#
# %%
import glob
import cv2
import os.path
import numpy as np
import matplotlib.pyplot as plt
# %%
cores_per_image = 6
uvFiles = glob.glob('./Photos/*.jpg')
fname = uvFiles[0][9:25]
print(fname)
# Picture path
img = cv2.imread(uvFiles[0])
a = []
b = []
# %%
def oneventlbuttondown(event, x, y, flags, param):
if event == cv2.EVENT_LBUTTONDOWN:
xy = "%d,%d" % (x, y)
a.append(x)
b.append(y)
cv2.circle(img, (x, y), 10, (0, 0, 255), thickness=-1)
# cv2.putText(img, xy, (x, y), cv2.FONT_HERSHEY_PLAIN, 1.0, (0, 0, 0), thickness=1)
cv2.imshow("image", img)
core_length = 3
vc = []
do = int(fname[0:3])
dn = int(fname[5:8])
for k in range(len(uvFiles)): #Loop through various files containing images
for i in range(cores_per_image):
if k == 0 and i == 0:
cv2.namedWindow("image", cv2.WINDOW_NORMAL)
# cv2.resizeWindow("output", 400, 300)
cv2.setMouseCallback("image", oneventlbuttondown)
cv2.imshow("image", img)
print(
'Click 1) left upper corner 2) right lower corner in leftmost core and 3) leftupper corner in second core')
cv2.waitKey(0)
y = b[0];
x = a[0];
dy = b[1] - b[0];
dx = a[1] - a[0]
gap = a[2] - a[1]
if i == 3:
midgap = gap * 4
else:
midgap = 0
if i > 0: x = x + (dx + gap) + midgap
crop_img = img[y:y + dy, x:x + dx]
if i == 0:
vc = crop_img
else:
vc = cv2.vconcat([vc, crop_img])
crop_name = str(int(fname[0:3]) + (core_length * i)) + ".jpg"
path = os.path.join(os.path.relpath('Cropped', start=os.curdir), crop_name)
cv2.imwrite(path, crop_img)
concat_name = fname[0:3] + "-" + fname[5:8] + ".jpg"
path = os.path.join(os.path.relpath('Cropped', start=os.curdir), concat_name)
cv2.imwrite(path, vc)
p = vc.shape
vc_gray = cv2.cvtColor(vc, cv2.COLOR_BGR2GRAY)
print(vc.shape) # Dimensions of Image
print(vc_gray.shape) # It is already a numpy array
print(type(vc_gray))
# print(p[:10, :10, 1 ])
img_log = np.average(vc_gray[:, 80:120], axis=1)
depths = np.arange(do, dn, (dn - do) / len(img_log))
plt.figure()
# plt.subplot(1, 2, 1)
plt.subplot2grid((1, 10), (0, 0), colspan=3)
plt.plot(img_log, depths, 'green');
plt.axis([0, 120, do, dn]);
plt.gca().invert_yaxis();
plt.gca().invert_xaxis()
# plt.subplot(1, 2 ,2)
plt.subplot2grid((1, 10), (0, 3), colspan=7)
plt.imshow(vc_gray[:, 40:120], aspect='auto', origin='upper');
plt.colorbar()
p_50 = np.percentile(img_log, 50)
plt.show()
# %%
| [
"[email protected]"
]
| |
cc2dc649da57f9b2130bd313729257af71306cba | e777b4cd72b6f8eb0eb451943edc2b2cac3a367a | /setup.py | 67443f8896345b4dd232e8701ae5f396baad255e | []
| no_license | bannsec/xss_catcher | 3620f523d0a44d54116148d2f70fe7faaef366dd | a2b363c0ff98dfd164903a6b42150ce679488b48 | refs/heads/master | 2021-06-18T19:35:55.816391 | 2017-06-19T00:11:40 | 2017-06-19T00:11:40 | 63,832,134 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,124 | py | # Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
long_description = "See website for more info."
setup(
name='xss_catcher',
version='0.0.2',
description='Simple pythonic script to catch Cross Site Scripting (XSS) connections',
long_description=long_description,
url='https://github.com/owlz/xss_catcher',
author='Michael Bann',
author_email='[email protected]',
license='MIT',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: End Users/Desktop',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Operating System :: POSIX :: Linux',
'Environment :: Console'
],
keywords='xss',
packages=find_packages(exclude=['contrib', 'docs', 'tests','dist']),
install_requires=[],
entry_points={
'console_scripts': [
'xss_catcher = xss_catcher.xss_catcher:main',
],
},
)
| [
"[email protected]"
]
| |
c0ee3d3acb28c074c182de16fb58bda400405331 | 0f96f24c8682ece3b501904baaa0eef411969bb1 | /0x0F-python-object_relational_mapping/6-model_state.py | 3b6b8d1e31f9d3168bf63050b77ae4dcab6af4a6 | []
| no_license | dkokonkwo/holbertonschool-higher_level_programming | 95c5103001e807bd46767f66d97568e23d893e68 | 5fa97f754afaf7326550113416e80fd942226254 | refs/heads/master | 2023-03-18T03:38:33.386497 | 2020-09-28T02:56:13 | 2020-09-28T02:56:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | #!/usr/bin/python3
"""Start link class to table in database
"""
import sys
from model_state import Base, State
from sqlalchemy import (create_engine)
if __name__ == "__main__":
engine = create_engine('mysql+mysqldb://{}:{}@localhost/{}'.format(
sys.argv[1], sys.argv[2], sys.argv[3]), pool_pre_ping=True)
Base.metadata.create_all(engine)
| [
"[email protected]"
]
| |
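The script above imports `Base` and `State` from a sibling `model_state` module that is not included in this row. A minimal sketch of what that module conventionally looks like for this exercise; the table and column definitions here are an assumption:

```python
# Hypothetical model_state.py: the State model the script above expects.
from sqlalchemy import Column, Integer, String
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()

class State(Base):
    """State table with an auto-incrementing id and a name."""
    __tablename__ = "states"
    id = Column(Integer, primary_key=True, nullable=False, autoincrement=True)
    name = Column(String(128), nullable=False)
```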
bd3f1ffa0841425a78c119759282d4003145b05f | 2ee1fa9a5983c057f050a93c46bc894974856179 | /botorch/utils/multi_objective/scalarization.py | 67a16f8103e1d54fb9a1704949c6f9b1097d21bd | [
"MIT"
]
| permissive | jelena-markovic/botorch | 8bd9641f165264ed1fea68af0a95f0e37b5e187c | dc868cc569e266abb4e7e112d019bd1bd6af27c4 | refs/heads/master | 2023-08-17T05:04:03.033178 | 2020-11-04T23:57:09 | 2020-11-04T23:58:39 | 310,691,257 | 0 | 0 | MIT | 2020-11-06T19:45:42 | 2020-11-06T19:45:41 | null | UTF-8 | Python | false | false | 2,455 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
r"""
Helper utilities for constructing scalarizations.
References
.. [Knowles2005]
J. Knowles, "ParEGO: a hybrid algorithm with on-line landscape approximation
for expensive multiobjective optimization problems," in IEEE Transactions
on Evolutionary Computation, vol. 10, no. 1, pp. 50-66, Feb. 2006.
"""
from __future__ import annotations
from typing import Callable
import torch
from botorch.exceptions.errors import BotorchTensorDimensionError
from botorch.utils.transforms import normalize
from torch import Tensor
def get_chebyshev_scalarization(
weights: Tensor, Y: Tensor, alpha: float = 0.05
) -> Callable[[Tensor], Tensor]:
r"""Construct an augmented Chebyshev scalarization.
Outcomes are first normalized to [0,1] and then an augmented
Chebyshev scalarization is applied.
Augmented Chebyshev scalarization:
objective(y) = min(w * y) + alpha * sum(w * y)
Note: this assumes maximization.
See [Knowles2005]_ for details.
This scalarization can be used with qExpectedImprovement to implement q-ParEGO
as proposed in [Daulton2020qehvi]_.
Args:
weights: A `m`-dim tensor of weights.
Y: A `n x m`-dim tensor of observed outcomes, which are used for
scaling the outcomes to [0,1].
alpha: Parameter governing the influence of the weighted sum term. The
default value comes from [Knowles2005]_.
Returns:
Transform function using the objective weights.
Example:
>>> weights = torch.tensor([0.75, 0.25])
        >>> transform = get_chebyshev_scalarization(weights, Y)
"""
if weights.shape != Y.shape[-1:]:
raise BotorchTensorDimensionError(
"weights must be an `m`-dim tensor where Y is `... x m`."
f"Got shapes {weights.shape} and {Y.shape}."
)
elif Y.ndim > 2:
raise NotImplementedError("Batched Y is not currently supported.")
Y_bounds = torch.stack([Y.min(dim=-2).values, Y.max(dim=-2).values])
def obj(Y: Tensor) -> Tensor:
# scale to [0,1]
Y_normalized = normalize(Y, bounds=Y_bounds)
product = weights * Y_normalized
return product.min(dim=-1).values + alpha * product.sum(dim=-1)
return obj
| [
"[email protected]"
]
| |
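A worked call with illustrative values: outcomes are first min-max normalized over `Y`, then each row is scored as `min(w * y) + 0.05 * sum(w * y)`:

```python
# Minimal sketch: scalarize two outcome vectors with illustrative values.
import torch
from botorch.utils.multi_objective.scalarization import get_chebyshev_scalarization

weights = torch.tensor([0.75, 0.25])
Y = torch.tensor([[0.0, 0.0], [1.0, 2.0]])  # illustrative observed outcomes
transform = get_chebyshev_scalarization(weights, Y)
print(transform(Y))  # tensor([0.0000, 0.3000]): row 2 normalizes to [1, 1]
```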
71357d96f44f5b2a00c333e37802a5dd988ffbdd | 04c06575a49a3f4e30e4f3f2bf2365585664d2e8 | /python_leetcode_2020/Python_Leetcode_2020/1465_max_area_of_pieceofcake_after_cuts.py | 84149a9672842b58f898ef87d46a45ad4451afe6 | []
| no_license | xiangcao/Leetcode | 18da3d5b271ff586fdf44c53f1a677423ca3dfed | d953abe2c9680f636563e76287d2f907e90ced63 | refs/heads/master | 2022-06-22T04:45:15.446329 | 2022-06-17T13:03:01 | 2022-06-17T13:03:01 | 26,052,392 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,291 | py | """
Given a rectangular cake with height h and width w, and two arrays of integers horizontalCuts and verticalCuts where horizontalCuts[i] is the distance from the top of the rectangular cake to the ith horizontal cut and similarly, verticalCuts[j] is the distance from the left of the rectangular cake to the jth vertical cut.
Return the maximum area of a piece of cake after you cut at each horizontal and vertical position provided in the arrays horizontalCuts and verticalCuts. Since the answer can be a huge number, return this modulo 10^9 + 7.
Constraints:
2 <= h, w <= 10^9
1 <= horizontalCuts.length < min(h, 10^5)
1 <= verticalCuts.length < min(w, 10^5)
1 <= horizontalCuts[i] < h
1 <= verticalCuts[i] < w
It is guaranteed that all elements in horizontalCuts are distinct.
It is guaranteed that all elements in verticalCuts are distinct.
"""
class Solution {
public int maxArea(int h, int w, int[] hc, int[] vc) {
return (int) ((getMaxDist(h, hc) * getMaxDist(w, vc)) % (Math.pow(10, 9) + 7));
}
private long getMaxDist(int end, int[] cuts) {
Arrays.sort(cuts);
long res = 0, from = 0;
for (int c : cuts) {
res = Math.max(res, c - from);
from = c;
}
return Math.max(res, end - from);
}
}
| [
"[email protected]"
]
| |
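Although the row records a `py` extension, the content above is Java. A minimal Python equivalent of the same idea: sort the cuts on each axis, take the largest gap (including the border segments), and multiply the two gaps modulo 10^9 + 7:

```python
# Minimal Python equivalent of the Java solution above.
def max_area(h, w, horizontal_cuts, vertical_cuts):
    def max_gap(end, cuts):
        cuts = sorted(cuts)
        # Gaps between consecutive cuts, plus the two border segments.
        gaps = [cuts[0]] + [b - a for a, b in zip(cuts, cuts[1:])] + [end - cuts[-1]]
        return max(gaps)

    return (max_gap(h, horizontal_cuts) * max_gap(w, vertical_cuts)) % (10**9 + 7)

print(max_area(5, 4, [1, 2, 4], [1, 3]))  # 4
```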
063947a01bf8d4c0106d3e960c33eb7208b143f8 | d80ef8c716bcc5ea54e87540dbf0463f15bf44ce | /Proxy/test/mitmproxy/test_version.py | f87b08517cd20bf6f7ab33dd2aa65b4da988f2a7 | [
"MIT"
]
| permissive | YagiGo/YPTN | 5043d22eb131c7164d3fa575f0c4e3d8a963dbf4 | d7692a68ee1bf578536b4c09c566272210fc8b69 | refs/heads/master | 2018-10-16T03:44:18.024169 | 2018-07-24T08:53:57 | 2018-07-24T08:53:57 | 107,633,669 | 4 | 1 | MIT | 2018-06-08T09:04:29 | 2017-10-20T04:55:22 | JavaScript | UTF-8 | Python | false | false | 248 | py | import runpy
from mitmproxy import version
def test_version(capsys):
runpy.run_module('mitmproxy.version', run_name='__main__')
stdout, stderr = capsys.readouterr()
assert len(stdout) > 0
assert stdout.strip() == version.VERSION
| [
"[email protected]"
]
| |
fc6d2a4cbbd5a898cc67e115186e1724231f9eaf | 7e7e2c5d327a518a03b2307f7f3ece37517fa361 | /ThirdWeek/Task9.py | 196edbf3279906c37f30e69bd31f1ff91ba3f5d3 | []
| no_license | Midnight1Knight/HSE-course | 0fdd995f2e8bf98ecd5fc4ecbcd503e6ef2150ab | 9b79c359fc65d260e3d454de5464abd5c89be770 | refs/heads/master | 2022-09-24T14:36:29.472005 | 2020-06-04T17:11:37 | 2020-06-04T17:11:37 | 269,414,804 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | p = float(0)
n = int(input())
x = float(input())
sumX = 0.0
counter = 0
while n > 0:
k = float(input())
sumX = x
counter = n
while counter > 1:
sumX *= x
counter -= 1
p += float(k * sumX)
n -= 1
k = float(input())
p += float(k)
print(float(p))
| [
"[email protected]"
]
| |
13109ecbe3376732d745c55c29a10df925581684 | c87b2388fe94dbc081ad01e59f8b66d90f2ded46 | /tests/test_project/app_rename_table/migrations/0001_initial.py | ea12d2850c4271965b9e03aa7b2c261527668328 | [
"Apache-2.0"
]
| permissive | rajuniit/django-migration-linter | f6ac60064cd1feb6a1b6a552ff1656619213a419 | 8db24f919dd2411d0c7d000ab51a92398771cb24 | refs/heads/main | 2023-07-26T17:57:03.535454 | 2021-09-07T05:31:08 | 2021-09-07T05:31:08 | 403,851,905 | 0 | 0 | Apache-2.0 | 2021-09-07T05:29:01 | 2021-09-07T05:29:01 | null | UTF-8 | Python | false | false | 709 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.18 on 2019-04-14 15:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="A",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("field", models.IntegerField()),
],
)
]
| [
"[email protected]"
]
| |
35d8ead869e13c5b0a9da303e4e549be8cada59f | c6f261ec25661c4b454721ab9b727b37eb738ddc | /7_7.py | 90ff96474d377802fbd3e5ecc275b96e2ea41ead | []
| no_license | LinearPi/challenge_game | c88c7b2a96e90820abd5c6edf990c31ec8fbc28b | 241fbed9c968c282b3d0a627eda724a59701ff95 | refs/heads/master | 2022-01-13T16:28:35.065079 | 2019-06-13T07:06:22 | 2019-06-13T07:06:22 | 97,297,520 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 49 | py | print('someone like you but you donor like it')
| [
"[email protected]"
]
| |
8867e7d73b6bbba89607d65330e7999f1bbe7e27 | 2a3a1f103a324bd36e1f557643bf785a782ac5bf | /1097.py | 972c97114436ab2cb4c479a435b64015bbca0b26 | []
| no_license | dbgkswn7581/python_basic_questions | 4c381847c6ab90647cc4dddaca3831210dbbe721 | 5042ea2a2e84a71e81ab8eccf940abde1e82a68f | refs/heads/master | 2023-02-08T04:59:51.654197 | 2021-01-04T06:54:18 | 2021-01-04T06:54:18 | 326,170,578 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 597 | py | arr = []
b = []
for i in range(19):
a = input()
for j in a:
if j == '0':
b.append(0)
elif j == '1':
b.append(1)
arr.append(b)
b = []
def rever(x,y):
for i in range(19):
if arr[x-1][i] == 1:
arr[x-1][i] = 0
else:
arr[x-1][i] = 1
if arr[i][y-1] == 1:
arr[i][y-1] = 0
else:
arr[i][y-1] = 1
m = int(input())
for p in range(m):
x,y = map(int, input().split())
rever(x,y)
for i in arr:
for j in range(19):
print(i[j], end=' ')
print() | [
"[email protected]"
]
| |
e716ca1d7f771038543501f168eb23c014a9c77e | e41633a4816260be9e138e3460c3971716765544 | /simple_subtraction.py | 7846fd291cf5dcfdb5cc1bfb395007f5400f2eb9 | [
"MIT"
]
| permissive | pawarspeaks/Python-Projects | c608ed8cc8c938a922b16ff8d4bd03b5f009b5ba | 9ba2f21a40e9d2297395cb4e9453687a0de4d754 | refs/heads/main | 2023-08-22T23:38:41.708153 | 2021-10-29T19:13:48 | 2021-10-29T19:13:48 | 422,687,844 | 4 | 0 | null | 2021-10-29T19:13:20 | 2021-10-29T19:13:20 | null | UTF-8 | Python | false | false | 117 | py | pertama=int(raw_input("Angka pertama=> "))
kedua=int(raw_input("Angka Kedua=> "))
print "Hasil"
print pertama-kedua
| [
"[email protected]"
]
| |
0ca434186efe591b665ab5be8163b5b376502a87 | b373c3b1e9b60f604c5213b8678cc374e4d60621 | /tasks.py | 429eda6543b1c0d1c4e9ccd8fa56b119ee23974b | [
"MIT"
]
| permissive | dgilland/sqlservice | db512bc3328477472298ca5186de3e9d4b0541d4 | 0be2bb62b2655916f5958e7eccaed63e83f1fe7b | refs/heads/master | 2023-05-31T02:35:33.651188 | 2022-10-12T00:37:13 | 2022-10-12T00:37:13 | 59,388,509 | 173 | 13 | null | 2018-08-03T21:23:52 | 2016-05-22T00:56:21 | Python | UTF-8 | Python | false | false | 3,950 | py | """
This module provides the CLI interface for invoke tasks.
All tasks can be executed from this file's directory using:
$ inv <task>
Where <task> is a function defined below with the @task decorator.
"""
from functools import partial
import os
from invoke import Exit, UnexpectedExit, run as _run, task
PACKAGE_NAME = "sqlservice"
PACKAGE_SOURCE = f"src/{PACKAGE_NAME}"
TEST_TARGETS = f"{PACKAGE_SOURCE} tests"
LINT_TARGETS = f"{TEST_TARGETS} tasks.py"
EXIT_EXCEPTIONS = (Exit, UnexpectedExit, SystemExit)
# Set pyt=True to enable colored output when available.
run = partial(_run, pty=True)
@task
def black(ctx, quiet=False):
"""Autoformat code using black."""
run(f"black {LINT_TARGETS}", hide=quiet)
@task
def isort(ctx, quiet=False):
"""Autoformat Python imports."""
run(f"isort {LINT_TARGETS}", hide=quiet)
@task
def docformatter(ctx):
"""Autoformat docstrings using docformatter."""
run(
f"docformatter -r {LINT_TARGETS} "
f"--in-place --pre-summary-newline --wrap-descriptions 100 --wrap-summaries 100"
)
@task
def fmt(ctx):
"""Autoformat code and docstrings."""
print("Preparing to run formatters: docformatter, isort, black\n")
print("Running docformatter")
docformatter(ctx)
print("Running isort")
isort(ctx, quiet=True)
print("Running black")
black(ctx, quiet=True)
@task
def flake8(ctx):
"""Check code for PEP8 violations using flake8."""
run(f"flake8 --format=pylint {LINT_TARGETS}")
@task
def pylint(ctx):
"""Check code for static errors using pylint."""
run(f"pylint {LINT_TARGETS}")
@task
def mypy(ctx):
"""Check code using mypy type checker."""
run(f"mypy {LINT_TARGETS}")
@task
def lint(ctx):
"""Run linters."""
linters = {"flake8": flake8, "pylint": pylint, "mypy": mypy}
failures = []
print(f"Preparing to run linters: {', '.join(linters)}\n")
for name, linter in linters.items():
print(f"Running {name}")
try:
linter(ctx)
except EXIT_EXCEPTIONS:
failures.append(name)
result = "FAILED"
else:
result = "PASSED"
print(f"{result}\n")
if failures:
failed = ", ".join(failures)
raise Exit(f"ERROR: Linters that failed: {failed}")
@task(help={"args": "Override default pytest arguments"})
def test(ctx, args=f"{TEST_TARGETS} --cov={PACKAGE_NAME}"):
"""Run unit tests using pytest."""
tox_env_site_packages_dir = os.getenv("TOX_ENV_SITE_PACKAGES_DIR")
if tox_env_site_packages_dir:
# Re-path package source to match tox env so that we generate proper coverage report.
tox_env_pkg_src = os.path.join(tox_env_site_packages_dir, os.path.basename(PACKAGE_SOURCE))
args = args.replace(PACKAGE_SOURCE, tox_env_pkg_src)
run(f"pytest {args}")
@task
def ci(ctx):
"""Run linters and tests."""
print("Building package")
build(ctx)
print("Building docs")
docs(ctx)
print("Checking linters")
lint(ctx)
print("Running unit tests")
test(ctx)
@task
def docs(ctx, serve=False, bind="127.0.0.1", port=8000):
"""Build docs."""
run("rm -rf docs/_build")
run("sphinx-build -q -W -b html docs docs/_build/html")
if serve:
print(f"Serving docs on {bind} port {port} (http://{bind}:{port}/) ...")
run(f"python -m http.server -b {bind} --directory docs/_build/html {port}", hide=True)
@task
def build(ctx):
"""Build Python package."""
run("rm -rf dist build docs/_build")
run("python -m build")
@task
def clean(ctx):
"""Remove temporary files related to development."""
run("find . -type f -name '*.py[cod]' -delete -o -type d -name __pycache__ -delete")
run("rm -rf .tox .coverage .cache .pytest_cache .mypy_cache **/.egg* **/*.egg* dist build")
@task(pre=[build])
def release(ctx):
"""Release Python package."""
run("twine upload dist/*")
| [
"[email protected]"
]
| |
a9b1d887ebcfe4a2b8adbc771fbe2a309ca4ab71 | 37572d62583271b872ec0e8f051701111d8d9230 | /migrations/versions/1b2b0d93a8ba_16.py | cd05612485ac7c331d9aa19c6caa6a5372232951 | []
| no_license | linwenjunid/flasky | 6b4b920b56194b04d2a33b2c843b858c519dffb5 | ea1e74bbc1185ade002cdfbee96b99675ee3c22d | refs/heads/master | 2020-03-18T23:40:47.958836 | 2019-03-29T01:27:40 | 2019-03-29T01:27:40 | 135,418,770 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | """16
Revision ID: 1b2b0d93a8ba
Revises: 5ae2353a173c
Create Date: 2018-08-21 16:10:28.530756
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '1b2b0d93a8ba'
down_revision = '5ae2353a173c'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('celery_tasks', sa.Column('task_percent', sa.Float(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('celery_tasks', 'task_percent')
# ### end Alembic commands ###
| [
"[email protected]"
]
| |
7b31e92e4ea4f7d437d2d38e4498703c7211dc33 | 0fd9cb5e8bfb26fa62b90e65f7fbaa2fd233d3d3 | /napalm/python/netmiko/fortinet/fortinet_ssh.py | f20231334f1f3a4dffb086717c2d3a33a95eebde | []
| no_license | levensailor/pip4lambda | 0cff15b2dba3ba586652c6cc914252daf01a874b | 22a83a43141f9bf72fdd0cd5faee3b88cc7e49fa | refs/heads/master | 2022-03-02T05:31:48.894906 | 2022-02-11T16:38:00 | 2022-02-11T16:38:00 | 174,207,440 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,833 | py | from __future__ import unicode_literals
import paramiko
import time
import re
from netmiko.cisco_base_connection import CiscoSSHConnection
class FortinetSSH(CiscoSSHConnection):
def _modify_connection_params(self):
"""Modify connection parameters prior to SSH connection."""
paramiko.Transport._preferred_kex = ('diffie-hellman-group14-sha1',
'diffie-hellman-group-exchange-sha1',
'diffie-hellman-group-exchange-sha256',
'diffie-hellman-group1-sha1',)
def session_preparation(self):
"""Prepare the session after the connection has been established."""
self._test_channel_read()
self.set_base_prompt(alt_prompt_terminator='$')
self.disable_paging()
# Clear the read buffer
time.sleep(.3 * self.global_delay_factor)
self.clear_buffer()
def disable_paging(self, delay_factor=1):
"""Disable paging is only available with specific roles so it may fail."""
check_command = "get system status | grep Virtual"
output = self.send_command_timing(check_command)
self.allow_disable_global = True
self.vdoms = False
self._output_mode = 'more'
if "Virtual domain configuration: enable" in output:
self.vdoms = True
vdom_additional_command = "config global"
output = self.send_command_timing(vdom_additional_command, delay_factor=2)
if "Command fail" in output:
self.allow_disable_global = False
self.remote_conn.close()
self.establish_connection(width=100, height=1000)
new_output = ''
if self.allow_disable_global:
self._retrieve_output_mode()
disable_paging_commands = ["config system console", "set output standard", "end"]
# There is an extra 'end' required if in multi-vdoms are enabled
if self.vdoms:
disable_paging_commands.append("end")
outputlist = [self.send_command_timing(command, delay_factor=2)
for command in disable_paging_commands]
# Should test output is valid
new_output = self.RETURN.join(outputlist)
return output + new_output
def _retrieve_output_mode(self):
"""Save the state of the output mode so it can be reset at the end of the session."""
reg_mode = re.compile(r'output\s+:\s+(?P<mode>.*)\s+\n')
output = self.send_command("get system console")
result_mode_re = reg_mode.search(output)
if result_mode_re:
result_mode = result_mode_re.group('mode').strip()
if result_mode in ['more', 'standard']:
self._output_mode = result_mode
def cleanup(self):
"""Re-enable paging globally."""
if self.allow_disable_global:
# Return paging state
output_mode_cmd = "set output {}".format(self._output_mode)
enable_paging_commands = ["config system console",
output_mode_cmd,
"end"]
if self.vdoms:
enable_paging_commands.insert(0, "config global")
# Should test output is valid
for command in enable_paging_commands:
self.send_command_timing(command)
def config_mode(self, config_command=''):
"""No config mode for Fortinet devices."""
return ''
def exit_config_mode(self, exit_config=''):
"""No config mode for Fortinet devices."""
return ''
def save_config(self, cmd='', confirm=True, confirm_response=''):
"""Not Implemented"""
raise NotImplementedError
| [
"[email protected]"
]
| |
75f4afcd3f28a07b31a0169bdacd3a042829807a | fb754d8c5abf560bb0f0c123dde2f6790d71452c | /dataset/dota_coco/dota_generate_test_result.py | 44c8f1804bcff723e1dbb5df2e172340c4b1e41e | [
"Apache-2.0"
]
| permissive | ellinyang/PaddleDetection | 024c256c620949828fd3d2a7592aaebc6e3fef92 | 993119bbc3de330310083e9487fef5147d82c087 | refs/heads/master | 2021-11-04T17:56:50.729293 | 2021-10-28T02:30:12 | 2021-10-28T02:30:12 | 222,353,395 | 3 | 0 | Apache-2.0 | 2019-11-18T03:11:33 | 2019-11-18T03:11:32 | null | UTF-8 | Python | false | false | 7,901 | py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import re
import glob
import numpy as np
from multiprocessing import Pool
from functools import partial
from shapely.geometry import Polygon
import argparse
nms_thresh = 0.1
class_name_15 = [
'plane', 'baseball-diamond', 'bridge', 'ground-track-field',
'small-vehicle', 'large-vehicle', 'ship', 'tennis-court',
'basketball-court', 'storage-tank', 'soccer-ball-field', 'roundabout',
'harbor', 'swimming-pool', 'helicopter'
]
class_name_16 = [
'plane', 'baseball-diamond', 'bridge', 'ground-track-field',
'small-vehicle', 'large-vehicle', 'ship', 'tennis-court',
'basketball-court', 'storage-tank', 'soccer-ball-field', 'roundabout',
'harbor', 'swimming-pool', 'helicopter', 'container-crane'
]
def rbox_iou(g, p):
"""
iou of rbox
"""
g = np.array(g)
p = np.array(p)
g = Polygon(g[:8].reshape((4, 2)))
p = Polygon(p[:8].reshape((4, 2)))
g = g.buffer(0)
p = p.buffer(0)
if not g.is_valid or not p.is_valid:
return 0
inter = Polygon(g).intersection(Polygon(p)).area
union = g.area + p.area - inter
if union == 0:
return 0
else:
return inter / union
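# Illustrative check of rbox_iou (not part of the original module): two unit
# squares offset by half a side share area 0.5 of union 1.5, i.e. IoU = 1/3:
#   rbox_iou([0, 0, 1, 0, 1, 1, 0, 1], [0.5, 0, 1.5, 0, 1.5, 1, 0.5, 1])
#   -> 0.333...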
def py_cpu_nms_poly_fast(dets, thresh):
"""
Args:
dets: pred results
thresh: nms threshold
Returns: index of keep
"""
obbs = dets[:, 0:-1]
x1 = np.min(obbs[:, 0::2], axis=1)
y1 = np.min(obbs[:, 1::2], axis=1)
x2 = np.max(obbs[:, 0::2], axis=1)
y2 = np.max(obbs[:, 1::2], axis=1)
scores = dets[:, 8]
areas = (x2 - x1 + 1) * (y2 - y1 + 1)
polys = []
for i in range(len(dets)):
tm_polygon = [
dets[i][0], dets[i][1], dets[i][2], dets[i][3], dets[i][4],
dets[i][5], dets[i][6], dets[i][7]
]
polys.append(tm_polygon)
polys = np.array(polys)
order = scores.argsort()[::-1]
keep = []
while order.size > 0:
ovr = []
i = order[0]
keep.append(i)
xx1 = np.maximum(x1[i], x1[order[1:]])
yy1 = np.maximum(y1[i], y1[order[1:]])
xx2 = np.minimum(x2[i], x2[order[1:]])
yy2 = np.minimum(y2[i], y2[order[1:]])
w = np.maximum(0.0, xx2 - xx1)
h = np.maximum(0.0, yy2 - yy1)
hbb_inter = w * h
hbb_ovr = hbb_inter / (areas[i] + areas[order[1:]] - hbb_inter)
# h_keep_inds = np.where(hbb_ovr == 0)[0]
h_inds = np.where(hbb_ovr > 0)[0]
tmp_order = order[h_inds + 1]
for j in range(tmp_order.size):
iou = rbox_iou(polys[i], polys[tmp_order[j]])
hbb_ovr[h_inds[j]] = iou
# ovr.append(iou)
# ovr_index.append(tmp_order[j])
        # Sanity check: rotated IoU values should never be NaN for valid
        # polygons (rbox_iou returns 0 for degenerate inputs).
        if np.isnan(hbb_ovr).any():
            raise ValueError("NaN IoU encountered in rotated NMS")
inds = np.where(hbb_ovr <= thresh)[0]
order = order[inds + 1]
return keep
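# Illustrative call (each dets row is x1,y1,...,x4,y4 followed by a score):
#   dets = np.array([[0, 0, 1, 0, 1, 1, 0, 1, 0.9],
#                    [0, 0, 1, 0, 1, 1, 0, 1, 0.8]])
#   py_cpu_nms_poly_fast(dets, 0.1) -> [0]  # identical boxes; lower score dropped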
def poly2origpoly(poly, x, y, rate):
origpoly = []
for i in range(int(len(poly) / 2)):
tmp_x = float(poly[i * 2] + x) / float(rate)
tmp_y = float(poly[i * 2 + 1] + y) / float(rate)
origpoly.append(tmp_x)
origpoly.append(tmp_y)
return origpoly
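# Worked example for poly2origpoly (illustrative): a tile cropped at offset
# (x=100, y=200) from an image scaled by rate=0.5 maps tile point (10, 20)
# back to ((10 + 100) / 0.5, (20 + 200) / 0.5) = (220.0, 440.0).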
def nmsbynamedict(nameboxdict, nms, thresh):
"""
Args:
nameboxdict: nameboxdict
nms: nms
thresh: nms threshold
Returns: nms result as dict
"""
nameboxnmsdict = {x: [] for x in nameboxdict}
for imgname in nameboxdict:
keep = nms(np.array(nameboxdict[imgname]), thresh)
outdets = []
for index in keep:
outdets.append(nameboxdict[imgname][index])
nameboxnmsdict[imgname] = outdets
return nameboxnmsdict
def merge_single(output_dir, nms, pred_class_lst):
"""
Args:
output_dir: output_dir
nms: nms
pred_class_lst: pred_class_lst
class_name: class_name
Returns:
"""
class_name, pred_bbox_list = pred_class_lst
nameboxdict = {}
for line in pred_bbox_list:
splitline = line.split(' ')
subname = splitline[0]
splitname = subname.split('__')
oriname = splitname[0]
pattern1 = re.compile(r'__\d+___\d+')
x_y = re.findall(pattern1, subname)
x_y_2 = re.findall(r'\d+', x_y[0])
x, y = int(x_y_2[0]), int(x_y_2[1])
pattern2 = re.compile(r'__([\d+\.]+)__\d+___')
rate = re.findall(pattern2, subname)[0]
confidence = splitline[1]
poly = list(map(float, splitline[2:]))
origpoly = poly2origpoly(poly, x, y, rate)
det = origpoly
det.append(confidence)
det = list(map(float, det))
if (oriname not in nameboxdict):
nameboxdict[oriname] = []
nameboxdict[oriname].append(det)
nameboxnmsdict = nmsbynamedict(nameboxdict, nms, nms_thresh)
# write result
dstname = os.path.join(output_dir, class_name + '.txt')
with open(dstname, 'w') as f_out:
for imgname in nameboxnmsdict:
for det in nameboxnmsdict[imgname]:
confidence = det[-1]
bbox = det[0:-1]
outline = imgname + ' ' + str(confidence) + ' ' + ' '.join(
map(str, bbox))
f_out.write(outline + '\n')
def dota_generate_test_result(pred_txt_dir,
output_dir='output',
dota_version='v1.0'):
"""
pred_txt_dir: dir of pred txt
output_dir: dir of output
dota_version: dota_version v1.0 or v1.5 or v2.0
"""
pred_txt_list = glob.glob("{}/*.txt".format(pred_txt_dir))
# step1: summary pred bbox
pred_classes = {}
class_lst = class_name_15 if dota_version == 'v1.0' else class_name_16
for class_name in class_lst:
pred_classes[class_name] = []
for current_txt in pred_txt_list:
img_id = os.path.split(current_txt)[1]
img_id = img_id.split('.txt')[0]
with open(current_txt) as f:
res = f.readlines()
for item in res:
item = item.split(' ')
pred_class = item[0]
item[0] = img_id
pred_bbox = ' '.join(item)
pred_classes[pred_class].append(pred_bbox)
pred_classes_lst = []
for class_name in pred_classes.keys():
print('class_name: {}, count: {}'.format(class_name,
len(pred_classes[class_name])))
pred_classes_lst.append((class_name, pred_classes[class_name]))
# step2: merge
pool = Pool(len(class_lst))
nms = py_cpu_nms_poly_fast
mergesingle_fn = partial(merge_single, output_dir, nms)
pool.map(mergesingle_fn, pred_classes_lst)
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='merge DOTA per-tile predictions into test results')
parser.add_argument('--pred_txt_dir', help='path of pred txt dir')
parser.add_argument(
'--output_dir', help='path of output dir', default='output')
parser.add_argument(
'--dota_version',
help='dota_version, v1.0 or v1.5 or v2.0',
type=str,
default='v1.0')
args = parser.parse_args()
# process
dota_generate_test_result(args.pred_txt_dir, args.output_dir,
args.dota_version)
print('done!')
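# Example invocation (paths are placeholders):
#   python dota_generate_test_result.py --pred_txt_dir ./pred_txts \
#       --output_dir ./merged --dota_version v1.0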
| [
"[email protected]"
]
| |
0e4d12400d385371006b7ad61680b902e236e83c | dc42a65f63ca8327ee74675c7606a9b6e3b39d40 | /tpdatasrc/co8fixes/scr/py00081spugnoir.py | 9d67ab60fadca2c1158c5ab7df6dfd15a16fc507 | [
"MIT"
]
| permissive | anatoliy-savchak/TemplePlus | 03cda558de2fd30d34305b7e5b548d202ba97f76 | 50922bb14cc2d7dcf8fceeccf45c3b905c1b512f | refs/heads/master_old | 2023-04-28T12:01:06.205497 | 2021-09-25T13:03:51 | 2021-09-25T13:03:51 | 172,583,383 | 2 | 0 | MIT | 2019-02-25T20:56:01 | 2019-02-25T20:56:00 | null | UTF-8 | Python | false | false | 3,263 | py | from toee import *
from utilities import *
from combat_standard_routines import *
def san_dialog( attachee, triggerer ):
if (game.global_flags[819] == 1):
attachee.attack(triggerer)
return SKIP_DEFAULT
if (attachee.leader_get() != OBJ_HANDLE_NULL):
triggerer.begin_dialog( attachee, 100 ) ## spugnoir in party
elif (game.global_vars[913] == 32):
triggerer.begin_dialog( attachee, 140 ) ## have attacked 3 or more farm animals with spugnoir in party
elif (game.leader.reputation_has(32) == 1 or game.leader.reputation_has(30) == 1 or game.leader.reputation_has(29) == 1):
attachee.float_line(11004,triggerer) ## have lawbreaker or convict or banished from hommlet rep
else:
triggerer.begin_dialog( attachee, 1 ) ## none of the above
return SKIP_DEFAULT
def san_first_heartbeat( attachee, triggerer ):
if (attachee.leader_get() == OBJ_HANDLE_NULL):
if (game.global_vars[501] == 4 or game.global_vars[501] == 5 or game.global_vars[501] == 6 or game.global_vars[510] == 2):
attachee.object_flag_set(OF_OFF)
else:
attachee.object_flag_unset(OF_OFF)
if (attachee.leader_get() == OBJ_HANDLE_NULL and not game.combat_is_active()):
game.global_vars[712] = 0
return RUN_DEFAULT
def san_dying( attachee, triggerer ):
if should_modify_CR( attachee ):
modify_CR( attachee, get_av_level() )
attachee.float_line(12014,triggerer)
if (attachee.leader_get() != OBJ_HANDLE_NULL):
game.global_vars[29] = game.global_vars[29] + 1
return RUN_DEFAULT
def san_enter_combat( attachee, triggerer ):
print "Spugnoir Enter Combat"
ProtectTheInnocent( attachee, triggerer)
return RUN_DEFAULT
def san_heartbeat( attachee, triggerer ):
if (game.global_vars[712] == 0 and attachee.leader_get() == OBJ_HANDLE_NULL and not game.combat_is_active()) and game.party_alignment & ALIGNMENT_GOOD == 0:
attachee.cast_spell(spell_mage_armor, attachee)
attachee.spells_pending_to_memorized()
game.global_vars[712] = 1
if (not game.combat_is_active()):
if (game.global_vars[913] >= 3):
if (attachee != OBJ_HANDLE_NULL):
leader = attachee.leader_get()
if (leader != OBJ_HANDLE_NULL):
leader.follower_remove(attachee)
attachee.float_line(22000,triggerer)
return RUN_DEFAULT
def san_join( attachee, triggerer ):
create_item_in_inventory( 4645, attachee)
create_item_in_inventory( 4647, attachee)
create_item_in_inventory( 4224, attachee)
create_item_in_inventory( 12848, attachee)
game.new_sid = 0
return RUN_DEFAULT
def san_new_map( attachee, triggerer ):
if ((attachee.map == 5006) and (game.global_vars[695] == 1 or game.global_vars[695] == 2)):
attachee.float_line(12070,triggerer)
elif ((attachee.map == 5024) and (is_daytime() != 1)):
attachee.float_line(10019,triggerer)
return RUN_DEFAULT
def equip_transfer( attachee, triggerer ):
itemA = attachee.item_find(6081)
if (itemA != OBJ_HANDLE_NULL):
itemA.destroy()
create_item_in_inventory( 6081, triggerer )
itemB = attachee.item_find(6023)
if (itemB != OBJ_HANDLE_NULL):
itemB.destroy()
create_item_in_inventory( 6023, triggerer )
itemC = attachee.item_find(4060)
if (itemC != OBJ_HANDLE_NULL):
itemC.destroy()
create_item_in_inventory( 4060, triggerer )
create_item_in_inventory( 7001, attachee )
return RUN_DEFAULT | [
"doug1234@unknown"
]
| doug1234@unknown |
28043124ed5ef8d6e19e6a791c78390a5bf65db4 | 2c74bb301f1ed83b79254944183ac5a18a639fdf | /tests/components/flux_led/test_config_flow.py | 3f1704f7e8c9309afe03ed25f06afd83a44ad0d9 | [
"Apache-2.0"
]
| permissive | Adminiuga/home-assistant | 5bec93007ddac1a268cc359bf7e48530c5f73b38 | dcf68d768e4f628d038f1fdd6e40bad713fbc222 | refs/heads/dev | 2023-02-22T22:03:31.013931 | 2022-11-09T00:27:20 | 2022-11-09T00:27:20 | 123,929,062 | 5 | 4 | Apache-2.0 | 2023-02-22T06:14:31 | 2018-03-05T14:11:09 | Python | UTF-8 | Python | false | false | 25,829 | py | """Define tests for the Flux LED/Magic Home config flow."""
from __future__ import annotations
from unittest.mock import patch
import pytest
from homeassistant import config_entries
from homeassistant.components import dhcp
from homeassistant.components.flux_led.const import (
CONF_CUSTOM_EFFECT_COLORS,
CONF_CUSTOM_EFFECT_SPEED_PCT,
CONF_CUSTOM_EFFECT_TRANSITION,
CONF_MINOR_VERSION,
CONF_MODEL_DESCRIPTION,
CONF_MODEL_INFO,
CONF_MODEL_NUM,
CONF_REMOTE_ACCESS_ENABLED,
CONF_REMOTE_ACCESS_HOST,
CONF_REMOTE_ACCESS_PORT,
DOMAIN,
TRANSITION_JUMP,
TRANSITION_STROBE,
)
from homeassistant.const import CONF_DEVICE, CONF_HOST, CONF_MODEL
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from . import (
DEFAULT_ENTRY_TITLE,
DHCP_DISCOVERY,
FLUX_DISCOVERY,
FLUX_DISCOVERY_PARTIAL,
IP_ADDRESS,
MAC_ADDRESS,
MAC_ADDRESS_ONE_OFF,
MODEL,
MODEL_DESCRIPTION,
MODEL_NUM,
MODULE,
_patch_discovery,
_patch_wifibulb,
)
from tests.common import MockConfigEntry
MAC_ADDRESS_DIFFERENT = "ff:bb:ff:dd:ee:ff"
async def test_discovery(hass: HomeAssistant):
"""Test setting up discovery."""
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
await hass.async_block_till_done()
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
# test we can try again
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
with _patch_discovery(), _patch_wifibulb(), patch(
f"{MODULE}.async_setup", return_value=True
) as mock_setup, patch(
f"{MODULE}.async_setup_entry", return_value=True
) as mock_setup_entry:
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_DEVICE: MAC_ADDRESS},
)
await hass.async_block_till_done()
assert result3["type"] == "create_entry"
assert result3["title"] == DEFAULT_ENTRY_TITLE
assert result3["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL: MODEL,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_INFO: MODEL,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
CONF_REMOTE_ACCESS_ENABLED: True,
CONF_REMOTE_ACCESS_HOST: "the.cloud",
CONF_REMOTE_ACCESS_PORT: 8816,
CONF_MINOR_VERSION: 0x04,
}
mock_setup.assert_called_once()
mock_setup_entry.assert_called_once()
# ignore configured devices
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery(), _patch_wifibulb():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["reason"] == "no_devices_found"
async def test_discovery_legacy(hass: HomeAssistant):
"""Test setting up discovery with a legacy device."""
with _patch_discovery(device=FLUX_DISCOVERY_PARTIAL), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
await hass.async_block_till_done()
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
# test we can try again
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
with _patch_discovery(), _patch_wifibulb(), patch(
f"{MODULE}.async_setup", return_value=True
) as mock_setup, patch(
f"{MODULE}.async_setup_entry", return_value=True
) as mock_setup_entry:
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{CONF_DEVICE: MAC_ADDRESS},
)
await hass.async_block_till_done()
assert result3["type"] == "create_entry"
assert result3["title"] == DEFAULT_ENTRY_TITLE
assert result3["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL: MODEL,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_INFO: MODEL,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
CONF_REMOTE_ACCESS_ENABLED: True,
CONF_REMOTE_ACCESS_HOST: "the.cloud",
CONF_REMOTE_ACCESS_PORT: 8816,
CONF_MINOR_VERSION: 0x04,
}
mock_setup.assert_called_once()
mock_setup_entry.assert_called_once()
# ignore configured devices
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery(), _patch_wifibulb():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["reason"] == "no_devices_found"
async def test_discovery_with_existing_device_present(hass: HomeAssistant):
"""Test setting up discovery."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: "127.0.0.2"}, unique_id="dd:dd:dd:dd:dd:dd"
)
config_entry.add_to_hass(hass)
with _patch_discovery(), _patch_wifibulb(no_device=True):
await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery(), _patch_wifibulb():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
# Now abort and make sure we can start over
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery(), _patch_wifibulb():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "pick_device"
assert not result2["errors"]
with _patch_discovery(), _patch_wifibulb(), patch(
f"{MODULE}.async_setup_entry", return_value=True
) as mock_setup_entry:
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_DEVICE: MAC_ADDRESS}
)
assert result3["type"] == "create_entry"
assert result3["title"] == DEFAULT_ENTRY_TITLE
assert result3["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL: MODEL,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_INFO: MODEL,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
CONF_REMOTE_ACCESS_ENABLED: True,
CONF_REMOTE_ACCESS_HOST: "the.cloud",
CONF_REMOTE_ACCESS_PORT: 8816,
CONF_MINOR_VERSION: 0x04,
}
await hass.async_block_till_done()
mock_setup_entry.assert_called_once()
# ignore configured devices
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery(), _patch_wifibulb():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["reason"] == "no_devices_found"
async def test_discovery_no_device(hass: HomeAssistant):
"""Test discovery without device."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with _patch_discovery(no_device=True), _patch_wifibulb():
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["reason"] == "no_devices_found"
async def test_manual_working_discovery(hass: HomeAssistant):
"""Test manually setup."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
# Cannot connect (timeout)
with _patch_discovery(no_device=True), _patch_wifibulb(no_device=True):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_HOST: IP_ADDRESS}
)
await hass.async_block_till_done()
assert result2["type"] == "form"
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "cannot_connect"}
# Success
with _patch_discovery(), _patch_wifibulb(), patch(
f"{MODULE}.async_setup", return_value=True
), patch(f"{MODULE}.async_setup_entry", return_value=True):
result4 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_HOST: IP_ADDRESS}
)
await hass.async_block_till_done()
assert result4["type"] == "create_entry"
assert result4["title"] == DEFAULT_ENTRY_TITLE
assert result4["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL: MODEL,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_INFO: MODEL,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
CONF_REMOTE_ACCESS_ENABLED: True,
CONF_REMOTE_ACCESS_HOST: "the.cloud",
CONF_REMOTE_ACCESS_PORT: 8816,
CONF_MINOR_VERSION: 0x04,
}
# Duplicate
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with _patch_discovery(no_device=True), _patch_wifibulb(no_device=True):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_HOST: IP_ADDRESS}
)
await hass.async_block_till_done()
assert result2["type"] == "abort"
assert result2["reason"] == "already_configured"
async def test_manual_no_discovery_data(hass: HomeAssistant):
"""Test manually setup without discovery data."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["step_id"] == "user"
assert not result["errors"]
with _patch_discovery(no_device=True), _patch_wifibulb(), patch(
f"{MODULE}.async_setup", return_value=True
), patch(f"{MODULE}.async_setup_entry", return_value=True):
result = await hass.config_entries.flow.async_configure(
result["flow_id"], {CONF_HOST: IP_ADDRESS}
)
await hass.async_block_till_done()
assert result["type"] == "create_entry"
assert result["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
}
async def test_discovered_by_discovery_and_dhcp(hass):
"""Test we get the form with discovery and abort for dhcp source when we get both."""
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
data=FLUX_DISCOVERY,
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["errors"] is None
with _patch_discovery(), _patch_wifibulb():
result2 = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_DHCP},
data=DHCP_DISCOVERY,
)
await hass.async_block_till_done()
assert result2["type"] == FlowResultType.ABORT
assert result2["reason"] == "already_in_progress"
with _patch_discovery(), _patch_wifibulb():
result3 = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_DHCP},
data=dhcp.DhcpServiceInfo(
hostname="any",
ip=IP_ADDRESS,
macaddress="00:00:00:00:00:00",
),
)
await hass.async_block_till_done()
assert result3["type"] == FlowResultType.ABORT
assert result3["reason"] == "already_in_progress"
async def test_discovered_by_discovery(hass):
"""Test we can setup when discovered from discovery."""
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
data=FLUX_DISCOVERY,
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["errors"] is None
with _patch_discovery(), _patch_wifibulb(), patch(
f"{MODULE}.async_setup", return_value=True
) as mock_async_setup, patch(
f"{MODULE}.async_setup_entry", return_value=True
) as mock_async_setup_entry:
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL: MODEL,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_INFO: MODEL,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
CONF_REMOTE_ACCESS_ENABLED: True,
CONF_REMOTE_ACCESS_HOST: "the.cloud",
CONF_REMOTE_ACCESS_PORT: 8816,
CONF_MINOR_VERSION: 0x04,
}
assert mock_async_setup.called
assert mock_async_setup_entry.called
async def test_discovered_by_dhcp_udp_responds(hass):
"""Test we can setup when discovered from dhcp but with udp response."""
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["errors"] is None
with _patch_discovery(), _patch_wifibulb(), patch(
f"{MODULE}.async_setup", return_value=True
) as mock_async_setup, patch(
f"{MODULE}.async_setup_entry", return_value=True
) as mock_async_setup_entry:
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL: MODEL,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_INFO: MODEL,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
CONF_REMOTE_ACCESS_ENABLED: True,
CONF_REMOTE_ACCESS_HOST: "the.cloud",
CONF_REMOTE_ACCESS_PORT: 8816,
CONF_MINOR_VERSION: 0x04,
}
assert mock_async_setup.called
assert mock_async_setup_entry.called
async def test_discovered_by_dhcp_no_udp_response(hass):
"""Test we can setup when discovered from dhcp but no udp response."""
with _patch_discovery(no_device=True), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["errors"] is None
with _patch_discovery(no_device=True), _patch_wifibulb(), patch(
f"{MODULE}.async_setup", return_value=True
) as mock_async_setup, patch(
f"{MODULE}.async_setup_entry", return_value=True
) as mock_async_setup_entry:
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
}
assert mock_async_setup.called
assert mock_async_setup_entry.called
async def test_discovered_by_dhcp_partial_udp_response_fallback_tcp(hass):
"""Test we can setup when discovered from dhcp but part of the udp response is missing."""
with _patch_discovery(no_device=True), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.FORM
assert result["errors"] is None
with _patch_discovery(device=FLUX_DISCOVERY_PARTIAL), _patch_wifibulb(), patch(
f"{MODULE}.async_setup", return_value=True
) as mock_async_setup, patch(
f"{MODULE}.async_setup_entry", return_value=True
) as mock_async_setup_entry:
result2 = await hass.config_entries.flow.async_configure(result["flow_id"], {})
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["data"] == {
CONF_HOST: IP_ADDRESS,
CONF_MODEL_NUM: MODEL_NUM,
CONF_MODEL_DESCRIPTION: MODEL_DESCRIPTION,
}
assert result2["title"] == "Bulb RGBCW DDEEFF"
assert mock_async_setup.called
assert mock_async_setup_entry.called
async def test_discovered_by_dhcp_no_udp_response_or_tcp_response(hass):
"""Test we can setup when discovered from dhcp but no udp response or tcp response."""
with _patch_discovery(no_device=True), _patch_wifibulb(no_device=True):
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "cannot_connect"
@pytest.mark.parametrize(
"source, data",
[
(config_entries.SOURCE_DHCP, DHCP_DISCOVERY),
(config_entries.SOURCE_INTEGRATION_DISCOVERY, FLUX_DISCOVERY),
],
)
async def test_discovered_by_dhcp_or_discovery_adds_missing_unique_id(
hass, source, data
):
"""Test we can setup when discovered from dhcp or discovery."""
config_entry = MockConfigEntry(domain=DOMAIN, data={CONF_HOST: IP_ADDRESS})
config_entry.add_to_hass(hass)
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": source}, data=data
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "already_configured"
assert config_entry.unique_id == MAC_ADDRESS
async def test_mac_address_off_by_one_updated_via_discovery(hass):
"""Test the mac address is updated when its off by one from integration discovery."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=MAC_ADDRESS_ONE_OFF
)
config_entry.add_to_hass(hass)
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_INTEGRATION_DISCOVERY},
data=FLUX_DISCOVERY,
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "already_configured"
assert config_entry.unique_id == MAC_ADDRESS
async def test_mac_address_off_by_one_not_updated_from_dhcp(hass):
"""Test the mac address is NOT updated when its off by one from dhcp discovery."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=MAC_ADDRESS_ONE_OFF
)
config_entry.add_to_hass(hass)
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_DHCP}, data=DHCP_DISCOVERY
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "already_configured"
assert config_entry.unique_id == MAC_ADDRESS_ONE_OFF
@pytest.mark.parametrize(
"source, data",
[
(config_entries.SOURCE_DHCP, DHCP_DISCOVERY),
(config_entries.SOURCE_INTEGRATION_DISCOVERY, FLUX_DISCOVERY),
],
)
async def test_discovered_by_dhcp_or_discovery_mac_address_mismatch_host_already_configured(
hass, source, data
):
"""Test we abort if the host is already configured but the mac does not match."""
config_entry = MockConfigEntry(
domain=DOMAIN, data={CONF_HOST: IP_ADDRESS}, unique_id=MAC_ADDRESS_DIFFERENT
)
config_entry.add_to_hass(hass)
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": source}, data=data
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "already_configured"
assert config_entry.unique_id == MAC_ADDRESS_DIFFERENT
async def test_options(hass: HomeAssistant):
"""Test options flow."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={CONF_HOST: IP_ADDRESS},
title=IP_ADDRESS,
options={
CONF_CUSTOM_EFFECT_COLORS: "[255,0,0], [0,0,255]",
CONF_CUSTOM_EFFECT_SPEED_PCT: 30,
CONF_CUSTOM_EFFECT_TRANSITION: TRANSITION_STROBE,
},
unique_id=MAC_ADDRESS,
)
config_entry.add_to_hass(hass)
with _patch_discovery(), _patch_wifibulb():
assert await hass.config_entries.async_setup(config_entry.entry_id)
await hass.async_block_till_done()
result = await hass.config_entries.options.async_init(config_entry.entry_id)
assert result["type"] == "form"
assert result["step_id"] == "init"
user_input = {
CONF_CUSTOM_EFFECT_COLORS: "[0,0,255], [255,0,0]",
CONF_CUSTOM_EFFECT_SPEED_PCT: 50,
CONF_CUSTOM_EFFECT_TRANSITION: TRANSITION_JUMP,
}
with _patch_discovery(), _patch_wifibulb():
result2 = await hass.config_entries.options.async_configure(
result["flow_id"], user_input
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["data"] == user_input
assert result2["data"] == config_entry.options
assert hass.states.get("light.bulb_rgbcw_ddeeff") is not None
@pytest.mark.parametrize(
"source, data",
[
(config_entries.SOURCE_DHCP, DHCP_DISCOVERY),
(config_entries.SOURCE_INTEGRATION_DISCOVERY, FLUX_DISCOVERY),
],
)
async def test_discovered_can_be_ignored(hass, source, data):
"""Test we abort if the mac was already ignored."""
config_entry = MockConfigEntry(
domain=DOMAIN,
data={},
unique_id=MAC_ADDRESS,
source=config_entries.SOURCE_IGNORE,
)
config_entry.add_to_hass(hass)
with _patch_discovery(), _patch_wifibulb():
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": source}, data=data
)
await hass.async_block_till_done()
assert result["type"] == FlowResultType.ABORT
assert result["reason"] == "already_configured"
| [
"[email protected]"
]
| |
3264f45ff99ac37d6e6a01b5f9e165a75bcb3531 | 39d3af3e67a00b93d4325ecd6942b021bf4acbba | /deployTest1/wsgi.py | 0e2c9f328e4ad3751809c8a8c259b2991f57ade7 | []
| no_license | dmhburke/DeployTest1GIT | 51fcc1405eaf8420aed60ab42c2e21a4a1a36f59 | 002d06e9dca5bc9d032fd7953fcb1ea16fb8994e | refs/heads/master | 2020-05-04T03:03:59.969438 | 2019-04-02T17:07:58 | 2019-04-02T17:07:58 | 178,936,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | """
WSGI config for deployTest1 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'deployTest1.settings')
application = get_wsgi_application()
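# Example (illustrative): serve this module with any WSGI server, e.g.
#   gunicorn deployTest1.wsgi:application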
| [
"[email protected]"
]
| |
c3d04162029b2999ba7cd3a187103bbfa97e48c4 | e1525596c75925dd5f711bbf95fb90acff440339 | /tests/hwsim/test_p2p_channel.py | 316f6b6a3675b5f24ab49294064c9fb77d48a74e | [
"BSD-3-Clause"
]
| permissive | kimocoder/hostap-wpa3 | 1a414dfe21ed0395ea0f1c52f7e4ebc7b26561df | 2a0c65b3caa898d0720eb89a937c0c4ca5360548 | refs/heads/master | 2023-04-26T05:08:13.139108 | 2018-11-24T21:52:52 | 2018-11-24T21:52:52 | 180,796,283 | 0 | 0 | NOASSERTION | 2023-04-24T01:42:29 | 2019-04-11T13:18:19 | C | UTF-8 | Python | false | false | 57,169 | py | # P2P channel selection test cases
# Copyright (c) 2014, Jouni Malinen <[email protected]>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
from remotehost import remote_compatible
import logging
logger = logging.getLogger()
import os
import subprocess
import time
import hostapd
import hwsim_utils
from tshark import run_tshark
from wpasupplicant import WpaSupplicant
from hwsim import HWSimRadio
from p2p_utils import *
def set_country(country, dev=None):
subprocess.call(['iw', 'reg', 'set', country])
time.sleep(0.1)
if dev:
for i in range(10):
ev = dev.wait_global_event(["CTRL-EVENT-REGDOM-CHANGE"], timeout=15)
if ev is None:
raise Exception("No regdom change event seen")
if "type=COUNTRY alpha2=" + country in ev:
return
raise Exception("No matching regdom event seen for set_country(%s)" % country)
def test_p2p_channel_5ghz(dev):
"""P2P group formation with 5 GHz preference"""
try:
set_country("US", dev[0])
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz - did not follow 5 GHz preference" % freq)
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[1].flush_scan_cache()
def test_p2p_channel_5ghz_no_vht(dev):
"""P2P group formation with 5 GHz preference when VHT channels are disallowed"""
try:
set_country("US", dev[0])
dev[0].global_request("P2P_SET disallow_freq 5180-5240")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz - did not follow 5 GHz preference" % freq)
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[0].global_request("P2P_SET disallow_freq ")
dev[1].flush_scan_cache()
def test_p2p_channel_random_social(dev):
"""P2P group formation with 5 GHz preference but all 5 GHz channels disabled"""
try:
set_country("US", dev[0])
dev[0].global_request("SET p2p_oper_channel 11")
dev[0].global_request("P2P_SET disallow_freq 5000-6000,2462")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq not in [ 2412, 2437, 2462 ]:
raise Exception("Unexpected channel %d MHz - did not pick random social channel" % freq)
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[0].global_request("P2P_SET disallow_freq ")
dev[1].flush_scan_cache()
def test_p2p_channel_random(dev):
"""P2P group formation with 5 GHz preference but all 5 GHz channels and all social channels disabled"""
try:
set_country("US", dev[0])
dev[0].global_request("SET p2p_oper_channel 11")
dev[0].global_request("P2P_SET disallow_freq 5000-6000,2412,2437,2462")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq > 2500 or freq in [ 2412, 2437, 2462 ]:
raise Exception("Unexpected channel %d MHz" % freq)
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[0].global_request("P2P_SET disallow_freq ")
dev[1].flush_scan_cache()
def test_p2p_channel_random_social_with_op_class_change(dev, apdev, params):
"""P2P group formation using random social channel with oper class change needed"""
try:
set_country("US", dev[0])
logger.info("Start group on 5 GHz")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz - did not pick 5 GHz preference" % freq)
remove_group(dev[0], dev[1])
logger.info("Disable 5 GHz and try to re-start group based on 5 GHz preference")
dev[0].global_request("SET p2p_oper_reg_class 115")
dev[0].global_request("SET p2p_oper_channel 36")
dev[0].global_request("P2P_SET disallow_freq 5000-6000")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq not in [ 2412, 2437, 2462 ]:
raise Exception("Unexpected channel %d MHz - did not pick random social channel" % freq)
remove_group(dev[0], dev[1])
out = run_tshark(os.path.join(params['logdir'], "hwsim0.pcapng"),
"wifi_p2p.public_action.subtype == 0")
if out is not None:
last = None
for l in out.splitlines():
if "Operating Channel:" not in l:
continue
last = l
if last is None:
raise Exception("Could not find GO Negotiation Request")
if "Operating Class 81" not in last:
raise Exception("Unexpected operating class: " + last.strip())
finally:
set_country("00")
dev[0].global_request("P2P_SET disallow_freq ")
dev[0].global_request("SET p2p_oper_reg_class 0")
dev[0].global_request("SET p2p_oper_channel 0")
dev[1].flush_scan_cache()
def test_p2p_channel_avoid(dev):
"""P2P and avoid frequencies driver event"""
try:
set_country("US", dev[0])
if "OK" not in dev[0].request("DRIVER_EVENT AVOID_FREQUENCIES 5000-6000,2412,2437,2462"):
raise Exception("Could not simulate driver event")
ev = dev[0].wait_event(["CTRL-EVENT-AVOID-FREQ"], timeout=10)
if ev is None:
raise Exception("No CTRL-EVENT-AVOID-FREQ event")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq > 2500 or freq in [ 2412, 2437, 2462 ]:
raise Exception("Unexpected channel %d MHz" % freq)
if "OK" not in dev[0].request("DRIVER_EVENT AVOID_FREQUENCIES"):
raise Exception("Could not simulate driver event(2)")
ev = dev[0].wait_event(["CTRL-EVENT-AVOID-FREQ"], timeout=10)
if ev is None:
raise Exception("No CTRL-EVENT-AVOID-FREQ event")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"], timeout=1)
if ev is not None:
raise Exception("Unexpected + " + ev + " event")
if "OK" not in dev[0].request("DRIVER_EVENT AVOID_FREQUENCIES " + str(freq)):
raise Exception("Could not simulate driver event(3)")
ev = dev[0].wait_event(["CTRL-EVENT-AVOID-FREQ"], timeout=10)
if ev is None:
raise Exception("No CTRL-EVENT-AVOID-FREQ event")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"],
timeout=10)
if ev is None:
raise Exception("No P2P-REMOVE-AND-REFORM-GROUP or AP-CSA-FINISHED event")
finally:
set_country("00")
dev[0].request("DRIVER_EVENT AVOID_FREQUENCIES")
dev[1].flush_scan_cache()
def test_p2p_channel_avoid2(dev):
"""P2P and avoid frequencies driver event on 5 GHz"""
try:
set_country("US", dev[0])
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False,
i_max_oper_chwidth=80,
i_ht40=True, i_vht=True)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz" % freq)
if "OK" not in dev[0].request("DRIVER_EVENT AVOID_FREQUENCIES " + str(freq)):
raise Exception("Could not simulate driver event(2)")
ev = dev[0].wait_event(["CTRL-EVENT-AVOID-FREQ"], timeout=10)
if ev is None:
raise Exception("No CTRL-EVENT-AVOID-FREQ event")
ev = dev[0].wait_group_event(["CTRL-EVENT-CHANNEL-SWITCH"], timeout=10)
if ev is None:
raise Exception("No channel switch event seen")
if "ch_width=80 MHz" not in ev:
raise Exception("Could not move to a VHT80 channel")
ev = dev[0].wait_group_event(["AP-CSA-FINISHED"], timeout=1)
if ev is None:
raise Exception("No AP-CSA-FINISHED event seen")
finally:
set_country("00")
dev[0].request("DRIVER_EVENT AVOID_FREQUENCIES")
dev[1].flush_scan_cache()
def test_p2p_channel_avoid3(dev):
"""P2P and avoid frequencies driver event on 5 GHz"""
try:
set_country("CN", dev[0])
form(dev[0], dev[1])
set_country("CN", dev[0])
[i_res, r_res] = invite_from_go(dev[0], dev[1], terminate=False,
extra="ht40 vht")
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz" % freq)
if "OK" not in dev[0].request("DRIVER_EVENT AVOID_FREQUENCIES 5180-5320,5500-5640"):
raise Exception("Could not simulate driver event(2)")
ev = dev[0].wait_event(["CTRL-EVENT-AVOID-FREQ"], timeout=10)
if ev is None:
raise Exception("No CTRL-EVENT-AVOID-FREQ event")
ev = dev[0].wait_group_event(["CTRL-EVENT-CHANNEL-SWITCH"], timeout=10)
if ev is None:
raise Exception("No channel switch event seen")
if "ch_width=80 MHz" not in ev:
raise Exception("Could not move to a VHT80 channel")
ev = dev[0].wait_group_event(["AP-CSA-FINISHED"], timeout=1)
if ev is None:
raise Exception("No AP-CSA-FINISHED event seen")
finally:
set_country("00")
dev[0].request("DRIVER_EVENT AVOID_FREQUENCIES")
dev[1].flush_scan_cache()
@remote_compatible
def test_autogo_following_bss(dev, apdev):
"""P2P autonomous GO operate on the same channel as station interface"""
if dev[0].get_mcc() > 1:
logger.info("test mode: MCC")
dev[0].global_request("SET p2p_no_group_iface 0")
channels = { 3 : "2422", 5 : "2432", 9 : "2452" }
for key in channels:
hapd = hostapd.add_ap(apdev[0], { "ssid" : 'ap-test',
"channel" : str(key) })
dev[0].connect("ap-test", key_mgmt="NONE",
scan_freq=str(channels[key]))
res_go = autogo(dev[0])
if res_go['freq'] != channels[key]:
raise Exception("Group operation channel is not the same as on connected station interface")
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].remove_group(res_go['ifname'])
@remote_compatible
def test_go_neg_with_bss_connected(dev, apdev):
"""P2P channel selection: GO negotiation when station interface is connected"""
dev[0].flush_scan_cache()
dev[1].flush_scan_cache()
dev[0].global_request("SET p2p_no_group_iface 0")
hapd = hostapd.add_ap(apdev[0],
{ "ssid": 'bss-2.4ghz', "channel": '5' })
dev[0].connect("bss-2.4ghz", key_mgmt="NONE", scan_freq="2432")
#dev[0] as GO
[i_res, r_res] = go_neg_pbc(i_dev=dev[0], i_intent=10, r_dev=dev[1],
r_intent=1)
check_grpform_results(i_res, r_res)
if i_res['role'] != "GO":
raise Exception("GO not selected according to go_intent")
if i_res['freq'] != "2432":
raise Exception("Group formed on a different frequency than BSS")
hwsim_utils.test_connectivity(dev[0], hapd)
dev[0].remove_group(i_res['ifname'])
dev[1].wait_go_ending_session()
if dev[0].get_mcc() > 1:
logger.info("Skip as-client case due to MCC being enabled")
return
#dev[0] as client
[i_res2, r_res2] = go_neg_pbc(i_dev=dev[0], i_intent=1, r_dev=dev[1],
r_intent=10)
check_grpform_results(i_res2, r_res2)
if i_res2['role'] != "client":
raise Exception("GO not selected according to go_intent")
if i_res2['freq'] != "2432":
raise Exception("Group formed on a different frequency than BSS")
hwsim_utils.test_connectivity(dev[0], hapd)
dev[1].remove_group(r_res2['ifname'])
dev[0].wait_go_ending_session()
dev[0].request("DISCONNECT")
hapd.disable()
dev[0].flush_scan_cache()
dev[1].flush_scan_cache()
def test_autogo_with_bss_on_disallowed_chan(dev, apdev):
"""P2P channel selection: Autonomous GO with BSS on a disallowed channel"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
wpas.global_request("SET p2p_no_group_iface 0")
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
try:
hapd = hostapd.add_ap(apdev[0], { "ssid": 'bss-2.4ghz',
"channel": '1' })
wpas.global_request("P2P_SET disallow_freq 2412")
wpas.connect("bss-2.4ghz", key_mgmt="NONE", scan_freq="2412")
res = autogo(wpas)
if res['freq'] == "2412":
raise Exception("GO set on a disallowed channel")
hwsim_utils.test_connectivity(wpas, hapd)
finally:
wpas.global_request("P2P_SET disallow_freq ")
def test_go_neg_with_bss_on_disallowed_chan(dev, apdev):
"""P2P channel selection: GO negotiation with station interface on a disallowed channel"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
wpas.global_request("SET p2p_no_group_iface 0")
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
try:
hapd = hostapd.add_ap(apdev[0],
{ "ssid": 'bss-2.4ghz', "channel": '1' })
# make sure PBC overlap from old test cases is not maintained
dev[1].flush_scan_cache()
wpas.connect("bss-2.4ghz", key_mgmt="NONE", scan_freq="2412")
wpas.global_request("P2P_SET disallow_freq 2412")
#wpas as GO
[i_res, r_res] = go_neg_pbc(i_dev=wpas, i_intent=10, r_dev=dev[1],
r_intent=1)
check_grpform_results(i_res, r_res)
if i_res['role'] != "GO":
raise Exception("GO not selected according to go_intent")
if i_res['freq'] == "2412":
raise Exception("Group formed on a disallowed channel")
hwsim_utils.test_connectivity(wpas, hapd)
wpas.remove_group(i_res['ifname'])
dev[1].wait_go_ending_session()
dev[1].flush_scan_cache()
wpas.dump_monitor()
dev[1].dump_monitor()
#wpas as client
[i_res2, r_res2] = go_neg_pbc(i_dev=wpas, i_intent=1, r_dev=dev[1],
r_intent=10)
check_grpform_results(i_res2, r_res2)
if i_res2['role'] != "client":
raise Exception("GO not selected according to go_intent")
if i_res2['freq'] == "2412":
raise Exception("Group formed on a disallowed channel")
hwsim_utils.test_connectivity(wpas, hapd)
dev[1].remove_group(r_res2['ifname'])
wpas.wait_go_ending_session()
ev = dev[1].wait_global_event(["P2P-GROUP-REMOVED"], timeout=5)
if ev is None:
raise Exception("Group removal not indicated")
wpas.request("DISCONNECT")
hapd.disable()
finally:
wpas.global_request("P2P_SET disallow_freq ")
def test_autogo_force_diff_channel(dev, apdev):
"""P2P autonomous GO and station interface operate on different channels"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
wpas.global_request("SET p2p_no_group_iface 0")
hapd = hostapd.add_ap(apdev[0],
{"ssid" : 'ap-test', "channel" : '1'})
wpas.connect("ap-test", key_mgmt = "NONE", scan_freq = "2412")
wpas.dump_monitor()
channels = { 2 : 2417, 5 : 2432, 9 : 2452 }
for key in channels:
res_go = autogo(wpas, channels[key])
wpas.dump_monitor()
hwsim_utils.test_connectivity(wpas, hapd)
if int(res_go['freq']) == 2412:
raise Exception("Group operation channel is: 2412 excepted: " + res_go['freq'])
wpas.remove_group(res_go['ifname'])
wpas.dump_monitor()
def test_go_neg_forced_freq_diff_than_bss_freq(dev, apdev):
"""P2P channel selection: GO negotiation with forced freq different than station interface"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
# Clear possible PBC session overlap from previous test case
dev[1].flush_scan_cache()
wpas.global_request("SET p2p_no_group_iface 0")
hapd = hostapd.add_ap(apdev[0],
{ "country_code": 'US',
"ssid": 'bss-5ghz', "hw_mode": 'a',
"channel": '40' })
wpas.connect("bss-5ghz", key_mgmt="NONE", scan_freq="5200")
# GO and peer force the same freq, different than BSS freq,
# wpas to become GO
[i_res, r_res] = go_neg_pbc(i_dev=dev[1], i_intent=1, i_freq=5180,
r_dev=wpas, r_intent=14, r_freq=5180)
check_grpform_results(i_res, r_res)
if i_res['freq'] != "5180":
raise Exception("P2P group formed on unexpected frequency: " + i_res['freq'])
if r_res['role'] != "GO":
raise Exception("GO not selected according to go_intent")
hwsim_utils.test_connectivity(wpas, hapd)
wpas.remove_group(r_res['ifname'])
dev[1].wait_go_ending_session()
dev[1].flush_scan_cache()
# GO and peer force the same freq, different than BSS freq, wpas to
# become client
[i_res2, r_res2] = go_neg_pbc(i_dev=dev[1], i_intent=14, i_freq=2422,
r_dev=wpas, r_intent=1, r_freq=2422)
check_grpform_results(i_res2, r_res2)
if i_res2['freq'] != "2422":
raise Exception("P2P group formed on unexpected frequency: " + i_res2['freq'])
if r_res2['role'] != "client":
raise Exception("GO not selected according to go_intent")
hwsim_utils.test_connectivity(wpas, hapd)
wpas.request("DISCONNECT")
hapd.request("DISABLE")
subprocess.call(['iw', 'reg', 'set', '00'])
wpas.flush_scan_cache()
@remote_compatible
def test_go_pref_chan_bss_on_diff_chan(dev, apdev):
"""P2P channel selection: Station on different channel than GO configured pref channel"""
dev[0].global_request("SET p2p_no_group_iface 0")
try:
hapd = hostapd.add_ap(apdev[0], { "ssid": 'bss-2.4ghz',
"channel": '1' })
dev[0].global_request("SET p2p_pref_chan 81:2")
dev[0].connect("bss-2.4ghz", key_mgmt="NONE", scan_freq="2412")
res = autogo(dev[0])
if res['freq'] != "2412":
raise Exception("GO channel did not follow BSS")
hwsim_utils.test_connectivity(dev[0], hapd)
finally:
dev[0].global_request("SET p2p_pref_chan ")
def test_go_pref_chan_bss_on_disallowed_chan(dev, apdev):
"""P2P channel selection: Station interface on different channel than GO configured pref channel, and station channel is disallowed"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
wpas.global_request("SET p2p_no_group_iface 0")
try:
hapd = hostapd.add_ap(apdev[0], { "ssid": 'bss-2.4ghz',
"channel": '1' })
wpas.global_request("P2P_SET disallow_freq 2412")
wpas.global_request("SET p2p_pref_chan 81:2")
wpas.connect("bss-2.4ghz", key_mgmt="NONE", scan_freq="2412")
res2 = autogo(wpas)
if res2['freq'] != "2417":
raise Exception("GO channel did not follow pref_chan configuration")
hwsim_utils.test_connectivity(wpas, hapd)
finally:
wpas.global_request("P2P_SET disallow_freq ")
wpas.global_request("SET p2p_pref_chan ")
@remote_compatible
def test_no_go_freq(dev, apdev):
"""P2P channel selection: no GO freq"""
try:
dev[0].global_request("SET p2p_no_go_freq 2412")
# dev[0] as client, channel 1 is ok
[i_res, r_res] = go_neg_pbc(i_dev=dev[0], i_intent=1,
r_dev=dev[1], r_intent=14, r_freq=2412)
check_grpform_results(i_res, r_res)
if i_res['freq'] != "2412":
raise Exception("P2P group not formed on forced freq")
dev[1].remove_group(r_res['ifname'])
dev[0].wait_go_ending_session()
dev[0].flush_scan_cache()
fail = False
# dev[0] as GO, channel 1 is not allowed
try:
dev[0].global_request("SET p2p_no_go_freq 2412")
[i_res2, r_res2] = go_neg_pbc(i_dev=dev[0], i_intent=14,
r_dev=dev[1], r_intent=1, r_freq=2412)
check_grpform_results(i_res2, r_res2)
fail = True
except:
pass
if fail:
raise Exception("GO set on a disallowed freq")
finally:
dev[0].global_request("SET p2p_no_go_freq ")
@remote_compatible
def test_go_neg_peers_force_diff_freq(dev, apdev):
"""P2P channel selection when peers for different frequency"""
try:
[i_res2, r_res2] = go_neg_pbc(i_dev=dev[0], i_intent=14, i_freq=5180,
r_dev=dev[1], r_intent=0, r_freq=5200)
    except Exception:
return
raise Exception("Unexpected group formation success")
@remote_compatible
def test_autogo_random_channel(dev, apdev):
"""P2P channel selection: GO instantiated on random channel 1, 6, 11"""
freqs = []
go_freqs = ["2412", "2437", "2462"]
for i in range(0, 20):
result = autogo(dev[0])
if result['freq'] not in go_freqs:
raise Exception("Unexpected frequency selected: " + result['freq'])
if result['freq'] not in freqs:
freqs.append(result['freq'])
if len(freqs) == 3:
break
dev[0].remove_group(result['ifname'])
if i == 20:
raise Exception("GO created 20 times and not all social channels were selected. freqs not selected: " + str(list(set(go_freqs) - set(freqs))))
@remote_compatible
def test_p2p_autogo_pref_chan_disallowed(dev, apdev):
"""P2P channel selection: GO preferred channels are disallowed"""
try:
dev[0].global_request("SET p2p_pref_chan 81:1,81:3,81:6,81:9,81:11")
dev[0].global_request("P2P_SET disallow_freq 2412,2422,2437,2452,2462")
for i in range(0, 5):
res = autogo(dev[0])
if res['freq'] in [ "2412", "2422", "2437", "2452", "2462" ]:
raise Exception("GO channel is disallowed")
dev[0].remove_group(res['ifname'])
finally:
dev[0].global_request("P2P_SET disallow_freq ")
dev[0].global_request("SET p2p_pref_chan ")
def test_p2p_autogo_pref_chan_not_in_regulatory(dev, apdev):
"""P2P channel selection: GO preferred channel not allowed in the regulatory rules"""
try:
set_country("US", dev[0])
dev[0].global_request("SET p2p_pref_chan 124:149")
res = autogo(dev[0], persistent=True)
if res['freq'] != "5745":
raise Exception("Unexpected channel selected: " + res['freq'])
dev[0].remove_group(res['ifname'])
netw = dev[0].list_networks(p2p=True)
if len(netw) != 1:
raise Exception("Unexpected number of network blocks: " + str(netw))
id = netw[0]['id']
set_country("SE", dev[0])
res = autogo(dev[0], persistent=id)
if res['freq'] == "5745":
raise Exception("Unexpected channel selected(2): " + res['freq'])
dev[0].remove_group(res['ifname'])
finally:
dev[0].global_request("SET p2p_pref_chan ")
set_country("00")
def run_autogo(dev, param):
if "OK" not in dev.global_request("P2P_GROUP_ADD " + param):
raise Exception("P2P_GROUP_ADD failed: " + param)
ev = dev.wait_global_event(["P2P-GROUP-STARTED"], timeout=10)
if ev is None:
raise Exception("GO start up timed out")
res = dev.group_form_result(ev)
dev.remove_group()
return res
def _test_autogo_ht_vht(dev):
res = run_autogo(dev[0], "ht40")
res = run_autogo(dev[0], "vht")
res = run_autogo(dev[0], "freq=2")
freq = int(res['freq'])
if freq < 2412 or freq > 2462:
raise Exception("Unexpected freq=2 channel: " + str(freq))
res = run_autogo(dev[0], "freq=5")
freq = int(res['freq'])
if freq < 5000 or freq >= 6000:
raise Exception("Unexpected freq=5 channel: " + str(freq))
res = run_autogo(dev[0], "freq=5 ht40 vht")
logger.info(str(res))
freq = int(res['freq'])
if freq < 5000 or freq >= 6000:
raise Exception("Unexpected freq=5 ht40 vht channel: " + str(freq))
def test_autogo_ht_vht(dev):
"""P2P autonomous GO with HT/VHT parameters"""
try:
set_country("US", dev[0])
_test_autogo_ht_vht(dev)
finally:
set_country("00")
def test_p2p_listen_chan_optimize(dev, apdev):
"""P2P listen channel optimization"""
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add("wlan5")
addr5 = wpas.p2p_dev_addr()
try:
if "OK" not in wpas.global_request("SET p2p_optimize_listen_chan 1"):
raise Exception("Failed to set p2p_optimize_listen_chan")
wpas.p2p_listen()
if not dev[0].discover_peer(addr5):
raise Exception("Could not discover peer")
peer = dev[0].get_peer(addr5)
lfreq = peer['listen_freq']
wpas.p2p_stop_find()
dev[0].p2p_stop_find()
channel = "1" if lfreq != '2412' else "6"
freq = "2412" if lfreq != '2412' else "2437"
params = { "ssid": "test-open", "channel": channel }
hapd = hostapd.add_ap(apdev[0], params)
id = wpas.connect("test-open", key_mgmt="NONE", scan_freq=freq)
wpas.p2p_listen()
if "OK" not in dev[0].global_request("P2P_FLUSH"):
raise Exception("P2P_FLUSH failed")
if not dev[0].discover_peer(addr5):
raise Exception("Could not discover peer")
peer = dev[0].get_peer(addr5)
lfreq2 = peer['listen_freq']
if lfreq == lfreq2:
raise Exception("Listen channel did not change")
if lfreq2 != freq:
raise Exception("Listen channel not on AP's operating channel")
wpas.p2p_stop_find()
dev[0].p2p_stop_find()
wpas.request("DISCONNECT")
wpas.wait_disconnected()
# for larger coverage, cover case of current channel matching
wpas.select_network(id)
wpas.wait_connected()
wpas.request("DISCONNECT")
wpas.wait_disconnected()
lchannel = "1" if channel != "1" else "6"
lfreq3 = "2412" if channel != "1" else "2437"
if "OK" not in wpas.global_request("P2P_SET listen_channel " + lchannel):
raise Exception("Failed to set listen channel")
wpas.select_network(id)
wpas.wait_connected()
wpas.p2p_listen()
if "OK" not in dev[0].global_request("P2P_FLUSH"):
raise Exception("P2P_FLUSH failed")
if not dev[0].discover_peer(addr5):
raise Exception("Could not discover peer")
peer = dev[0].get_peer(addr5)
lfreq4 = peer['listen_freq']
if lfreq4 != lfreq3:
raise Exception("Unexpected Listen channel after configuration")
wpas.p2p_stop_find()
dev[0].p2p_stop_find()
finally:
wpas.global_request("SET p2p_optimize_listen_chan 0")
def test_p2p_channel_5ghz_only(dev):
"""P2P GO start with only 5 GHz band allowed"""
try:
set_country("US", dev[0])
dev[0].global_request("P2P_SET disallow_freq 2400-2500")
res = autogo(dev[0])
freq = int(res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz" % freq)
dev[0].remove_group()
finally:
set_country("00")
dev[0].global_request("P2P_SET disallow_freq ")
def test_p2p_channel_5ghz_165_169_us(dev):
"""P2P GO and 5 GHz channels 165 (allowed) and 169 (disallowed) in US"""
try:
set_country("US", dev[0])
res = dev[0].p2p_start_go(freq=5825)
if res['freq'] != "5825":
raise Exception("Unexpected frequency: " + res['freq'])
dev[0].remove_group()
res = dev[0].global_request("P2P_GROUP_ADD freq=5845")
if "FAIL" not in res:
raise Exception("GO on channel 169 allowed unexpectedly")
finally:
set_country("00")
def wait_go_down_up(dev):
ev = dev.wait_group_event(["AP-DISABLED"], timeout=5)
if ev is None:
raise Exception("AP-DISABLED not seen after P2P-REMOVE-AND-REFORM-GROUP")
ev = dev.wait_group_event(["AP-ENABLED"], timeout=5)
if ev is None:
raise Exception("AP-ENABLED not seen after P2P-REMOVE-AND-REFORM-GROUP")
def test_p2p_go_move_reg_change(dev, apdev):
"""P2P GO move due to regulatory change"""
try:
set_country("US")
dev[0].global_request("P2P_SET disallow_freq 2400-5000")
res = autogo(dev[0])
freq1 = int(res['freq'])
if freq1 < 5000:
raise Exception("Unexpected channel %d MHz" % freq1)
dev[0].global_request("P2P_SET disallow_freq ")
# GO move is not allowed while waiting for initial client connection
connect_cli(dev[0], dev[1], freq=freq1)
dev[1].remove_group()
freq = dev[0].get_group_status_field('freq')
if int(freq) < 5000:
raise Exception("Unexpected freq after initial client: " + freq)
dev[0].dump_monitor()
set_country("00")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"],
timeout=10)
if ev is None:
raise Exception("P2P-REMOVE-AND-REFORM-GROUP or AP-CSA-FINISHED not seen")
if "P2P-REMOVE-AND-REFORM-GROUP" in ev:
wait_go_down_up(dev[0])
freq2 = dev[0].get_group_status_field('freq')
if freq1 == freq2:
raise Exception("Unexpected freq after group reform=" + freq2)
dev[0].remove_group()
finally:
dev[0].global_request("P2P_SET disallow_freq ")
set_country("00")
def test_p2p_go_move_active(dev, apdev):
"""P2P GO stays in freq although SCM is possible"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
ndev = [ wpas, dev[1] ]
_test_p2p_go_move_active(ndev, apdev)
def _test_p2p_go_move_active(dev, apdev):
dev[0].global_request("SET p2p_no_group_iface 0")
try:
dev[0].global_request("P2P_SET disallow_freq 2430-6000")
hapd = hostapd.add_ap(apdev[0], { "ssid" : 'ap-test',
"channel" : '11' })
dev[0].connect("ap-test", key_mgmt="NONE",
scan_freq="2462")
res = autogo(dev[0])
freq = int(res['freq'])
if freq > 2430:
raise Exception("Unexpected channel %d MHz" % freq)
# GO move is not allowed while waiting for initial client connection
connect_cli(dev[0], dev[1], freq=freq)
dev[1].remove_group()
freq = dev[0].get_group_status_field('freq')
if int(freq) > 2430:
raise Exception("Unexpected freq after initial client: " + freq)
dev[0].dump_monitor()
dev[0].global_request("P2P_SET disallow_freq ")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"],
timeout=10)
if ev is not None:
raise Exception("Unexpected P2P-REMOVE-AND-REFORM-GROUP or AP-CSA-FINISHED seen")
dev[0].remove_group()
finally:
dev[0].global_request("P2P_SET disallow_freq ")
def test_p2p_go_move_scm(dev, apdev):
"""P2P GO move due to SCM operation preference"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
ndev = [ wpas, dev[1] ]
_test_p2p_go_move_scm(ndev, apdev)
def _test_p2p_go_move_scm(dev, apdev):
dev[0].global_request("SET p2p_no_group_iface 0")
try:
dev[0].global_request("P2P_SET disallow_freq 2430-6000")
hapd = hostapd.add_ap(apdev[0], { "ssid" : 'ap-test',
"channel" : '11' })
dev[0].connect("ap-test", key_mgmt="NONE",
scan_freq="2462")
dev[0].global_request("SET p2p_go_freq_change_policy 0")
res = autogo(dev[0])
freq = int(res['freq'])
if freq > 2430:
raise Exception("Unexpected channel %d MHz" % freq)
# GO move is not allowed while waiting for initial client connection
connect_cli(dev[0], dev[1], freq=freq)
dev[1].remove_group()
freq = dev[0].get_group_status_field('freq')
if int(freq) > 2430:
raise Exception("Unexpected freq after initial client: " + freq)
dev[0].dump_monitor()
dev[0].global_request("P2P_SET disallow_freq ")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"], timeout=3)
if ev is None:
raise Exception("P2P-REMOVE-AND-REFORM-GROUP or AP-CSA-FINISHED not seen")
if "P2P-REMOVE-AND-REFORM-GROUP" in ev:
wait_go_down_up(dev[0])
freq = dev[0].get_group_status_field('freq')
if freq != '2462':
raise Exception("Unexpected freq after group reform=" + freq)
dev[0].remove_group()
finally:
dev[0].global_request("P2P_SET disallow_freq ")
dev[0].global_request("SET p2p_go_freq_change_policy 2")
def test_p2p_go_move_scm_peer_supports(dev, apdev):
"""P2P GO move due to SCM operation preference (peer supports)"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
ndev = [ wpas, dev[1] ]
_test_p2p_go_move_scm_peer_supports(ndev, apdev)
def _test_p2p_go_move_scm_peer_supports(dev, apdev):
try:
dev[0].global_request("SET p2p_go_freq_change_policy 1")
set_country("US", dev[0])
dev[0].global_request("SET p2p_no_group_iface 0")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz - did not follow 5 GHz preference" % freq)
hapd = hostapd.add_ap(apdev[0], { "ssid" : 'ap-test',
"channel" : '11' })
        logger.info('Connecting client to an AP on channel 11')
dev[0].connect("ap-test", key_mgmt="NONE",
scan_freq="2462")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"], timeout=3)
if ev is None:
raise Exception("P2P-REMOVE-AND-REFORM-GROUP or AP-CSA-FINISHED not seen")
if "P2P-REMOVE-AND-REFORM-GROUP" in ev:
wait_go_down_up(dev[0])
freq = dev[0].get_group_status_field('freq')
if freq != '2462':
raise Exception("Unexpected freq after group reform=" + freq)
dev[0].remove_group()
finally:
dev[0].global_request("SET p2p_go_freq_change_policy 2")
set_country("00")
def test_p2p_go_move_scm_peer_does_not_support(dev, apdev):
"""No P2P GO move due to SCM operation (peer does not supports)"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
ndev = [ wpas, dev[1] ]
_test_p2p_go_move_scm_peer_does_not_support(ndev, apdev)
def _test_p2p_go_move_scm_peer_does_not_support(dev, apdev):
try:
dev[0].global_request("SET p2p_go_freq_change_policy 1")
set_country("US", dev[0])
dev[0].global_request("SET p2p_no_group_iface 0")
if "OK" not in dev[1].request("DRIVER_EVENT AVOID_FREQUENCIES 2400-2500"):
raise Exception("Could not simulate driver event")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz - did not follow 5 GHz preference" % freq)
hapd = hostapd.add_ap(apdev[0], { "ssid" : 'ap-test',
"channel" : '11' })
        logger.info('Connecting client to an AP on channel 11')
dev[0].connect("ap-test", key_mgmt="NONE",
scan_freq="2462")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"],
timeout=10)
if ev is not None:
raise Exception("Unexpected P2P-REMOVE-AND-REFORM-GROUP or AP-CSA-FINISHED seen")
dev[0].remove_group()
finally:
dev[0].global_request("SET p2p_go_freq_change_policy 2")
dev[1].request("DRIVER_EVENT AVOID_FREQUENCIES")
set_country("00")
def test_p2p_go_move_scm_multi(dev, apdev):
"""P2P GO move due to SCM operation preference multiple times"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
ndev = [ wpas, dev[1] ]
_test_p2p_go_move_scm_multi(ndev, apdev)
def _test_p2p_go_move_scm_multi(dev, apdev):
dev[0].request("SET p2p_no_group_iface 0")
try:
dev[0].global_request("P2P_SET disallow_freq 2430-6000")
hapd = hostapd.add_ap(apdev[0], { "ssid" : 'ap-test-1',
"channel" : '11' })
dev[0].connect("ap-test-1", key_mgmt="NONE",
scan_freq="2462")
dev[0].global_request("SET p2p_go_freq_change_policy 0")
res = autogo(dev[0])
freq = int(res['freq'])
if freq > 2430:
raise Exception("Unexpected channel %d MHz" % freq)
# GO move is not allowed while waiting for initial client connection
connect_cli(dev[0], dev[1], freq=freq)
dev[1].remove_group()
freq = dev[0].get_group_status_field('freq')
if int(freq) > 2430:
raise Exception("Unexpected freq after initial client: " + freq)
dev[0].dump_monitor()
dev[0].global_request("P2P_SET disallow_freq ")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"], timeout=3)
if ev is None:
raise Exception("P2P-REMOVE-AND-REFORM-GROUP or AP-CSA-FINISHED not seen")
if "P2P-REMOVE-AND-REFORM-GROUP" in ev:
wait_go_down_up(dev[0])
freq = dev[0].get_group_status_field('freq')
if freq != '2462':
raise Exception("Unexpected freq after group reform=" + freq)
hapd = hostapd.add_ap(apdev[0], { "ssid" : 'ap-test-2',
"channel" : '6' })
dev[0].connect("ap-test-2", key_mgmt="NONE",
scan_freq="2437")
ev = dev[0].wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"], timeout=5)
if ev is None:
raise Exception("(2) P2P-REMOVE-AND-REFORM-GROUP or AP-CSA-FINISHED not seen")
if "P2P-REMOVE-AND-REFORM-GROUP" in ev:
wait_go_down_up(dev[0])
freq = dev[0].get_group_status_field('freq')
if freq != '2437':
raise Exception("(2) Unexpected freq after group reform=" + freq)
dev[0].remove_group()
finally:
dev[0].global_request("P2P_SET disallow_freq ")
dev[0].global_request("SET p2p_go_freq_change_policy 2")
def test_p2p_delay_go_csa(dev, apdev, params):
"""P2P GO CSA delayed when inviting a P2P Device to an active P2P Group"""
with HWSimRadio(n_channels=2) as (radio, iface):
wpas = WpaSupplicant(global_iface='/tmp/wpas-wlan5')
wpas.interface_add(iface)
wpas.global_request("SET p2p_no_group_iface 0")
if wpas.get_mcc() < 2:
raise Exception("New radio does not support MCC")
addr0 = wpas.p2p_dev_addr()
addr1 = dev[1].p2p_dev_addr()
try:
dev[1].p2p_listen()
if not wpas.discover_peer(addr1, social=True):
raise Exception("Peer " + addr1 + " not found")
wpas.p2p_stop_find()
hapd = hostapd.add_ap(apdev[0], { "ssid": 'bss-2.4ghz',
"channel": '1' })
wpas.connect("bss-2.4ghz", key_mgmt="NONE", scan_freq="2412")
wpas.global_request("SET p2p_go_freq_change_policy 0")
wpas.dump_monitor()
logger.info("Start GO on channel 6")
res = autogo(wpas, freq=2437)
if res['freq'] != "2437":
raise Exception("GO set on a freq=%s instead of 2437" % res['freq'])
# Start find on dev[1] to run scans with dev[2] in parallel
dev[1].p2p_find(social=True)
# Use another client device to stop the initial client connection
# timeout on the GO
if not dev[2].discover_peer(addr0, social=True):
raise Exception("Peer2 did not find the GO")
dev[2].p2p_stop_find()
pin = dev[2].wps_read_pin()
wpas.p2p_go_authorize_client(pin)
dev[2].global_request("P2P_CONNECT " + addr0 + " " + pin + " join freq=2437")
ev = dev[2].wait_global_event(["P2P-GROUP-STARTED"], timeout=10)
if ev is None:
raise Exception("Peer2 did not get connected")
if not dev[1].discover_peer(addr0, social=True):
raise Exception("Peer did not find the GO")
pin = dev[1].wps_read_pin()
dev[1].global_request("P2P_CONNECT " + addr0 + " " + pin + " join auth")
dev[1].p2p_listen()
# Force P2P GO channel switch on successful invitation signaling
wpas.group_request("SET p2p_go_csa_on_inv 1")
logger.info("Starting invitation")
wpas.p2p_go_authorize_client(pin)
wpas.global_request("P2P_INVITE group=" + wpas.group_ifname + " peer=" + addr1)
ev = dev[1].wait_global_event(["P2P-INVITATION-RECEIVED",
"P2P-GROUP-STARTED"], timeout=10)
if ev is None:
raise Exception("Timeout on invitation on peer")
if "P2P-INVITATION-RECEIVED" in ev:
raise Exception("Unexpected request to accept pre-authorized invitation")
# A P2P GO move is not expected at this stage, as during the
# invitation signaling, the P2P GO includes only its current
# operating channel in the channel list, and as the invitation
# response can only include channels that were also in the
            # invitation request channel list, the group common channels
            # include only the current P2P GO operating channel.
ev = wpas.wait_group_event(["P2P-REMOVE-AND-REFORM-GROUP",
"AP-CSA-FINISHED"], timeout=1)
if ev is not None:
raise Exception("Unexpected + " + ev + " event")
finally:
wpas.global_request("SET p2p_go_freq_change_policy 2")
def test_p2p_channel_vht80(dev):
"""P2P group formation with VHT 80 MHz"""
try:
set_country("FI", dev[0])
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
i_freq=5180,
i_max_oper_chwidth=80,
i_ht40=True, i_vht=True,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz - did not follow 5 GHz preference" % freq)
sig = dev[1].group_request("SIGNAL_POLL").splitlines()
if "FREQUENCY=5180" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(1): " + str(sig))
if "WIDTH=80 MHz" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(2): " + str(sig))
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[1].flush_scan_cache()
def test_p2p_channel_vht80p80(dev):
"""P2P group formation and VHT 80+80 MHz channel"""
try:
set_country("US", dev[0])
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
i_freq=5180,
i_freq2=5775,
i_max_oper_chwidth=160,
i_ht40=True, i_vht=True,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq < 5000:
raise Exception("Unexpected channel %d MHz - did not follow 5 GHz preference" % freq)
sig = dev[1].group_request("SIGNAL_POLL").splitlines()
if "FREQUENCY=5180" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(1): " + str(sig))
if "WIDTH=80+80 MHz" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(2): " + str(sig))
if "CENTER_FRQ1=5210" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(3): " + str(sig))
if "CENTER_FRQ2=5775" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(4): " + str(sig))
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[1].flush_scan_cache()
def test_p2p_channel_vht80p80_autogo(dev):
"""P2P autonomous GO and VHT 80+80 MHz channel"""
addr0 = dev[0].p2p_dev_addr()
try:
set_country("US", dev[0])
if "OK" not in dev[0].global_request("P2P_GROUP_ADD vht freq=5180 freq2=5775"):
raise Exception("Could not start GO")
ev = dev[0].wait_global_event(["P2P-GROUP-STARTED"], timeout=5)
if ev is None:
raise Exception("GO start up timed out")
dev[0].group_form_result(ev)
pin = dev[1].wps_read_pin()
dev[0].p2p_go_authorize_client(pin)
dev[1].global_request("P2P_CONNECT " + addr0 + " " + pin + " join freq=5180")
ev = dev[1].wait_global_event(["P2P-GROUP-STARTED"], timeout=10)
if ev is None:
raise Exception("Peer did not get connected")
dev[1].group_form_result(ev)
sig = dev[1].group_request("SIGNAL_POLL").splitlines()
if "FREQUENCY=5180" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(1): " + str(sig))
if "WIDTH=80+80 MHz" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(2): " + str(sig))
if "CENTER_FRQ1=5210" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(3): " + str(sig))
if "CENTER_FRQ2=5775" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(4): " + str(sig))
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[1].flush_scan_cache()
def test_p2p_channel_vht80_autogo(dev):
"""P2P autonomous GO and VHT 80 MHz channel"""
addr0 = dev[0].p2p_dev_addr()
try:
set_country("US", dev[0])
if "OK" not in dev[0].global_request("P2P_GROUP_ADD vht freq=5180 max_oper_chwidth=80"):
raise Exception("Could not start GO")
ev = dev[0].wait_global_event(["P2P-GROUP-STARTED"], timeout=5)
if ev is None:
raise Exception("GO start up timed out")
dev[0].group_form_result(ev)
pin = dev[1].wps_read_pin()
dev[0].p2p_go_authorize_client(pin)
dev[1].global_request("P2P_CONNECT " + addr0 + " " + pin + " join freq=5180")
ev = dev[1].wait_global_event(["P2P-GROUP-STARTED"], timeout=10)
if ev is None:
raise Exception("Peer did not get connected")
dev[1].group_form_result(ev)
sig = dev[1].group_request("SIGNAL_POLL").splitlines()
if "FREQUENCY=5180" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(1): " + str(sig))
if "WIDTH=80 MHz" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(2): " + str(sig))
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[1].flush_scan_cache()
def test_p2p_channel_vht80p80_persistent(dev):
"""P2P persistent group re-invocation and VHT 80+80 MHz channel"""
addr0 = dev[0].p2p_dev_addr()
form(dev[0], dev[1])
try:
set_country("US", dev[0])
invite(dev[0], dev[1], extra="vht freq=5745 freq2=5210")
[go_res, cli_res] = check_result(dev[0], dev[1])
sig = dev[1].group_request("SIGNAL_POLL").splitlines()
if "FREQUENCY=5745" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(1): " + str(sig))
if "WIDTH=80+80 MHz" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(2): " + str(sig))
if "CENTER_FRQ1=5775" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(3): " + str(sig))
if "CENTER_FRQ2=5210" not in sig:
raise Exception("Unexpected SIGNAL_POLL value(4): " + str(sig))
remove_group(dev[0], dev[1])
finally:
set_country("00")
dev[1].flush_scan_cache()
def test_p2p_channel_drv_pref_go_neg(dev):
"""P2P GO Negotiation with GO device channel preference"""
dev[0].global_request("SET get_pref_freq_list_override 3:2417 4:2422")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq != 2417:
raise Exception("Unexpected channel selected: %d" % freq)
remove_group(dev[0], dev[1])
def test_p2p_channel_drv_pref_go_neg2(dev):
"""P2P GO Negotiation with P2P client device channel preference"""
dev[0].global_request("SET get_pref_freq_list_override 3:2417,2422")
dev[1].global_request("SET get_pref_freq_list_override 4:2422")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq != 2422:
raise Exception("Unexpected channel selected: %d" % freq)
remove_group(dev[0], dev[1])
def test_p2p_channel_drv_pref_go_neg3(dev):
"""P2P GO Negotiation with GO device channel preference"""
dev[1].global_request("SET get_pref_freq_list_override 3:2417,2427 4:2422")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=0,
r_dev=dev[1], r_intent=15,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq != 2417:
raise Exception("Unexpected channel selected: %d" % freq)
remove_group(dev[0], dev[1])
def test_p2p_channel_drv_pref_go_neg4(dev):
"""P2P GO Negotiation with P2P client device channel preference"""
dev[0].global_request("SET get_pref_freq_list_override 3:2417,2422,5180")
dev[1].global_request("P2P_SET override_pref_op_chan 115:36")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq != 2417:
raise Exception("Unexpected channel selected: %d" % freq)
remove_group(dev[0], dev[1])
def test_p2p_channel_drv_pref_go_neg5(dev):
"""P2P GO Negotiation with P2P client device channel preference"""
dev[0].global_request("SET get_pref_freq_list_override 3:2417")
dev[1].global_request("SET get_pref_freq_list_override 4:2422")
dev[1].global_request("P2P_SET override_pref_op_chan 115:36")
[i_res, r_res] = go_neg_pin_authorized(i_dev=dev[0], i_intent=15,
r_dev=dev[1], r_intent=0,
test_data=False)
check_grpform_results(i_res, r_res)
freq = int(i_res['freq'])
if freq != 2417:
raise Exception("Unexpected channel selected: %d" % freq)
remove_group(dev[0], dev[1])
def test_p2p_channel_drv_pref_autogo(dev):
"""P2P autonomous GO with driver channel preference"""
dev[0].global_request("SET get_pref_freq_list_override 3:2417,2422,5180")
res_go = autogo(dev[0])
if res_go['freq'] != "2417":
raise Exception("Unexpected operating frequency: " + res_go['freq'])
| [
"[email protected]"
]
| |
260b0f3bafcd890c3109cd70051c8239142693c3 | 27d670c976ce0771087008c74e8960e25c4b2b01 | /src/main/resources/script_templates/Hadim_Scripts/Preprocessing/Preprocess_TIRF_Images.py | bd2a01cd42ed7c27cae7045060318000cfffdb88 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"MIT"
]
| permissive | hadim/Hadim_Scripts | 2f068ead5df4a299e2ff871ebff4ddc6fdb4a39f | dc44a12e294402ac3c76062197f391dad1ebcaaf | refs/heads/master | 2022-12-31T08:47:49.386465 | 2020-10-07T15:51:29 | 2020-10-07T15:51:29 | 29,701,480 | 2 | 3 | BSD-3-Clause | 2019-05-17T03:06:59 | 2015-01-22T21:35:15 | Python | UTF-8 | Python | false | false | 1,592 | py | # @Float(label="Sigma 1", value=6) sigma1
# @Float(label="Sigma 2", value=2) sigma2
# @Float(label="Gaussian Filter Size", value=50) gaussian_filter_size
# @Boolean(label="Iterate XY plane (reduce memory usage)", value=False) iterate_plane
# @Boolean(label="Save Preprocessed Image", value=False) save_image
# @Dataset data
# @CommandService cs
# @ModuleService ms
# @PluginService ps
from sc.fiji.plugin.hadimscripts import DOGFilterCommand
from sc.fiji.plugin.hadimscripts import PseudoFlatFieldCorrectionCommand
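# Helper that runs a SciJava command through the module service; when
# showOutputs is True the postprocessors are attached so the command's
# outputs (e.g. the resulting Dataset) are displayed in the UI.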
def runCommand(inputs, command, showOutputs=False):
from org.scijava.module.process import PreprocessorPlugin
from org.scijava.module.process import PostprocessorPlugin
command = cs.getCommand(command)
pre = ps.createInstancesOfType(PreprocessorPlugin)
post = ps.createInstancesOfType(PostprocessorPlugin)
if showOutputs:
module = ms.waitFor(ms.run(command, pre, post, inputs))
else:
module = ms.waitFor(ms.run(command, pre, None, inputs))
return module
inputs = {"input": data,
"sigma1": sigma1,
"sigma2": sigma2,
"normalizeIntensity": True,
"saveImage": False,
"suffix": ""}
module = runCommand(inputs, DOGFilterCommand, showOutputs=False)
filtered_dataset = module.getOutput("output")
inputs = {"input": filtered_dataset,
"gaussianFilterSize": gaussian_filter_size,
"normalizeIntensity": True,
"iteratePlane": iterate_plane,
"saveImage": save_image,
"suffix": "-Preprocessed"}
module = runCommand(inputs, PseudoFlatFieldCorrectionCommand, showOutputs=True)
| [
"[email protected]"
]
| |
53603194cbdad317cab36c3658300ab37fce1e8f | 658e2e3cb8a4d5343a125f7deed19c9ebf06fa68 | /course_DE/data-engineering-nanodegree-master/4-data-pipelines-with-airflow/L2_exercises/exercise3.py | f0b82c490da3c700ec1b44526eff4e1861fa0d42 | []
| no_license | yennanliu/analysis | 3f0018809cdc2403f4fbfe4b245df1ad73fa08a5 | 643ad3fed41961cddd006fadceb0e927f1db1f23 | refs/heads/master | 2021-01-23T21:48:58.572269 | 2020-10-13T22:47:12 | 2020-10-13T22:47:12 | 57,648,676 | 11 | 9 | null | null | null | null | UTF-8 | Python | false | false | 2,315 | py | #Instructions
#1 - Modify the bikeshare DAG to load data month by month, instead of loading it all at once, every time.
#2 - Use time partitioning to parallelize the execution of the DAG.
import datetime
import logging
from airflow import DAG
from airflow.contrib.hooks.aws_hook import AwsHook
from airflow.hooks.postgres_hook import PostgresHook
from airflow.operators.postgres_operator import PostgresOperator
from airflow.operators.python_operator import PythonOperator
import sql_statements
def load_trip_data_to_redshift(*args, **kwargs):
aws_hook = AwsHook("aws_credentials")
credentials = aws_hook.get_credentials()
redshift_hook = PostgresHook("redshift")
    # "ds" is the execution date stamp (YYYY-MM-DD) that Airflow passes in
    # via the task context, so each run copies only its own month of data
    execution_date = datetime.datetime.strptime(kwargs["ds"], '%Y-%m-%d')
sql_stmt = sql_statements.COPY_MONTHLY_TRIPS_SQL.format(
credentials.access_key,
credentials.secret_key,
year=execution_date.year,
month=execution_date.month
)
redshift_hook.run(sql_stmt)
def load_station_data_to_redshift(*args, **kwargs):
aws_hook = AwsHook("aws_credentials")
credentials = aws_hook.get_credentials()
redshift_hook = PostgresHook("redshift")
sql_stmt = sql_statements.COPY_STATIONS_SQL.format(
credentials.access_key,
credentials.secret_key,
)
redshift_hook.run(sql_stmt)
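# One scheduled run per month between start_date and end_date; Airflow
# backfills these runs (serially, because max_active_runs=1), which gives
# the month-by-month time partitioning the exercise asks for.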
dag = DAG(
'lesson2.exercise3',
start_date=datetime.datetime(2018, 1, 1, 0, 0, 0, 0),
end_date=datetime.datetime(2019, 1, 1, 0, 0, 0, 0),
schedule_interval='@monthly',
max_active_runs=1
)
create_trips_table = PostgresOperator(
task_id="create_trips_table",
dag=dag,
postgres_conn_id="redshift",
sql=sql_statements.CREATE_TRIPS_TABLE_SQL
)
copy_trips_task = PythonOperator(
task_id='load_trips_from_s3_to_redshift',
dag=dag,
python_callable=load_trip_data_to_redshift,
provide_context=True
)
create_stations_table = PostgresOperator(
task_id="create_stations_table",
dag=dag,
postgres_conn_id="redshift",
sql=sql_statements.CREATE_STATIONS_TABLE_SQL,
)
copy_stations_task = PythonOperator(
task_id='load_stations_from_s3_to_redshift',
dag=dag,
python_callable=load_station_data_to_redshift,
)
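# ">>" declares task ordering: each table must exist before its COPY runs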
create_trips_table >> copy_trips_task
create_stations_table >> copy_stations_task
| [
"[email protected]"
]
| |
76a9353851b054c98e8378600f4307a284e242c9 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02662/s484498630.py | e29ce3a687f8624090192e32bfac0f5f683a21d9 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 417 | py | import numpy as np
n,s = map(int,input().split())
a = list(map(int,input().split()))
dp = np.zeros(s+1, dtype=np.int64)
"""
dp[i][j] := i็ช็ฎใพใงใ้ธใใงใๅใใกใใใฉjใฎใจใใฎๅ ดๅใฎๆฐ
้ท็งป โ 1ๅขใใใจๅ
จไฝ้ๅใซ้ธใถ, Aใซใฏ้ธใฐใชใ, Aใซ้ธใถใฎ3็จฎ้ก.
"""
mod = 998244353
dp[0] = 1
for i in range(n):
p = (dp * 2) % mod
p[a[i]:] += dp[:-a[i]]
dp = p % mod
print(dp[s]) | [
"[email protected]"
]
| |
37533e7e38a05044b0bdae8d1f37b6698f44c722 | b53816386ea9ec52e7fce862a06fd065928c0083 | /EpExtractor.py | aa1b30688f6534562cb53a1d826dfc6f5830a2be | []
| no_license | grburgess/mnEpFit | 3e3bcbfca1e244029f04144e67e3d86a65853219 | c7008f3a71ebd9dade7e4746507024868ede7d3a | refs/heads/master | 2021-05-30T01:21:23.896379 | 2015-05-20T07:50:46 | 2015-05-20T07:50:46 | 35,933,877 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 742 | py | import json
import matplotlib.pyplot as plt
class EpExtractor(object):
def __init__(self,file,ext_name=""):
self.ext_name = ext_name
self._ReadFile(file)
self._PrepareData()
print self._tbins[0]
plt.loglog(self._tbins,self._Ep)
self._WriteJSON()
def _ReadFile(self,file):
'''
Virtual function
'''
pass
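    def _PrepareData(self):
        '''
        Virtual function: subclasses set self._Ep, self._EpErr and
        self._tbins here before they are plotted and written out
        '''
        pass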
def _WriteJSON(self):
outdata = {"Ep":self._Ep,\
"EpErr":self._EpErr,\
"tbins":self._tbins}
f = open("%sep_save_file.json" % self.ext_name,'w')
json.dump(outdata,f) # Write to a JSON file
f.close()
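# Minimal usage sketch (hypothetical subclass; names are illustrative only):
#   class MyEpExtractor(EpExtractor):
#       def _ReadFile(self, file): ...    # parse the spectral fit file
#       def _PrepareData(self): ...       # fill _Ep, _EpErr and _tbins
#   MyEpExtractor("fits.txt", ext_name="grb_")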
| [
"[email protected]"
]
| |
58ddad151756db23c9a9cf8a848d1e2796b0cd70 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_elderly.py | e148c0716dce29ed715268660229fed3e87d894e | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 316 | py |
#class header
class _ELDERLY():
def __init__(self,):
self.name = "ELDERLY"
self.definitions = [u'old people considered as a group: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
f9a2749282d1c35368f740f61adfa1e06a07cfa0 | 131caeecc070839555b95382fe9c6ea77a618dce | /.history/Classiles/light_switch_20210614212521.py | c23dce879e163a83c076bdb9319ed48fee6b9b64 | [
"Unlicense"
]
| permissive | minefarmer/Coding101-OOP | f128e34c95f5362b3d9a53bbac3d862c3f256263 | d5655977559e3bd1acf6a4f185a6121cc3b05ce4 | refs/heads/main | 2023-05-22T18:42:37.769345 | 2021-06-18T00:28:06 | 2021-06-18T00:28:06 | 376,620,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 291 | py | """[Practice: Light Switch]
Variable name class name
("instance")
  ice        =        Ice()
"""
class Light:
    def toggle(self):
        self.on = not self.on
    def __init__(self):
        self.on = False
    def is_on(self):
        return self.on
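# Usage sketch:
#   light = Light()
#   light.is_on()   # False
#   light.toggle()
#   light.is_on()   # True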
| [
"[email protected]"
]
| |
c6486839fa928e444c6ac41aad0403ed310a77f4 | 67d9310435d8f99fc1c76f1374da97e4af1137a6 | /myProject/settings.py | 3587bb0cc665c5955134e27fe93c8dfe4c86bb87 | []
| no_license | coralisland-git/Account-app | a99c400b9ceb469626843e5b6441700f72e2c5ef | dbac5f677860fdbc03ebf4878c030542746b214f | refs/heads/master | 2021-06-18T13:09:07.360079 | 2017-06-23T15:18:35 | 2017-06-23T15:18:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,946 | py | """
Django settings for myProject project.
Generated by 'django-admin startproject' using Django 1.8.7.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'a6-gzpe_89pwr@0)s2bv_%92*p&*sm11s6(nui7+ngpqn!1o5n'
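# A common hardening step (sketch): read the key from the environment so it
# never has to live in source control, e.g.
# SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)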
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'mydemo',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'myProject.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'myProject.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static")
]
ADMIN_NAME = "rubyadmin"
ADMIN_PASSWORD = "rubyadmin"
adminName = "ruby"
adminpassword = "rubyhome1127password" | [
"[email protected]"
]
| |
ba4c496e73c24377a0a74528dc5b31c715cbf133 | 10717fe6f68c4ee9bcf27ee62e89581f4a030b8e | /extractor/douyutv.py | 1560c79cb11ad6615149dcf68205d73daa098fda | []
| no_license | HagerHosny199/Testing_Project | ff7f9a54b7a213c9d9ade0c5192845c2a29adc8b | 9bc170263e239cc24ccfb2aa33b9913ff799ffe9 | refs/heads/master | 2020-05-17T20:57:01.750640 | 2019-05-08T22:13:06 | 2019-05-08T22:13:06 | 183,954,736 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,876 | py | # coding: utf-8
from __future__ import unicode_literals
import time
import hashlib
import re
from .common import InfoExtractor
from utils import (
ExtractorError,
unescapeHTML,
unified_strdate,
urljoin,
)
class DouyuTVIE(InfoExtractor):
    IE_DESC = '斗鱼'
_VALID_URL = r'https?://(?:www\.)?douyu(?:tv)?\.com/(?:[^/]+/)*(?P<id>[A-Za-z0-9]+)'
_TESTS = [{
'url': 'http://www.douyutv.com/iseven',
'info_dict': {
'id': '17732',
'display_id': 'iseven',
'ext': 'flv',
            'title': 're:^清晨醒脑!根本停不下来! [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'description': r're:.*m7show@163\.com.*',
'thumbnail': r're:^https?://.*\.jpg$',
'uploader': '7ๅธๅ
',
'is_live': True,
},
'params': {
'skip_download': True,
},
}, {
'url': 'http://www.douyutv.com/85982',
'info_dict': {
'id': '85982',
'display_id': '85982',
'ext': 'flv',
            'title': 're:^小漠从零单排记!——CSOL2躲猫猫 [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'description': 'md5:746a2f7a253966a06755a912f0acc0d2',
'thumbnail': r're:^https?://.*\.jpg$',
            'uploader': 'douyu小漠',
'is_live': True,
},
'params': {
'skip_download': True,
},
'skip': 'Room not found',
}, {
'url': 'http://www.douyutv.com/17732',
'info_dict': {
'id': '17732',
'display_id': '17732',
'ext': 'flv',
            'title': 're:^清晨醒脑!根本停不下来! [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'description': r're:.*m7show@163\.com.*',
'thumbnail': r're:^https?://.*\.jpg$',
            'uploader': '7师傅',
'is_live': True,
},
'params': {
'skip_download': True,
},
}, {
'url': 'http://www.douyu.com/xiaocang',
'only_matching': True,
}, {
# \"room_id\"
'url': 'http://www.douyu.com/t/lpl',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
if video_id.isdigit():
room_id = video_id
else:
page = self._download_webpage(url, video_id)
room_id = self._html_search_regex(
r'"room_id\\?"\s*:\s*(\d+),', page, 'room id')
# Grab metadata from mobile API
room = self._download_json(
'http://m.douyu.com/html5/live?roomId=%s' % room_id, video_id,
note='Downloading room info')['data']
# 1 = live, 2 = offline
if room.get('show_status') == '2':
raise ExtractorError('Live stream is offline', expected=True)
# Grab the URL from PC client API
# The m3u8 url from mobile API requires re-authentication every 5 minutes
tt = int(time.time())
signContent = 'lapi/live/thirdPart/getPlay/%s?aid=pcclient&rate=0&time=%d9TUk5fjjUjg9qIMH3sdnh' % (room_id, tt)
sign = hashlib.md5(signContent.encode('ascii')).hexdigest()
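        # The request is authenticated with an MD5 digest over the API path,
        # the query string and a fixed salt; the digest is sent in the "auth"
        # header together with the matching "time" value below.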
video_url = self._download_json(
'http://coapi.douyucdn.cn/lapi/live/thirdPart/getPlay/' + room_id,
video_id, note='Downloading video URL info',
query={'rate': 0}, headers={
'auth': sign,
'time': str(tt),
'aid': 'pcclient'
})['data']['live_url']
title = self._live_title(unescapeHTML(room['room_name']))
description = room.get('show_details')
thumbnail = room.get('room_src')
uploader = room.get('nickname')
return {
'id': room_id,
'display_id': video_id,
'url': video_url,
'title': title,
'description': description,
'thumbnail': thumbnail,
'uploader': uploader,
'is_live': True,
}
class DouyuShowIE(InfoExtractor):
_VALID_URL = r'https?://v(?:mobile)?\.douyu\.com/show/(?P<id>[0-9a-zA-Z]+)'
_TESTS = [{
'url': 'https://v.douyu.com/show/rjNBdvnVXNzvE2yw',
'md5': '0c2cfd068ee2afe657801269b2d86214',
'info_dict': {
'id': 'rjNBdvnVXNzvE2yw',
'ext': 'mp4',
            'title': '陈一发儿:砒霜 我有个室友系列!04-01 22点场',
'duration': 7150.08,
'thumbnail': r're:^https?://.*\.jpg$',
            'uploader': '陈一发儿',
'uploader_id': 'XrZwYelr5wbK',
'uploader_url': 'https://v.douyu.com/author/XrZwYelr5wbK',
'upload_date': '20170402',
},
}, {
'url': 'https://vmobile.douyu.com/show/rjNBdvnVXNzvE2yw',
'only_matching': True,
}]
def _real_extract(self, url):
url = url.replace('vmobile.', 'v.')
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
room_info = self._parse_json(self._search_regex(
r'var\s+\$ROOM\s*=\s*({.+});', webpage, 'room info'), video_id)
video_info = None
for trial in range(5):
# Sometimes Douyu rejects our request. Let's try it more times
try:
video_info = self._download_json(
'https://vmobile.douyu.com/video/getInfo', video_id,
query={'vid': video_id},
headers={
'Referer': url,
'x-requested-with': 'XMLHttpRequest',
})
break
except ExtractorError:
self._sleep(1, video_id)
if not video_info:
raise ExtractorError('Can\'t fetch video info')
formats = self._extract_m3u8_formats(
video_info['data']['video_url'], video_id,
entry_protocol='m3u8_native', ext='mp4')
upload_date = unified_strdate(self._html_search_regex(
            r'<em>上传时间:</em><span>([^<]+)</span>', webpage,
'upload date', fatal=False))
uploader = uploader_id = uploader_url = None
mobj = re.search(
r'(?m)<a[^>]+href="/author/([0-9a-zA-Z]+)".+?<strong[^>]+title="([^"]+)"',
webpage)
if mobj:
uploader_id, uploader = mobj.groups()
uploader_url = urljoin(url, '/author/' + uploader_id)
return {
'id': video_id,
'title': room_info['name'],
'formats': formats,
'duration': room_info.get('duration'),
'thumbnail': room_info.get('pic'),
'upload_date': upload_date,
'uploader': uploader,
'uploader_id': uploader_id,
'uploader_url': uploader_url,
}
| [
"[email protected]"
]
| |
a52be94d2d29ec70efb3833b2e39a16938664fb2 | 1beb0d3a73a97c5367cc54d37b34a7536b975d68 | /object/LocateDealOrderObject.py | 3ce43eb2ac775bea904fa2525cfb92d284d3714e | []
| no_license | Hardworking-tester/HuaYing | a24aa271afe81c95241818586b1d1d5abd6b4282 | 4dd065806f20bfdec885fa2b40f2c22e5a8d4f15 | refs/heads/master | 2021-06-03T10:06:33.604494 | 2017-06-22T09:32:13 | 2017-06-22T09:32:13 | 42,507,030 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,034 | py | #encoding:utf-8
from Data import get_number_by_data,ReadExcel
from selenium import webdriver
from object import OperateDealOrderObject
from selenium.webdriver.common.by import By
from PublicMethod.LocateElementCommonClass import CommonClass
class LocateDealOrderObject():
def getElementList(self,br,testcaseId):
testcase_id=testcaseId
excel=ReadExcel.ReadExcel()
excel_path="F:\\pytest\\xebest-autotest\\Data\\deal_order.xls"
excel_sheet=excel.getTableBySheetName(excel_path,"objname_locatemethod_locatedata")
rows=excel_sheet.nrows
element_list=[]
for i in range(1,rows):
element_list.append(excel_sheet.cell_value(i,0))
for element in element_list:
            # Call the method that decides whether this element needs to be operated on
if self.judgeElementIsOperate(element,testcase_id):
                # Call the method that locates the element and performs the operation
self.getLocateMethodAndData(br,element)
else:
print ("ๅ
็ด ๏ผ%sไธ้่ฆๆไฝ" %element)
def judgeElementIsOperate(self,element,testcase_id):
excel=ReadExcel.ReadExcel()
excel_path="F:\\pytest\\xebest-autotest\\Data\\deal_order.xls"
excel_sheet=excel.getTableBySheetName(excel_path,"isoperateElement")
        # Get the row number of the element
row_index=get_number_by_data.GetRowAndColNumber().getRowAndColNumber(excel_path,"isoperateElement",element)[0]
        # Get the column number of the test case
col_index=get_number_by_data.GetRowAndColNumber().getRowAndColNumber(excel_path,"isoperateElement",testcase_id)[1]
operateFlag=excel_sheet.cell_value(row_index,col_index)
if operateFlag=="Y":
return True
else:
return False
def getLocateMethodAndData(self,br,element):
element=element
excel=ReadExcel.ReadExcel()
object_excel_path="F:\\pytest\\xebest-autotest\\Data\\deal_order.xls"
object_sheet=excel.getTableBySheetName(object_excel_path,"objname_locatemethod_locatedata")
        # Get the row of the element
row_index=get_number_by_data.GetRowAndColNumber().getRowAndColNumber(object_excel_path,"objname_locatemethod_locatedata",element)[0]
        # Get the column that holds the locate method
        locateMethod_Key=u"定位方式简述"
locateMethod_colNumber=get_number_by_data.GetRowAndColNumber().getRowAndColNumber(object_excel_path,"objname_locatemethod_locatedata",locateMethod_Key)[1]
        # Get the column that holds the data needed for locating
        locateData_key=u"定位所需数据"
locateData_colNumber=get_number_by_data.GetRowAndColNumber().getRowAndColNumber(object_excel_path,"objname_locatemethod_locatedata",locateData_key)[1]
        # Get the column with the flag that says whether the element needs a second locate step
        isSecondLocate_key=u"是否需要二次定位"
SecondLocate_key_colNumber=get_number_by_data.GetRowAndColNumber().getRowAndColNumber(object_excel_path,"objname_locatemethod_locatedata",isSecondLocate_key)[1]
        # Store the locate method and locate data read from the Excel sheet, plus the second-locate flag
old_how=object_sheet.cell_value(row_index,locateMethod_colNumber)
old_what=object_sheet.cell_value(row_index,locateData_colNumber)
secondLocate=object_sheet.cell_value(row_index,SecondLocate_key_colNumber)
locate_method_dict={'id':By.ID,'css':By.CSS_SELECTOR,'xpath':By.XPATH,'linktext':By.LINK_TEXT}
if not(CommonClass().judgeSecondLocateElement(secondLocate)):
if old_how=="switchHandle":
index=int(old_what)
CommonClass().switchHandle(br,index)
else:
new_how=locate_method_dict[old_how]
located_element=CommonClass().findElement(br,new_how,old_what)
OperateDealOrderObject.OperateDealOrderObject().operateLocatedElement(element,located_element)
else:
new_first_how=locate_method_dict[old_how]
            # Get the column that holds the second locate method
            secondLocatemethod_key=u"二次定位的方式"
secondLocate_colNumber=get_number_by_data.GetRowAndColNumber().getRowAndColNumber(object_excel_path,"objname_locatemethod_locatedata",secondLocatemethod_key)[1]
old_second_how=object_sheet.cell_value(row_index,secondLocate_colNumber)
            # Get the data needed for the second locate step
            secondLocateData_key=u"二次定位所需数据"
secondLocateData_colNumber=get_number_by_data.GetRowAndColNumber().getRowAndColNumber(object_excel_path,"objname_locatemethod_locatedata",secondLocateData_key)[1]
secondLocate_what=object_sheet.cell_value(row_index,secondLocateData_colNumber)
new_second_how=locate_method_dict[old_second_how]
second_located_element=CommonClass().locateElementIndirectly(br,new_first_how,old_what,new_second_how,secondLocate_what)
OperateDealOrderObject.OperateDealOrderObject().operateLocatedElement(element,second_located_element)
| [
"[email protected]"
]
| |
1e8d43e99c4a6c18af0fa23b95b9988a735d38cb | 437e905d8c214dc25c559b1dc03eaf9f0c85326f | /is28/beresnev28/beresnev_lr/zadanie_2-16.py | c09833e26b9950e9eca8f7d7c177ee605c551572 | []
| no_license | AnatolyDomrachev/karantin | 542ca22c275e39ef3491b1c0d9838e922423b5a9 | 0d9f60207e80305eb713fd43774e911fdbb9fbad | refs/heads/master | 2021-03-29T03:42:43.954727 | 2020-05-27T13:24:36 | 2020-05-27T13:24:36 | 247,916,390 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | a=[[1,2,3,4],[5,6,7,8],[9,10,11,12],[13,14,15,16]]
b=[[],[],[],]
print(a[0])
print(a[1])
print(a[2])
print(a[3])
i=(int(input('enter the row number = '))-1)
k=(int(input('enter the column number = '))-1)
for f in range(4):
a[f][k]=0
for t in range(4):
a[i][t]=0
print(a[0])
print(a[1])
print(a[2])
print(a[3])
| [
"[email protected]"
]
| |
99520ba9383618cd1f2b8302fea12bb4177eff2a | 8f8ac99fd3ed9ceb36778b404f6fdd0b6899d3f4 | /pyobjc-framework-FileProviderUI/PyObjCTest/test_fpuiactionextensioncontext.py | 5cce4d7ed43ae4829e41889ea9bd508810e4738a | [
"MIT"
]
| permissive | strogo/pyobjc | ac4201c7742eb75348328eeecb7eedf4e3458de3 | 2579c5eaf44b0c5af77ee195c417d2c65e72dfda | refs/heads/master | 2023-07-13T00:41:56.448005 | 2021-08-24T06:42:53 | 2021-08-24T06:42:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 698 | py | import FileProviderUI
from PyObjCTools.TestSupport import TestCase, min_os_level
class TestFPUIActionExtensionContext(TestCase):
def test_constants(self):
self.assertEqual(FileProviderUI.FPUIExtensionErrorCodeUserCancelled, 0)
self.assertEqual(FileProviderUI.FPUIExtensionErrorCodeFailed, 1)
@min_os_level("10.15")
def test_constants10_15(self):
self.assertIsInstance(FileProviderUI.FPUIErrorDomain, str)
@min_os_level("10.15")
def test_methods10_15(self):
self.assertArgIsBlock(
FileProviderUI.FPUIActionExtensionContext.completeRequestReturningItems_completionHandler_, # noqa: B950
1,
b"vZ",
)
| [
"[email protected]"
]
| |
4feae1113ac543f26e086b2e4dd8be4307a102f4 | 08bb74d61a8bd8da07784f1146b6a2461f56b02a | /demo_python/p_1/tm.py | 406a15bead40a566d612b61596980efdcf5e7757 | []
| no_license | hphp/demo | 6ee25df2e6b4f33e5c2c4b276ec394ea1a0f817c | f83cd0ce569d92410ca1320e229cde0e88561ec8 | refs/heads/master | 2021-05-16T02:04:47.979776 | 2016-09-19T13:16:10 | 2016-09-19T13:16:10 | 9,994,208 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,779 | py | #!/usr/local/bin/python
import os
import sys
import time
import _mysql
def hp_query(query):
con = None
version = -1
try :
con = _mysql.connect('localhost','root','','ip_distribution')
con.query(query)
result = con.use_result()
v = []
while True:
rows = result.fetch_row()
if not rows :
break
v.append(rows[0])
for i in enumerate(v):
version = i[1][0]
print version
except _mysql.Error, e :
print "Error %d: %s" % (e.args[0] , e.args[1])
sys.exit(1)
finally:
if con:
con.close()
return version
while True :
version = -1
version = hp_query("select max(version) from configure where state = 1 ")
print 'hi' + format(version)
if version < 0 :
time.sleep(6)
print "no state = 1 -- pause"
continue
'''
query = "select count(*) from step_display where version = %d and if_stored = 0 " % (version)
cnt = hp_query(query)
if(cnt > 0)
'''
file = time.gmtime()
file_name = format(file.tm_year) + format(file.tm_mon) + format(file.tm_mday) + format(file.tm_hour) + format(file.tm_min) + format(file.tm_sec)
print file_name
out_file = "/root/happyhan/ip_distribution/tm.out" + file_name
print out_file
f_tm_in = open("/root/happyhan/ip_distribution/tm.in","r+")
content = format(version) + "\n100 100 0.85\n4\n1000\n0\n1\n1\n"
f_tm_in.write(content)
f_tm_in = open("/root/happyhan/ip_distribution/tm.in","r")
print f_tm_in.readlines()
cmd_2 = "/root/happyhan/ip_distribution/tm > " + out_file + " < /root/happyhan/ip_distribution/tm.in"
print cmd_2
os.system(cmd_2)
time.sleep(6)
| [
"[email protected]"
]
| |
e7c4de9a6b3d90e5b4be81f8597dbcefe813443e | b980c0bae0cff8533253c135449beb6e09759dca | /Grader_Exercise/08_Dict/08_Dict_23.py | 82d1da939f42f32157d3dff8a5303588ffde5419 | []
| no_license | manhanton/COM-PROG | 1f76985b3f3fea54057a0da1d3911dc91998c5be | 7a4f2c62ecd6677ec1f818a5d115aa0fb182b3a2 | refs/heads/main | 2023-06-18T10:25:26.448133 | 2021-07-16T07:46:45 | 2021-07-16T07:46:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | def reverse(d):
nd = {}
for key,value in d.items() : nd[value] = key
return nd
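# Note: reverse() assumes the mapping is one-to-one; if two names shared a
# number, the later entry would overwrite the earlier one in the reversed dict.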
d = {}
for i in range(int(input())) :
x = input().split()
d[x[0]+' '+x[1]] = x[2]
rd = reverse(d)
for i in range(int(input())) :
y = input()
if y in d.keys() : print('%s --> %s' % (y,d[y]))
elif y in rd.keys() : print('%s --> %s' % (y,rd[y]))
else : print('%s --> %s' % (y,'Not found')) | [
"[email protected]"
]
| |
0429e2284d15b50a7b4df5b5e76249314c694204 | 2d01e5d2da7bf0836bd0ebb9185469e041e0f577 | /script/script_scrape_symbol_ard_marketwatch.py | 2fe5444add389a69002f5f70bd7ee5400c2687ef | []
| no_license | kozzion/rivernode_finance | 434c0f668a3131ad0bf94b9d57c1886c32ac6d5e | 17e88f990c9f5e65dc3fe892a28c163a4e6769f2 | refs/heads/master | 2022-11-15T03:44:14.936657 | 2020-06-22T15:44:22 | 2020-06-22T15:44:22 | 254,896,067 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,538 | py | import sys
import os
import json
import hashlib
sys.path.append(os.path.abspath('../../rivernode_core'))
from rivernode_core.system_config import SystemConfig
from rivernode_core.persistency.table_object_loader_disk import TableObjectLoaderDisk
sys.path.append(os.path.abspath('../../rivernode_finance'))
from rivernode_finance.client_marketwatch import ClientMarketwatch
from rivernode_finance.persistency_quote import PersistencyQuote
system_config = SystemConfig()
loader_table = TableObjectLoaderDisk(system_config.load_path_dir_database())
pq = PersistencyQuote(loader_table.load_table_for_list_key(['trading']))
client = ClientMarketwatch()
## scrape
# client.scape_symbol(pq)
## clean up
# pq.delete_tag('sector_')
# pq.replace_tag('country_trade_de&iso=xber', 'country_trade_de')
# pq.replace_tag('country_trade_de&iso=xfra', 'country_trade_de')
# pq.replace_tag('country_trade_de&iso=xstu', 'country_trade_de')
# pq.replace_tag('country_trade_de&iso=xdus', 'country_trade_de')
# pq.replace_tag('country_trade_de&iso=xham', 'country_trade_de')
# pq.replace_tag('country_trade_de&iso=xhan', 'country_trade_de')
# pq.replace_tag('country_trade_de&iso=xmun', 'country_trade_de')
pq.add_tag_default()
# # country_ar = 'argentina'
list_symbol = pq.load_list_symbol_for_tag_all(['country_work_pe', 'exchange_xnys'])
for symbol in list_symbol:
print(symbol)
list_symbol = pq.load_list_symbol_for_tag_all(['country_work_pe', 'exchange_xnas'])
for symbol in list_symbol:
print(symbol)
| [
"[email protected]"
]
| |
a74dca56a62b18a2bb84892722b4cfe1650e9df0 | 2709e527c217a8264b48e2f549b3284e5ccb9551 | /0x0F-python-object_relational_mapping/relationship_state.py | d28e8a488307b18586655ba4e87563fc4629fed4 | []
| no_license | kwhit2/holbertonschool-higher_level_programming | 489d6b88ed14b9f2efd4637d8a71ae569b5027f6 | 2660516b12fee0f03c4025ba1d8d2762a8880a06 | refs/heads/main | 2023-05-22T17:57:02.035803 | 2021-06-12T18:43:54 | 2021-06-12T18:43:54 | 319,346,696 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 533 | py | #!/usr/bin/python3
""" Improved the file model_state.py with relationship class attribute """
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship
Base = declarative_base()
class State(Base):
""" class State that inherits from Base """
__tablename__ = 'states'
id = Column(Integer, primary_key=True, nullable=False)
name = Column(String(128), nullable=False)
cities = relationship('City', backref='state')
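# Usage sketch (assumes a City model whose state_id is a ForeignKey to states.id):
#   california = State(name="California")
#   california.cities   # -> list of City rows, via the backref
#   some_city.state     # -> the owning State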
| [
"[email protected]"
]
| |
64c3eb71437a4c5058884aa72b33396beb0067b1 | 18a1a72b5f2f1c4a668a96a6365daabe35729750 | /user/social_views.py | e457377a82d90aadc07ff6d46f278b3873719d25 | []
| no_license | AktanKasymaliev/django-ashar-app-backend | 200d0af0fd60d90834895f2c8924d0f5fe3a03c5 | 0b8d59203399487a1ee4dcae24ed6c716960380c | refs/heads/main | 2023-07-02T06:55:26.903742 | 2021-08-04T14:54:03 | 2021-08-04T14:54:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 358 | py | from allauth.socialaccount.providers.google.views import GoogleOAuth2Adapter
from allauth.socialaccount.providers.facebook.views import FacebookOAuth2Adapter
from user.views import SocialLoginView
class GoogleLogin(SocialLoginView):
adapter_class = GoogleOAuth2Adapter
class FacebookLogin(SocialLoginView):
adapter_class = FacebookOAuth2Adapter
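# Wiring sketch (hypothetical urls.py; route names are illustrative only):
#   urlpatterns = [
#       path('auth/google/', GoogleLogin.as_view(), name='google_login'),
#       path('auth/facebook/', FacebookLogin.as_view(), name='facebook_login'),
#   ]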
| [
"[email protected]"
]
| |
c2839fdaac33d51f5cc655bfc494acba3ed70d91 | 9a819fc91e17ef9a44e45cf68e76cf696381d06d | /api_gateway_examples/.env/lib/python3.6/site-packages/aws_cdk/aws_cloudwatch/__init__.py | dbbfba21e77e6539b687f68d85f00e427ce6fce1 | []
| no_license | Gautam3994/Dark-Knight | aef1d6383e0785130db75e80ed40f544a120579e | 327b2d58851a42da1b707addea73e40fac6a61cc | refs/heads/master | 2022-12-01T11:58:39.857379 | 2020-09-05T18:07:51 | 2020-09-05T18:07:55 | 203,866,327 | 0 | 1 | null | 2022-11-24T09:16:18 | 2019-08-22T20:14:43 | Python | UTF-8 | Python | false | false | 213,882 | py | """
## Amazon CloudWatch Construct Library
<!--BEGIN STABILITY BANNER-->---

---
<!--END STABILITY BANNER-->
## Metric objects
Metric objects represent a metric that is emitted by AWS services or your own
application, such as `CPUUsage`, `FailureCount` or `Bandwidth`.
Metric objects can be constructed directly or are exposed by resources as
attributes. Resources that expose metrics will have functions that look
like `metricXxx()` which will return a Metric object, initialized with defaults
that make sense.
For example, `lambda.Function` objects have the `fn.metricErrors()` method, which
represents the amount of errors reported by that Lambda function:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
errors = fn.metric_errors()
```
### Instantiating a new Metric object
If you want to reference a metric that is not yet exposed by an existing construct,
you can instantiate a `Metric` object to represent it. For example:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
metric = Metric(
namespace="MyNamespace",
metric_name="MyMetric",
dimensions={
"ProcessingStep": "Download"
}
)
```
### Metric Math
Math expressions are supported by instantiating the `MathExpression` class.
For example, a math expression that sums two other metrics looks like this:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
all_problems = MathExpression(
expression="errors + faults",
using_metrics={
"errors": my_construct.metric_errors(),
"faults": my_construct.metric_faults()
}
)
```
You can use `MathExpression` objects like any other metric, including using
them in other math expressions:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
problem_percentage = MathExpression(
expression="(problems / invocations) * 100",
using_metrics={
"problems": all_problems,
"invocations": my_construct.metric_invocations()
}
)
```
### Aggregation
To graph or alarm on metrics you must aggregate them first, using a function
like `Average` or a percentile function like `P99`. By default, most Metric objects
returned by CDK libraries will be configured as `Average` over `300 seconds` (5 minutes).
The exception is if the metric represents a count of discrete events, such as
failures. In that case, the Metric object will be configured as `Sum` over `300 seconds`, i.e. it represents the number of times that event occurred over the
time period.
If you want to change the default aggregation of the Metric object (for example,
the function or the period), you can do so by passing additional parameters
to the metric function call:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
minute_error_rate = fn.metric_errors(
statistic="avg",
period=Duration.minutes(1),
label="Lambda failure rate"
)
```
This function also allows changing the metric label or color (which will be
useful when embedding them in graphs, see below).
> Rates versus Sums
>
> The reason for using `Sum` to count discrete events is that *some* events are
> emitted as either `0` or `1` (for example `Errors` for a Lambda) and some are
> only emitted as `1` (for example `NumberOfMessagesPublished` for an SNS
> topic).
>
> In case `0`-metrics are emitted, it makes sense to take the `Average` of this
> metric: the result will be the fraction of errors over all executions.
>
> If `0`-metrics are not emitted, the `Average` will always be equal to `1`,
> and not be very useful.
>
> In order to simplify the mental model of `Metric` objects, we default to
> aggregating using `Sum`, which will be the same for both metrics types. If you
> happen to know the Metric you want to alarm on makes sense as a rate
> (`Average`) you can always choose to change the statistic.
## Alarms
Alarms can be created on metrics in one of two ways. Either create an `Alarm`
object, passing the `Metric` object to set the alarm on:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
Alarm(self, "Alarm",
metric=fn.metric_errors(),
threshold=100,
evaluation_periods=2
)
```
Alternatively, you can call `metric.createAlarm()`:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
fn.metric_errors().create_alarm(self, "Alarm",
threshold=100,
evaluation_periods=2
)
```
The most important properties to set when creating an Alarm (shown together in the sketch after this list) are:
* `threshold`: the value to compare the metric against.
* `comparisonOperator`: the comparison operation to use, defaults to `metric >= threshold`.
* `evaluationPeriods`: how many consecutive periods the metric has to be
breaching the threshold for the alarm to trigger.
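A minimal sketch spelling out all three (not an auto-generated example; `fn` is
the Lambda function from the earlier examples, and `ComparisonOperator` and
`TreatMissingData` are enums exported by this module):
```python
# Sketch: an alarm with the three key properties set explicitly
Alarm(self, "HighErrorAlarm",
    metric=fn.metric_errors(),
    threshold=100,
    evaluation_periods=2,
    comparison_operator=ComparisonOperator.GREATER_THAN_OR_EQUAL_TO_THRESHOLD,
    treat_missing_data=TreatMissingData.NOT_BREACHING
)
```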
### A note on units
In CloudWatch, Metrics datums are emitted with units, such as `seconds` or
`bytes`. When `Metric` objects are given a `unit` attribute, it will be used to
*filter* the stream of metric datums for datums emitted using the same `unit`
attribute.
In particular, the `unit` field is *not* used to rescale datums or alarm threshold
values (for example, it cannot be used to specify an alarm threshold in
*Megabytes* if the metric stream is being emitted as *bytes*).
You almost certainly don't want to specify the `unit` property when creating
`Metric` objects (which will retrieve all datums regardless of their unit),
unless you have very specific requirements. Note that in any case, CloudWatch
only supports filtering by `unit` for Alarms, not in Dashboard graphs.
Please see the following GitHub issue for a discussion on real unit
calculations in CDK: https://github.com/aws/aws-cdk/issues/5595
## Dashboards
Dashboards are sets of Widgets stored server-side which can be accessed quickly
from the AWS console. Available widgets are graphs of a metric over time, the
current value of a metric, or a static piece of Markdown which explains what the
graphs mean.
The following widgets are available:
* `GraphWidget` -- shows any number of metrics on both the left and right
vertical axes.
* `AlarmWidget` -- shows the graph and alarm line for a single alarm.
* `SingleValueWidget` -- shows the current value of a set of metrics.
* `TextWidget` -- shows some static Markdown.
### Graph widget
A graph widget can display any number of metrics on either the `left` or
`right` vertical axis:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
dashboard.add_widgets(GraphWidget(
title="Executions vs error rate",
left=[execution_count_metric],
right=[error_count_metric.with(
statistic="average",
label="Error rate",
color="00FF00"
)]
))
```
### Alarm widget
An alarm widget shows the graph and the alarm line of a single alarm:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
dashboard.add_widgets(AlarmWidget(
title="Errors",
alarm=error_alarm
))
```
### Single value widget
A single-value widget shows the latest value of a set of metrics (as opposed
to a graph of the value over time):
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
dashboard.add_widgets(SingleValueWidget(
metrics=[visitor_count, purchase_count]
))
```
### Text widget
A text widget shows an arbitrary piece of Markdown. Use this to add explanations
to your dashboard:
```python
# Example automatically generated without compilation. See https://github.com/aws/jsii/issues/826
dashboard.add_widgets(TextWidget(
markdown="# Key Performance Indicators"
))
```
### Dashboard Layout
The widgets on a dashboard are visually laid out in a grid that is 24 columns
wide. Normally you specify X and Y coordinates for the widgets on a Dashboard,
but because this is inconvenient to do manually, the library contains a simple
layout system to help you lay out your dashboards the way you want them to.
Widgets have a `width` and `height` property, and they will be automatically
laid out either horizontally or vertically stacked to fill out the available
space.
Widgets are added to a Dashboard by calling `add_widgets(widget1, widget2, ...)`.
Widgets given in the same call will be laid out horizontally. Widgets given
in different calls will be laid out vertically. To make more complex layouts,
you can use the following widgets to pack widgets together in different ways
(see the sketch after this list):
* `Column`: stack two or more widgets vertically.
* `Row`: lay out two or more widgets horizontally.
* `Spacer`: take up empty space
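For example, a minimal sketch (not auto-generated; the widget variables are
hypothetical placeholders) composing a row of two vertically stacked columns:
```python
# Sketch: two columns laid out side by side, with a spacer for padding
dashboard.add_widgets(
    Row(
        Column(text_widget, graph_widget),
        Column(alarm_widget, Spacer(height=2))
    )
)
```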
"""
import abc
import builtins
import datetime
import enum
import typing
import jsii
import jsii.compat
import publication
import aws_cdk.aws_iam
import aws_cdk.core
__jsii_assembly__ = jsii.JSIIAssembly.load("@aws-cdk/aws-cloudwatch", "1.20.0", __name__, "[email protected]")
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.AlarmActionConfig", jsii_struct_bases=[], name_mapping={'alarm_action_arn': 'alarmActionArn'})
class AlarmActionConfig():
def __init__(self, *, alarm_action_arn: str):
"""Properties for an alarm action.
:param alarm_action_arn: Return the ARN that should be used for a CloudWatch Alarm action.
"""
self._values = {
'alarm_action_arn': alarm_action_arn,
}
@builtins.property
def alarm_action_arn(self) -> str:
"""Return the ARN that should be used for a CloudWatch Alarm action."""
return self._values.get('alarm_action_arn')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'AlarmActionConfig(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.implements(aws_cdk.core.IInspectable)
class CfnAlarm(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.CfnAlarm"):
"""A CloudFormation ``AWS::CloudWatch::Alarm``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html
cloudformationResource:
:cloudformationResource:: AWS::CloudWatch::Alarm
"""
def __init__(self, scope: aws_cdk.core.Construct, id: str, *, comparison_operator: str, evaluation_periods: jsii.Number, actions_enabled: typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]=None, alarm_actions: typing.Optional[typing.List[str]]=None, alarm_description: typing.Optional[str]=None, alarm_name: typing.Optional[str]=None, datapoints_to_alarm: typing.Optional[jsii.Number]=None, dimensions: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "DimensionProperty"]]]]]=None, evaluate_low_sample_count_percentile: typing.Optional[str]=None, extended_statistic: typing.Optional[str]=None, insufficient_data_actions: typing.Optional[typing.List[str]]=None, metric_name: typing.Optional[str]=None, metrics: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "MetricDataQueryProperty"]]]]]=None, namespace: typing.Optional[str]=None, ok_actions: typing.Optional[typing.List[str]]=None, period: typing.Optional[jsii.Number]=None, statistic: typing.Optional[str]=None, threshold: typing.Optional[jsii.Number]=None, threshold_metric_id: typing.Optional[str]=None, treat_missing_data: typing.Optional[str]=None, unit: typing.Optional[str]=None) -> None:
"""Create a new ``AWS::CloudWatch::Alarm``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param comparison_operator: ``AWS::CloudWatch::Alarm.ComparisonOperator``.
:param evaluation_periods: ``AWS::CloudWatch::Alarm.EvaluationPeriods``.
:param actions_enabled: ``AWS::CloudWatch::Alarm.ActionsEnabled``.
:param alarm_actions: ``AWS::CloudWatch::Alarm.AlarmActions``.
:param alarm_description: ``AWS::CloudWatch::Alarm.AlarmDescription``.
:param alarm_name: ``AWS::CloudWatch::Alarm.AlarmName``.
:param datapoints_to_alarm: ``AWS::CloudWatch::Alarm.DatapointsToAlarm``.
:param dimensions: ``AWS::CloudWatch::Alarm.Dimensions``.
:param evaluate_low_sample_count_percentile: ``AWS::CloudWatch::Alarm.EvaluateLowSampleCountPercentile``.
:param extended_statistic: ``AWS::CloudWatch::Alarm.ExtendedStatistic``.
:param insufficient_data_actions: ``AWS::CloudWatch::Alarm.InsufficientDataActions``.
:param metric_name: ``AWS::CloudWatch::Alarm.MetricName``.
:param metrics: ``AWS::CloudWatch::Alarm.Metrics``.
:param namespace: ``AWS::CloudWatch::Alarm.Namespace``.
:param ok_actions: ``AWS::CloudWatch::Alarm.OKActions``.
:param period: ``AWS::CloudWatch::Alarm.Period``.
:param statistic: ``AWS::CloudWatch::Alarm.Statistic``.
:param threshold: ``AWS::CloudWatch::Alarm.Threshold``.
:param threshold_metric_id: ``AWS::CloudWatch::Alarm.ThresholdMetricId``.
:param treat_missing_data: ``AWS::CloudWatch::Alarm.TreatMissingData``.
:param unit: ``AWS::CloudWatch::Alarm.Unit``.
"""
props = CfnAlarmProps(comparison_operator=comparison_operator, evaluation_periods=evaluation_periods, actions_enabled=actions_enabled, alarm_actions=alarm_actions, alarm_description=alarm_description, alarm_name=alarm_name, datapoints_to_alarm=datapoints_to_alarm, dimensions=dimensions, evaluate_low_sample_count_percentile=evaluate_low_sample_count_percentile, extended_statistic=extended_statistic, insufficient_data_actions=insufficient_data_actions, metric_name=metric_name, metrics=metrics, namespace=namespace, ok_actions=ok_actions, period=period, statistic=statistic, threshold=threshold, threshold_metric_id=threshold_metric_id, treat_missing_data=treat_missing_data, unit=unit)
jsii.create(CfnAlarm, self, [scope, id, props])
@jsii.member(jsii_name="inspect")
def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None:
"""Examines the CloudFormation resource and discloses attributes.
:param inspector: - tree inspector to collect and process attributes.
stability
:stability: experimental
"""
return jsii.invoke(self, "inspect", [inspector])
@jsii.member(jsii_name="renderProperties")
def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]:
"""
:param props: -
"""
return jsii.invoke(self, "renderProperties", [props])
@jsii.python.classproperty
@jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME")
def CFN_RESOURCE_TYPE_NAME(cls) -> str:
"""The CloudFormation resource type name for this resource class."""
return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME")
@builtins.property
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> str:
"""
cloudformationAttribute:
:cloudformationAttribute:: Arn
"""
return jsii.get(self, "attrArn")
@builtins.property
@jsii.member(jsii_name="cfnProperties")
def _cfn_properties(self) -> typing.Mapping[str,typing.Any]:
return jsii.get(self, "cfnProperties")
@builtins.property
@jsii.member(jsii_name="comparisonOperator")
def comparison_operator(self) -> str:
"""``AWS::CloudWatch::Alarm.ComparisonOperator``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-comparisonoperator
"""
return jsii.get(self, "comparisonOperator")
@comparison_operator.setter
def comparison_operator(self, value: str):
jsii.set(self, "comparisonOperator", value)
@builtins.property
@jsii.member(jsii_name="evaluationPeriods")
def evaluation_periods(self) -> jsii.Number:
"""``AWS::CloudWatch::Alarm.EvaluationPeriods``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-evaluationperiods
"""
return jsii.get(self, "evaluationPeriods")
@evaluation_periods.setter
def evaluation_periods(self, value: jsii.Number):
jsii.set(self, "evaluationPeriods", value)
@builtins.property
@jsii.member(jsii_name="actionsEnabled")
def actions_enabled(self) -> typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]:
"""``AWS::CloudWatch::Alarm.ActionsEnabled``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-actionsenabled
"""
return jsii.get(self, "actionsEnabled")
@actions_enabled.setter
def actions_enabled(self, value: typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]):
jsii.set(self, "actionsEnabled", value)
@builtins.property
@jsii.member(jsii_name="alarmActions")
def alarm_actions(self) -> typing.Optional[typing.List[str]]:
"""``AWS::CloudWatch::Alarm.AlarmActions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-alarmactions
"""
return jsii.get(self, "alarmActions")
@alarm_actions.setter
def alarm_actions(self, value: typing.Optional[typing.List[str]]):
jsii.set(self, "alarmActions", value)
@builtins.property
@jsii.member(jsii_name="alarmDescription")
def alarm_description(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.AlarmDescription``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-alarmdescription
"""
return jsii.get(self, "alarmDescription")
@alarm_description.setter
def alarm_description(self, value: typing.Optional[str]):
jsii.set(self, "alarmDescription", value)
@builtins.property
@jsii.member(jsii_name="alarmName")
def alarm_name(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.AlarmName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-alarmname
"""
return jsii.get(self, "alarmName")
@alarm_name.setter
def alarm_name(self, value: typing.Optional[str]):
jsii.set(self, "alarmName", value)
@builtins.property
@jsii.member(jsii_name="datapointsToAlarm")
def datapoints_to_alarm(self) -> typing.Optional[jsii.Number]:
"""``AWS::CloudWatch::Alarm.DatapointsToAlarm``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarm-datapointstoalarm
"""
return jsii.get(self, "datapointsToAlarm")
@datapoints_to_alarm.setter
def datapoints_to_alarm(self, value: typing.Optional[jsii.Number]):
jsii.set(self, "datapointsToAlarm", value)
@builtins.property
@jsii.member(jsii_name="dimensions")
def dimensions(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "DimensionProperty"]]]]]:
"""``AWS::CloudWatch::Alarm.Dimensions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-dimension
"""
return jsii.get(self, "dimensions")
@dimensions.setter
def dimensions(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "DimensionProperty"]]]]]):
jsii.set(self, "dimensions", value)
@builtins.property
@jsii.member(jsii_name="evaluateLowSampleCountPercentile")
def evaluate_low_sample_count_percentile(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.EvaluateLowSampleCountPercentile``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-evaluatelowsamplecountpercentile
"""
return jsii.get(self, "evaluateLowSampleCountPercentile")
@evaluate_low_sample_count_percentile.setter
def evaluate_low_sample_count_percentile(self, value: typing.Optional[str]):
jsii.set(self, "evaluateLowSampleCountPercentile", value)
@builtins.property
@jsii.member(jsii_name="extendedStatistic")
def extended_statistic(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.ExtendedStatistic``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-extendedstatistic
"""
return jsii.get(self, "extendedStatistic")
@extended_statistic.setter
def extended_statistic(self, value: typing.Optional[str]):
jsii.set(self, "extendedStatistic", value)
@builtins.property
@jsii.member(jsii_name="insufficientDataActions")
def insufficient_data_actions(self) -> typing.Optional[typing.List[str]]:
"""``AWS::CloudWatch::Alarm.InsufficientDataActions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-insufficientdataactions
"""
return jsii.get(self, "insufficientDataActions")
@insufficient_data_actions.setter
def insufficient_data_actions(self, value: typing.Optional[typing.List[str]]):
jsii.set(self, "insufficientDataActions", value)
@builtins.property
@jsii.member(jsii_name="metricName")
def metric_name(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.MetricName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-metricname
"""
return jsii.get(self, "metricName")
@metric_name.setter
def metric_name(self, value: typing.Optional[str]):
jsii.set(self, "metricName", value)
@builtins.property
@jsii.member(jsii_name="metrics")
def metrics(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "MetricDataQueryProperty"]]]]]:
"""``AWS::CloudWatch::Alarm.Metrics``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarm-metrics
"""
return jsii.get(self, "metrics")
@metrics.setter
def metrics(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "MetricDataQueryProperty"]]]]]):
jsii.set(self, "metrics", value)
@builtins.property
@jsii.member(jsii_name="namespace")
def namespace(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.Namespace``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-namespace
"""
return jsii.get(self, "namespace")
@namespace.setter
def namespace(self, value: typing.Optional[str]):
jsii.set(self, "namespace", value)
@builtins.property
@jsii.member(jsii_name="okActions")
def ok_actions(self) -> typing.Optional[typing.List[str]]:
"""``AWS::CloudWatch::Alarm.OKActions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-okactions
"""
return jsii.get(self, "okActions")
@ok_actions.setter
def ok_actions(self, value: typing.Optional[typing.List[str]]):
jsii.set(self, "okActions", value)
@builtins.property
@jsii.member(jsii_name="period")
def period(self) -> typing.Optional[jsii.Number]:
"""``AWS::CloudWatch::Alarm.Period``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-period
"""
return jsii.get(self, "period")
@period.setter
def period(self, value: typing.Optional[jsii.Number]):
jsii.set(self, "period", value)
@builtins.property
@jsii.member(jsii_name="statistic")
def statistic(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.Statistic``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-statistic
"""
return jsii.get(self, "statistic")
@statistic.setter
def statistic(self, value: typing.Optional[str]):
jsii.set(self, "statistic", value)
@builtins.property
@jsii.member(jsii_name="threshold")
def threshold(self) -> typing.Optional[jsii.Number]:
"""``AWS::CloudWatch::Alarm.Threshold``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-threshold
"""
return jsii.get(self, "threshold")
@threshold.setter
def threshold(self, value: typing.Optional[jsii.Number]):
jsii.set(self, "threshold", value)
@builtins.property
@jsii.member(jsii_name="thresholdMetricId")
def threshold_metric_id(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.ThresholdMetricId``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-dynamic-threshold
"""
return jsii.get(self, "thresholdMetricId")
@threshold_metric_id.setter
def threshold_metric_id(self, value: typing.Optional[str]):
jsii.set(self, "thresholdMetricId", value)
@builtins.property
@jsii.member(jsii_name="treatMissingData")
def treat_missing_data(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.TreatMissingData``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-treatmissingdata
"""
return jsii.get(self, "treatMissingData")
@treat_missing_data.setter
def treat_missing_data(self, value: typing.Optional[str]):
jsii.set(self, "treatMissingData", value)
@builtins.property
@jsii.member(jsii_name="unit")
def unit(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.Unit``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-unit
"""
return jsii.get(self, "unit")
@unit.setter
def unit(self, value: typing.Optional[str]):
jsii.set(self, "unit", value)
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAlarm.DimensionProperty", jsii_struct_bases=[], name_mapping={'name': 'name', 'value': 'value'})
class DimensionProperty():
def __init__(self, *, name: str, value: str):
"""
:param name: ``CfnAlarm.DimensionProperty.Name``.
:param value: ``CfnAlarm.DimensionProperty.Value``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-dimension.html
"""
self._values = {
'name': name,
'value': value,
}
@builtins.property
def name(self) -> str:
"""``CfnAlarm.DimensionProperty.Name``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-dimension.html#cfn-cloudwatch-alarm-dimension-name
"""
return self._values.get('name')
@builtins.property
def value(self) -> str:
"""``CfnAlarm.DimensionProperty.Value``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-dimension.html#cfn-cloudwatch-alarm-dimension-value
"""
return self._values.get('value')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'DimensionProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAlarm.MetricDataQueryProperty", jsii_struct_bases=[], name_mapping={'id': 'id', 'expression': 'expression', 'label': 'label', 'metric_stat': 'metricStat', 'return_data': 'returnData'})
class MetricDataQueryProperty():
def __init__(self, *, id: str, expression: typing.Optional[str]=None, label: typing.Optional[str]=None, metric_stat: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnAlarm.MetricStatProperty"]]]=None, return_data: typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]=None):
"""
:param id: ``CfnAlarm.MetricDataQueryProperty.Id``.
:param expression: ``CfnAlarm.MetricDataQueryProperty.Expression``.
:param label: ``CfnAlarm.MetricDataQueryProperty.Label``.
:param metric_stat: ``CfnAlarm.MetricDataQueryProperty.MetricStat``.
:param return_data: ``CfnAlarm.MetricDataQueryProperty.ReturnData``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricdataquery.html
"""
self._values = {
'id': id,
}
if expression is not None: self._values["expression"] = expression
if label is not None: self._values["label"] = label
if metric_stat is not None: self._values["metric_stat"] = metric_stat
if return_data is not None: self._values["return_data"] = return_data
@builtins.property
def id(self) -> str:
"""``CfnAlarm.MetricDataQueryProperty.Id``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricdataquery.html#cfn-cloudwatch-alarm-metricdataquery-id
"""
return self._values.get('id')
@builtins.property
def expression(self) -> typing.Optional[str]:
"""``CfnAlarm.MetricDataQueryProperty.Expression``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricdataquery.html#cfn-cloudwatch-alarm-metricdataquery-expression
"""
return self._values.get('expression')
@builtins.property
def label(self) -> typing.Optional[str]:
"""``CfnAlarm.MetricDataQueryProperty.Label``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricdataquery.html#cfn-cloudwatch-alarm-metricdataquery-label
"""
return self._values.get('label')
@builtins.property
def metric_stat(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnAlarm.MetricStatProperty"]]]:
"""``CfnAlarm.MetricDataQueryProperty.MetricStat``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricdataquery.html#cfn-cloudwatch-alarm-metricdataquery-metricstat
"""
return self._values.get('metric_stat')
@builtins.property
def return_data(self) -> typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]:
"""``CfnAlarm.MetricDataQueryProperty.ReturnData``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricdataquery.html#cfn-cloudwatch-alarm-metricdataquery-returndata
"""
return self._values.get('return_data')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricDataQueryProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAlarm.MetricProperty", jsii_struct_bases=[], name_mapping={'dimensions': 'dimensions', 'metric_name': 'metricName', 'namespace': 'namespace'})
class MetricProperty():
def __init__(self, *, dimensions: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAlarm.DimensionProperty"]]]]]=None, metric_name: typing.Optional[str]=None, namespace: typing.Optional[str]=None):
"""
:param dimensions: ``CfnAlarm.MetricProperty.Dimensions``.
:param metric_name: ``CfnAlarm.MetricProperty.MetricName``.
:param namespace: ``CfnAlarm.MetricProperty.Namespace``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metric.html
"""
self._values = {
}
if dimensions is not None: self._values["dimensions"] = dimensions
if metric_name is not None: self._values["metric_name"] = metric_name
if namespace is not None: self._values["namespace"] = namespace
@builtins.property
def dimensions(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAlarm.DimensionProperty"]]]]]:
"""``CfnAlarm.MetricProperty.Dimensions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metric.html#cfn-cloudwatch-alarm-metric-dimensions
"""
return self._values.get('dimensions')
@builtins.property
def metric_name(self) -> typing.Optional[str]:
"""``CfnAlarm.MetricProperty.MetricName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metric.html#cfn-cloudwatch-alarm-metric-metricname
"""
return self._values.get('metric_name')
@builtins.property
def namespace(self) -> typing.Optional[str]:
"""``CfnAlarm.MetricProperty.Namespace``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metric.html#cfn-cloudwatch-alarm-metric-namespace
"""
return self._values.get('namespace')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAlarm.MetricStatProperty", jsii_struct_bases=[], name_mapping={'metric': 'metric', 'period': 'period', 'stat': 'stat', 'unit': 'unit'})
class MetricStatProperty():
def __init__(self, *, metric: typing.Union[aws_cdk.core.IResolvable, "CfnAlarm.MetricProperty"], period: jsii.Number, stat: str, unit: typing.Optional[str]=None):
"""
:param metric: ``CfnAlarm.MetricStatProperty.Metric``.
:param period: ``CfnAlarm.MetricStatProperty.Period``.
:param stat: ``CfnAlarm.MetricStatProperty.Stat``.
:param unit: ``CfnAlarm.MetricStatProperty.Unit``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricstat.html
"""
self._values = {
'metric': metric,
'period': period,
'stat': stat,
}
if unit is not None: self._values["unit"] = unit
@builtins.property
def metric(self) -> typing.Union[aws_cdk.core.IResolvable, "CfnAlarm.MetricProperty"]:
"""``CfnAlarm.MetricStatProperty.Metric``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricstat.html#cfn-cloudwatch-alarm-metricstat-metric
"""
return self._values.get('metric')
@builtins.property
def period(self) -> jsii.Number:
"""``CfnAlarm.MetricStatProperty.Period``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricstat.html#cfn-cloudwatch-alarm-metricstat-period
"""
return self._values.get('period')
@builtins.property
def stat(self) -> str:
"""``CfnAlarm.MetricStatProperty.Stat``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricstat.html#cfn-cloudwatch-alarm-metricstat-stat
"""
return self._values.get('stat')
@builtins.property
def unit(self) -> typing.Optional[str]:
"""``CfnAlarm.MetricStatProperty.Unit``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-alarm-metricstat.html#cfn-cloudwatch-alarm-metricstat-unit
"""
return self._values.get('unit')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricStatProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAlarmProps", jsii_struct_bases=[], name_mapping={'comparison_operator': 'comparisonOperator', 'evaluation_periods': 'evaluationPeriods', 'actions_enabled': 'actionsEnabled', 'alarm_actions': 'alarmActions', 'alarm_description': 'alarmDescription', 'alarm_name': 'alarmName', 'datapoints_to_alarm': 'datapointsToAlarm', 'dimensions': 'dimensions', 'evaluate_low_sample_count_percentile': 'evaluateLowSampleCountPercentile', 'extended_statistic': 'extendedStatistic', 'insufficient_data_actions': 'insufficientDataActions', 'metric_name': 'metricName', 'metrics': 'metrics', 'namespace': 'namespace', 'ok_actions': 'okActions', 'period': 'period', 'statistic': 'statistic', 'threshold': 'threshold', 'threshold_metric_id': 'thresholdMetricId', 'treat_missing_data': 'treatMissingData', 'unit': 'unit'})
class CfnAlarmProps():
def __init__(self, *, comparison_operator: str, evaluation_periods: jsii.Number, actions_enabled: typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]=None, alarm_actions: typing.Optional[typing.List[str]]=None, alarm_description: typing.Optional[str]=None, alarm_name: typing.Optional[str]=None, datapoints_to_alarm: typing.Optional[jsii.Number]=None, dimensions: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAlarm.DimensionProperty"]]]]]=None, evaluate_low_sample_count_percentile: typing.Optional[str]=None, extended_statistic: typing.Optional[str]=None, insufficient_data_actions: typing.Optional[typing.List[str]]=None, metric_name: typing.Optional[str]=None, metrics: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAlarm.MetricDataQueryProperty"]]]]]=None, namespace: typing.Optional[str]=None, ok_actions: typing.Optional[typing.List[str]]=None, period: typing.Optional[jsii.Number]=None, statistic: typing.Optional[str]=None, threshold: typing.Optional[jsii.Number]=None, threshold_metric_id: typing.Optional[str]=None, treat_missing_data: typing.Optional[str]=None, unit: typing.Optional[str]=None):
"""Properties for defining a ``AWS::CloudWatch::Alarm``.
:param comparison_operator: ``AWS::CloudWatch::Alarm.ComparisonOperator``.
:param evaluation_periods: ``AWS::CloudWatch::Alarm.EvaluationPeriods``.
:param actions_enabled: ``AWS::CloudWatch::Alarm.ActionsEnabled``.
:param alarm_actions: ``AWS::CloudWatch::Alarm.AlarmActions``.
:param alarm_description: ``AWS::CloudWatch::Alarm.AlarmDescription``.
:param alarm_name: ``AWS::CloudWatch::Alarm.AlarmName``.
:param datapoints_to_alarm: ``AWS::CloudWatch::Alarm.DatapointsToAlarm``.
:param dimensions: ``AWS::CloudWatch::Alarm.Dimensions``.
:param evaluate_low_sample_count_percentile: ``AWS::CloudWatch::Alarm.EvaluateLowSampleCountPercentile``.
:param extended_statistic: ``AWS::CloudWatch::Alarm.ExtendedStatistic``.
:param insufficient_data_actions: ``AWS::CloudWatch::Alarm.InsufficientDataActions``.
:param metric_name: ``AWS::CloudWatch::Alarm.MetricName``.
:param metrics: ``AWS::CloudWatch::Alarm.Metrics``.
:param namespace: ``AWS::CloudWatch::Alarm.Namespace``.
:param ok_actions: ``AWS::CloudWatch::Alarm.OKActions``.
:param period: ``AWS::CloudWatch::Alarm.Period``.
:param statistic: ``AWS::CloudWatch::Alarm.Statistic``.
:param threshold: ``AWS::CloudWatch::Alarm.Threshold``.
:param threshold_metric_id: ``AWS::CloudWatch::Alarm.ThresholdMetricId``.
:param treat_missing_data: ``AWS::CloudWatch::Alarm.TreatMissingData``.
:param unit: ``AWS::CloudWatch::Alarm.Unit``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html
"""
self._values = {
'comparison_operator': comparison_operator,
'evaluation_periods': evaluation_periods,
}
if actions_enabled is not None: self._values["actions_enabled"] = actions_enabled
if alarm_actions is not None: self._values["alarm_actions"] = alarm_actions
if alarm_description is not None: self._values["alarm_description"] = alarm_description
if alarm_name is not None: self._values["alarm_name"] = alarm_name
if datapoints_to_alarm is not None: self._values["datapoints_to_alarm"] = datapoints_to_alarm
if dimensions is not None: self._values["dimensions"] = dimensions
if evaluate_low_sample_count_percentile is not None: self._values["evaluate_low_sample_count_percentile"] = evaluate_low_sample_count_percentile
if extended_statistic is not None: self._values["extended_statistic"] = extended_statistic
if insufficient_data_actions is not None: self._values["insufficient_data_actions"] = insufficient_data_actions
if metric_name is not None: self._values["metric_name"] = metric_name
if metrics is not None: self._values["metrics"] = metrics
if namespace is not None: self._values["namespace"] = namespace
if ok_actions is not None: self._values["ok_actions"] = ok_actions
if period is not None: self._values["period"] = period
if statistic is not None: self._values["statistic"] = statistic
if threshold is not None: self._values["threshold"] = threshold
if threshold_metric_id is not None: self._values["threshold_metric_id"] = threshold_metric_id
if treat_missing_data is not None: self._values["treat_missing_data"] = treat_missing_data
if unit is not None: self._values["unit"] = unit
@builtins.property
def comparison_operator(self) -> str:
"""``AWS::CloudWatch::Alarm.ComparisonOperator``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-comparisonoperator
"""
return self._values.get('comparison_operator')
@builtins.property
def evaluation_periods(self) -> jsii.Number:
"""``AWS::CloudWatch::Alarm.EvaluationPeriods``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-evaluationperiods
"""
return self._values.get('evaluation_periods')
@builtins.property
def actions_enabled(self) -> typing.Optional[typing.Union[typing.Optional[bool], typing.Optional[aws_cdk.core.IResolvable]]]:
"""``AWS::CloudWatch::Alarm.ActionsEnabled``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-actionsenabled
"""
return self._values.get('actions_enabled')
@builtins.property
def alarm_actions(self) -> typing.Optional[typing.List[str]]:
"""``AWS::CloudWatch::Alarm.AlarmActions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-alarmactions
"""
return self._values.get('alarm_actions')
@builtins.property
def alarm_description(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.AlarmDescription``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-alarmdescription
"""
return self._values.get('alarm_description')
@builtins.property
def alarm_name(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.AlarmName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-alarmname
"""
return self._values.get('alarm_name')
@builtins.property
def datapoints_to_alarm(self) -> typing.Optional[jsii.Number]:
"""``AWS::CloudWatch::Alarm.DatapointsToAlarm``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarm-datapointstoalarm
"""
return self._values.get('datapoints_to_alarm')
@builtins.property
def dimensions(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAlarm.DimensionProperty"]]]]]:
"""``AWS::CloudWatch::Alarm.Dimensions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-dimension
"""
return self._values.get('dimensions')
@builtins.property
def evaluate_low_sample_count_percentile(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.EvaluateLowSampleCountPercentile``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-evaluatelowsamplecountpercentile
"""
return self._values.get('evaluate_low_sample_count_percentile')
@builtins.property
def extended_statistic(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.ExtendedStatistic``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-extendedstatistic
"""
return self._values.get('extended_statistic')
@builtins.property
def insufficient_data_actions(self) -> typing.Optional[typing.List[str]]:
"""``AWS::CloudWatch::Alarm.InsufficientDataActions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-insufficientdataactions
"""
return self._values.get('insufficient_data_actions')
@builtins.property
def metric_name(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.MetricName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-metricname
"""
return self._values.get('metric_name')
@builtins.property
def metrics(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAlarm.MetricDataQueryProperty"]]]]]:
"""``AWS::CloudWatch::Alarm.Metrics``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarm-metrics
"""
return self._values.get('metrics')
@builtins.property
def namespace(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.Namespace``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-namespace
"""
return self._values.get('namespace')
@builtins.property
def ok_actions(self) -> typing.Optional[typing.List[str]]:
"""``AWS::CloudWatch::Alarm.OKActions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-okactions
"""
return self._values.get('ok_actions')
@builtins.property
def period(self) -> typing.Optional[jsii.Number]:
"""``AWS::CloudWatch::Alarm.Period``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-period
"""
return self._values.get('period')
@builtins.property
def statistic(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.Statistic``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-statistic
"""
return self._values.get('statistic')
@builtins.property
def threshold(self) -> typing.Optional[jsii.Number]:
"""``AWS::CloudWatch::Alarm.Threshold``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-threshold
"""
return self._values.get('threshold')
@builtins.property
def threshold_metric_id(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.ThresholdMetricId``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-dynamic-threshold
"""
return self._values.get('threshold_metric_id')
@builtins.property
def treat_missing_data(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.TreatMissingData``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-treatmissingdata
"""
return self._values.get('treat_missing_data')
@builtins.property
def unit(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Alarm.Unit``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cw-alarm.html#cfn-cloudwatch-alarms-unit
"""
return self._values.get('unit')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'CfnAlarmProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
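# Illustrative sketch (not part of the generated jsii bindings): how the
# low-level CfnAlarm L1 construct could be instantiated directly with raw
# CloudFormation string values. Names and values below are hypothetical;
# the helper is module-private and is never invoked at import time.
def _example_cfn_alarm(scope: aws_cdk.core.Construct) -> "CfnAlarm":
    return CfnAlarm(scope, "HighErrorAlarm",
        comparison_operator="GreaterThanOrEqualToThreshold",
        evaluation_periods=2,
        metric_name="Errors",
        namespace="AWS/Lambda",
        statistic="Sum",
        period=300,
        threshold=100,
        treat_missing_data="notBreaching")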
@jsii.implements(aws_cdk.core.IInspectable)
class CfnAnomalyDetector(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.CfnAnomalyDetector"):
"""A CloudFormation ``AWS::CloudWatch::AnomalyDetector``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html
cloudformationResource:
:cloudformationResource:: AWS::CloudWatch::AnomalyDetector
"""
def __init__(self, scope: aws_cdk.core.Construct, id: str, *, metric_name: str, namespace: str, stat: str, configuration: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["ConfigurationProperty"]]]=None, dimensions: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "DimensionProperty"]]]]]=None) -> None:
"""Create a new ``AWS::CloudWatch::AnomalyDetector``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param metric_name: ``AWS::CloudWatch::AnomalyDetector.MetricName``.
:param namespace: ``AWS::CloudWatch::AnomalyDetector.Namespace``.
:param stat: ``AWS::CloudWatch::AnomalyDetector.Stat``.
:param configuration: ``AWS::CloudWatch::AnomalyDetector.Configuration``.
:param dimensions: ``AWS::CloudWatch::AnomalyDetector.Dimensions``.
"""
props = CfnAnomalyDetectorProps(metric_name=metric_name, namespace=namespace, stat=stat, configuration=configuration, dimensions=dimensions)
jsii.create(CfnAnomalyDetector, self, [scope, id, props])
@jsii.member(jsii_name="inspect")
def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None:
"""Examines the CloudFormation resource and discloses attributes.
:param inspector: - tree inspector to collect and process attributes.
stability
:stability: experimental
"""
return jsii.invoke(self, "inspect", [inspector])
@jsii.member(jsii_name="renderProperties")
def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]:
"""
:param props: -
"""
return jsii.invoke(self, "renderProperties", [props])
@jsii.python.classproperty
@jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME")
def CFN_RESOURCE_TYPE_NAME(cls) -> str:
"""The CloudFormation resource type name for this resource class."""
return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME")
@builtins.property
@jsii.member(jsii_name="cfnProperties")
def _cfn_properties(self) -> typing.Mapping[str,typing.Any]:
return jsii.get(self, "cfnProperties")
@builtins.property
@jsii.member(jsii_name="metricName")
def metric_name(self) -> str:
"""``AWS::CloudWatch::AnomalyDetector.MetricName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-metricname
"""
return jsii.get(self, "metricName")
@metric_name.setter
def metric_name(self, value: str):
jsii.set(self, "metricName", value)
@builtins.property
@jsii.member(jsii_name="namespace")
def namespace(self) -> str:
"""``AWS::CloudWatch::AnomalyDetector.Namespace``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-namespace
"""
return jsii.get(self, "namespace")
@namespace.setter
def namespace(self, value: str):
jsii.set(self, "namespace", value)
@builtins.property
@jsii.member(jsii_name="stat")
def stat(self) -> str:
"""``AWS::CloudWatch::AnomalyDetector.Stat``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-stat
"""
return jsii.get(self, "stat")
@stat.setter
def stat(self, value: str):
jsii.set(self, "stat", value)
@builtins.property
@jsii.member(jsii_name="configuration")
def configuration(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["ConfigurationProperty"]]]:
"""``AWS::CloudWatch::AnomalyDetector.Configuration``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-configuration
"""
return jsii.get(self, "configuration")
@configuration.setter
def configuration(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["ConfigurationProperty"]]]):
jsii.set(self, "configuration", value)
@builtins.property
@jsii.member(jsii_name="dimensions")
def dimensions(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "DimensionProperty"]]]]]:
"""``AWS::CloudWatch::AnomalyDetector.Dimensions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-dimensions
"""
return jsii.get(self, "dimensions")
@dimensions.setter
def dimensions(self, value: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "DimensionProperty"]]]]]):
jsii.set(self, "dimensions", value)
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAnomalyDetector.ConfigurationProperty", jsii_struct_bases=[], name_mapping={'excluded_time_ranges': 'excludedTimeRanges', 'metric_time_zone': 'metricTimeZone'})
class ConfigurationProperty():
def __init__(self, *, excluded_time_ranges: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAnomalyDetector.RangeProperty"]]]]]=None, metric_time_zone: typing.Optional[str]=None):
"""
:param excluded_time_ranges: ``CfnAnomalyDetector.ConfigurationProperty.ExcludedTimeRanges``.
:param metric_time_zone: ``CfnAnomalyDetector.ConfigurationProperty.MetricTimeZone``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-configuration.html
"""
self._values = {
}
if excluded_time_ranges is not None: self._values["excluded_time_ranges"] = excluded_time_ranges
if metric_time_zone is not None: self._values["metric_time_zone"] = metric_time_zone
@builtins.property
def excluded_time_ranges(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAnomalyDetector.RangeProperty"]]]]]:
"""``CfnAnomalyDetector.ConfigurationProperty.ExcludedTimeRanges``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-configuration.html#cfn-cloudwatch-anomalydetector-configuration-excludedtimeranges
"""
return self._values.get('excluded_time_ranges')
@builtins.property
def metric_time_zone(self) -> typing.Optional[str]:
"""``CfnAnomalyDetector.ConfigurationProperty.MetricTimeZone``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-configuration.html#cfn-cloudwatch-anomalydetector-configuration-metrictimezone
"""
return self._values.get('metric_time_zone')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'ConfigurationProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAnomalyDetector.DimensionProperty", jsii_struct_bases=[], name_mapping={'name': 'name', 'value': 'value'})
class DimensionProperty():
def __init__(self, *, name: str, value: str):
"""
:param name: ``CfnAnomalyDetector.DimensionProperty.Name``.
:param value: ``CfnAnomalyDetector.DimensionProperty.Value``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-dimension.html
"""
self._values = {
'name': name,
'value': value,
}
@builtins.property
def name(self) -> str:
"""``CfnAnomalyDetector.DimensionProperty.Name``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-dimension.html#cfn-cloudwatch-anomalydetector-dimension-name
"""
return self._values.get('name')
@builtins.property
def value(self) -> str:
"""``CfnAnomalyDetector.DimensionProperty.Value``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-dimension.html#cfn-cloudwatch-anomalydetector-dimension-value
"""
return self._values.get('value')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'DimensionProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAnomalyDetector.RangeProperty", jsii_struct_bases=[], name_mapping={'end_time': 'endTime', 'start_time': 'startTime'})
class RangeProperty():
def __init__(self, *, end_time: str, start_time: str):
"""
:param end_time: ``CfnAnomalyDetector.RangeProperty.EndTime``.
:param start_time: ``CfnAnomalyDetector.RangeProperty.StartTime``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-range.html
"""
self._values = {
'end_time': end_time,
'start_time': start_time,
}
@builtins.property
def end_time(self) -> str:
"""``CfnAnomalyDetector.RangeProperty.EndTime``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-range.html#cfn-cloudwatch-anomalydetector-range-endtime
"""
return self._values.get('end_time')
@builtins.property
def start_time(self) -> str:
"""``CfnAnomalyDetector.RangeProperty.StartTime``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-cloudwatch-anomalydetector-range.html#cfn-cloudwatch-anomalydetector-range-starttime
"""
return self._values.get('start_time')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'RangeProperty(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnAnomalyDetectorProps", jsii_struct_bases=[], name_mapping={'metric_name': 'metricName', 'namespace': 'namespace', 'stat': 'stat', 'configuration': 'configuration', 'dimensions': 'dimensions'})
class CfnAnomalyDetectorProps():
def __init__(self, *, metric_name: str, namespace: str, stat: str, configuration: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnAnomalyDetector.ConfigurationProperty"]]]=None, dimensions: typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAnomalyDetector.DimensionProperty"]]]]]=None):
"""Properties for defining a ``AWS::CloudWatch::AnomalyDetector``.
:param metric_name: ``AWS::CloudWatch::AnomalyDetector.MetricName``.
:param namespace: ``AWS::CloudWatch::AnomalyDetector.Namespace``.
:param stat: ``AWS::CloudWatch::AnomalyDetector.Stat``.
:param configuration: ``AWS::CloudWatch::AnomalyDetector.Configuration``.
:param dimensions: ``AWS::CloudWatch::AnomalyDetector.Dimensions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html
"""
self._values = {
'metric_name': metric_name,
'namespace': namespace,
'stat': stat,
}
if configuration is not None: self._values["configuration"] = configuration
if dimensions is not None: self._values["dimensions"] = dimensions
@builtins.property
def metric_name(self) -> str:
"""``AWS::CloudWatch::AnomalyDetector.MetricName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-metricname
"""
return self._values.get('metric_name')
@builtins.property
def namespace(self) -> str:
"""``AWS::CloudWatch::AnomalyDetector.Namespace``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-namespace
"""
return self._values.get('namespace')
@builtins.property
def stat(self) -> str:
"""``AWS::CloudWatch::AnomalyDetector.Stat``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-stat
"""
return self._values.get('stat')
@builtins.property
def configuration(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional["CfnAnomalyDetector.ConfigurationProperty"]]]:
"""``AWS::CloudWatch::AnomalyDetector.Configuration``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-configuration
"""
return self._values.get('configuration')
@builtins.property
def dimensions(self) -> typing.Optional[typing.Union[typing.Optional[aws_cdk.core.IResolvable], typing.Optional[typing.List[typing.Union[aws_cdk.core.IResolvable, "CfnAnomalyDetector.DimensionProperty"]]]]]:
"""``AWS::CloudWatch::AnomalyDetector.Dimensions``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-anomalydetector.html#cfn-cloudwatch-anomalydetector-dimensions
"""
return self._values.get('dimensions')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'CfnAnomalyDetectorProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
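# Illustrative sketch (not generated code): building anomaly-detector props for
# a hypothetical latency metric. Assumes ``CfnAnomalyDetector.DimensionProperty``
# (defined earlier in this module) takes ``name``/``value`` keyword arguments,
# mirroring the CloudFormation Dimension type.
def _example_anomaly_detector_props() -> CfnAnomalyDetectorProps:
    """Sketch: props for an anomaly detector on a placeholder metric."""
    return CfnAnomalyDetectorProps(
        metric_name="Latency",
        namespace="MyApp",
        stat="Average",
        dimensions=[CfnAnomalyDetector.DimensionProperty(name="Service", value="api")],
    )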
@jsii.implements(aws_cdk.core.IInspectable)
class CfnDashboard(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.CfnDashboard"):
"""A CloudFormation ``AWS::CloudWatch::Dashboard``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-dashboard.html
cloudformationResource:
:cloudformationResource:: AWS::CloudWatch::Dashboard
"""
def __init__(self, scope: aws_cdk.core.Construct, id: str, *, dashboard_body: str, dashboard_name: typing.Optional[str]=None) -> None:
"""Create a new ``AWS::CloudWatch::Dashboard``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param dashboard_body: ``AWS::CloudWatch::Dashboard.DashboardBody``.
:param dashboard_name: ``AWS::CloudWatch::Dashboard.DashboardName``.
"""
props = CfnDashboardProps(dashboard_body=dashboard_body, dashboard_name=dashboard_name)
jsii.create(CfnDashboard, self, [scope, id, props])
@jsii.member(jsii_name="inspect")
def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None:
"""Examines the CloudFormation resource and discloses attributes.
:param inspector: - tree inspector to collect and process attributes.
stability
:stability: experimental
"""
return jsii.invoke(self, "inspect", [inspector])
@jsii.member(jsii_name="renderProperties")
def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]:
"""
:param props: -
"""
return jsii.invoke(self, "renderProperties", [props])
@jsii.python.classproperty
@jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME")
def CFN_RESOURCE_TYPE_NAME(cls) -> str:
"""The CloudFormation resource type name for this resource class."""
return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME")
@builtins.property
@jsii.member(jsii_name="cfnProperties")
def _cfn_properties(self) -> typing.Mapping[str,typing.Any]:
return jsii.get(self, "cfnProperties")
@builtins.property
@jsii.member(jsii_name="dashboardBody")
def dashboard_body(self) -> str:
"""``AWS::CloudWatch::Dashboard.DashboardBody``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-dashboard.html#cfn-cloudwatch-dashboard-dashboardbody
"""
return jsii.get(self, "dashboardBody")
@dashboard_body.setter
def dashboard_body(self, value: str):
jsii.set(self, "dashboardBody", value)
@builtins.property
@jsii.member(jsii_name="dashboardName")
def dashboard_name(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Dashboard.DashboardName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-dashboard.html#cfn-cloudwatch-dashboard-dashboardname
"""
return jsii.get(self, "dashboardName")
@dashboard_name.setter
def dashboard_name(self, value: typing.Optional[str]):
jsii.set(self, "dashboardName", value)
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnDashboardProps", jsii_struct_bases=[], name_mapping={'dashboard_body': 'dashboardBody', 'dashboard_name': 'dashboardName'})
class CfnDashboardProps():
def __init__(self, *, dashboard_body: str, dashboard_name: typing.Optional[str]=None):
"""Properties for defining a ``AWS::CloudWatch::Dashboard``.
:param dashboard_body: ``AWS::CloudWatch::Dashboard.DashboardBody``.
:param dashboard_name: ``AWS::CloudWatch::Dashboard.DashboardName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-dashboard.html
"""
self._values = {
'dashboard_body': dashboard_body,
}
if dashboard_name is not None: self._values["dashboard_name"] = dashboard_name
@builtins.property
def dashboard_body(self) -> str:
"""``AWS::CloudWatch::Dashboard.DashboardBody``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-dashboard.html#cfn-cloudwatch-dashboard-dashboardbody
"""
return self._values.get('dashboard_body')
@builtins.property
def dashboard_name(self) -> typing.Optional[str]:
"""``AWS::CloudWatch::Dashboard.DashboardName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-dashboard.html#cfn-cloudwatch-dashboard-dashboardname
"""
return self._values.get('dashboard_name')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'CfnDashboardProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.implements(aws_cdk.core.IInspectable)
class CfnInsightRule(aws_cdk.core.CfnResource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.CfnInsightRule"):
"""A CloudFormation ``AWS::CloudWatch::InsightRule``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-insightrule.html
cloudformationResource:
:cloudformationResource:: AWS::CloudWatch::InsightRule
"""
def __init__(self, scope: aws_cdk.core.Construct, id: str, *, rule_body: str, rule_name: str, rule_state: str) -> None:
"""Create a new ``AWS::CloudWatch::InsightRule``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param rule_body: ``AWS::CloudWatch::InsightRule.RuleBody``.
:param rule_name: ``AWS::CloudWatch::InsightRule.RuleName``.
:param rule_state: ``AWS::CloudWatch::InsightRule.RuleState``.
"""
props = CfnInsightRuleProps(rule_body=rule_body, rule_name=rule_name, rule_state=rule_state)
jsii.create(CfnInsightRule, self, [scope, id, props])
@jsii.member(jsii_name="inspect")
def inspect(self, inspector: aws_cdk.core.TreeInspector) -> None:
"""Examines the CloudFormation resource and discloses attributes.
:param inspector: - tree inspector to collect and process attributes.
stability
:stability: experimental
"""
return jsii.invoke(self, "inspect", [inspector])
@jsii.member(jsii_name="renderProperties")
def _render_properties(self, props: typing.Mapping[str,typing.Any]) -> typing.Mapping[str,typing.Any]:
"""
:param props: -
"""
return jsii.invoke(self, "renderProperties", [props])
@jsii.python.classproperty
@jsii.member(jsii_name="CFN_RESOURCE_TYPE_NAME")
def CFN_RESOURCE_TYPE_NAME(cls) -> str:
"""The CloudFormation resource type name for this resource class."""
return jsii.sget(cls, "CFN_RESOURCE_TYPE_NAME")
@builtins.property
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> str:
"""
cloudformationAttribute:
:cloudformationAttribute:: Arn
"""
return jsii.get(self, "attrArn")
@builtins.property
@jsii.member(jsii_name="attrRuleName")
def attr_rule_name(self) -> str:
"""
cloudformationAttribute:
:cloudformationAttribute:: RuleName
"""
return jsii.get(self, "attrRuleName")
@builtins.property
@jsii.member(jsii_name="cfnProperties")
def _cfn_properties(self) -> typing.Mapping[str,typing.Any]:
return jsii.get(self, "cfnProperties")
@builtins.property
@jsii.member(jsii_name="ruleBody")
def rule_body(self) -> str:
"""``AWS::CloudWatch::InsightRule.RuleBody``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-insightrule.html#cfn-cloudwatch-insightrule-rulebody
"""
return jsii.get(self, "ruleBody")
@rule_body.setter
def rule_body(self, value: str):
jsii.set(self, "ruleBody", value)
@builtins.property
@jsii.member(jsii_name="ruleName")
def rule_name(self) -> str:
"""``AWS::CloudWatch::InsightRule.RuleName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-insightrule.html#cfn-cloudwatch-insightrule-rulename
"""
return jsii.get(self, "ruleName")
@rule_name.setter
def rule_name(self, value: str):
jsii.set(self, "ruleName", value)
@builtins.property
@jsii.member(jsii_name="ruleState")
def rule_state(self) -> str:
"""``AWS::CloudWatch::InsightRule.RuleState``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-insightrule.html#cfn-cloudwatch-insightrule-rulestate
"""
return jsii.get(self, "ruleState")
@rule_state.setter
def rule_state(self, value: str):
jsii.set(self, "ruleState", value)
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CfnInsightRuleProps", jsii_struct_bases=[], name_mapping={'rule_body': 'ruleBody', 'rule_name': 'ruleName', 'rule_state': 'ruleState'})
class CfnInsightRuleProps():
def __init__(self, *, rule_body: str, rule_name: str, rule_state: str):
"""Properties for defining a ``AWS::CloudWatch::InsightRule``.
:param rule_body: ``AWS::CloudWatch::InsightRule.RuleBody``.
:param rule_name: ``AWS::CloudWatch::InsightRule.RuleName``.
:param rule_state: ``AWS::CloudWatch::InsightRule.RuleState``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-insightrule.html
"""
self._values = {
'rule_body': rule_body,
'rule_name': rule_name,
'rule_state': rule_state,
}
@builtins.property
def rule_body(self) -> str:
"""``AWS::CloudWatch::InsightRule.RuleBody``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-insightrule.html#cfn-cloudwatch-insightrule-rulebody
"""
return self._values.get('rule_body')
@builtins.property
def rule_name(self) -> str:
"""``AWS::CloudWatch::InsightRule.RuleName``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-insightrule.html#cfn-cloudwatch-insightrule-rulename
"""
return self._values.get('rule_name')
@builtins.property
def rule_state(self) -> str:
"""``AWS::CloudWatch::InsightRule.RuleState``.
see
:see: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-cloudwatch-insightrule.html#cfn-cloudwatch-insightrule-rulestate
"""
return self._values.get('rule_state')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'CfnInsightRuleProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CommonMetricOptions", jsii_struct_bases=[], name_mapping={'account': 'account', 'color': 'color', 'dimensions': 'dimensions', 'label': 'label', 'period': 'period', 'region': 'region', 'statistic': 'statistic', 'unit': 'unit'})
class CommonMetricOptions():
def __init__(self, *, account: typing.Optional[str]=None, color: typing.Optional[str]=None, dimensions: typing.Optional[typing.Mapping[str,typing.Any]]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, region: typing.Optional[str]=None, statistic: typing.Optional[str]=None, unit: typing.Optional["Unit"]=None):
"""Options shared by most methods accepting metric options.
:param account: Account which this metric comes from. Default: Deployment account.
:param color: Color for this metric when added to a Graph in a Dashboard.
:param dimensions: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: Deployment region.
:param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: All metric datums in the given metric stream
"""
self._values = {
}
if account is not None: self._values["account"] = account
if color is not None: self._values["color"] = color
if dimensions is not None: self._values["dimensions"] = dimensions
if label is not None: self._values["label"] = label
if period is not None: self._values["period"] = period
if region is not None: self._values["region"] = region
if statistic is not None: self._values["statistic"] = statistic
if unit is not None: self._values["unit"] = unit
@builtins.property
def account(self) -> typing.Optional[str]:
"""Account which this metric comes from.
default
:default: Deployment account.
"""
return self._values.get('account')
@builtins.property
def color(self) -> typing.Optional[str]:
"""Color for this metric when added to a Graph in a Dashboard."""
return self._values.get('color')
@builtins.property
def dimensions(self) -> typing.Optional[typing.Mapping[str,typing.Any]]:
"""Dimensions of the metric.
default
:default: - No dimensions.
"""
return self._values.get('dimensions')
@builtins.property
def label(self) -> typing.Optional[str]:
"""Label for this metric when added to a Graph in a Dashboard."""
return self._values.get('label')
@builtins.property
def period(self) -> typing.Optional[aws_cdk.core.Duration]:
"""The period over which the specified statistic is applied.
default
:default: Duration.minutes(5)
"""
return self._values.get('period')
@builtins.property
def region(self) -> typing.Optional[str]:
"""Region which this metric comes from.
default
:default: Deployment region.
"""
return self._values.get('region')
@builtins.property
def statistic(self) -> typing.Optional[str]:
"""What function to use for aggregating.
Can be one of the following:
- "Minimum" | "min"
- "Maximum" | "max"
- "Average" | "avg"
- "Sum" | "sum"
- "SampleCount | "n"
- "pNN.NN"
default
:default: Average
"""
return self._values.get('statistic')
@builtins.property
def unit(self) -> typing.Optional["Unit"]:
"""Unit used to filter the metric stream.
Only refer to datums emitted to the metric stream with the given unit and
ignore all others. Only useful when datums are being emitted to the same
metric stream under different units.
The default is to use all metric datums in the stream, regardless of unit,
which is recommended in nearly all cases.
CloudWatch does not honor this property for graphs.
default
:default: All metric datums in the given metric stream
"""
return self._values.get('unit')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'CommonMetricOptions(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
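# Illustrative sketch (not generated code): metric options selecting the p99
# percentile statistic over one-minute periods; the label is a placeholder.
def _example_common_metric_options() -> CommonMetricOptions:
    """Sketch: options for a p99 statistic sampled every minute."""
    return CommonMetricOptions(
        statistic="p99.00",
        period=aws_cdk.core.Duration.minutes(1),
        label="p99 latency",
    )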
@jsii.enum(jsii_type="@aws-cdk/aws-cloudwatch.ComparisonOperator")
class ComparisonOperator(enum.Enum):
"""Comparison operator for evaluating alarms."""
GREATER_THAN_OR_EQUAL_TO_THRESHOLD = "GREATER_THAN_OR_EQUAL_TO_THRESHOLD"
GREATER_THAN_THRESHOLD = "GREATER_THAN_THRESHOLD"
LESS_THAN_THRESHOLD = "LESS_THAN_THRESHOLD"
LESS_THAN_OR_EQUAL_TO_THRESHOLD = "LESS_THAN_OR_EQUAL_TO_THRESHOLD"
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.CreateAlarmOptions", jsii_struct_bases=[], name_mapping={'evaluation_periods': 'evaluationPeriods', 'threshold': 'threshold', 'actions_enabled': 'actionsEnabled', 'alarm_description': 'alarmDescription', 'alarm_name': 'alarmName', 'comparison_operator': 'comparisonOperator', 'datapoints_to_alarm': 'datapointsToAlarm', 'evaluate_low_sample_count_percentile': 'evaluateLowSampleCountPercentile', 'period': 'period', 'statistic': 'statistic', 'treat_missing_data': 'treatMissingData'})
class CreateAlarmOptions():
def __init__(self, *, evaluation_periods: jsii.Number, threshold: jsii.Number, actions_enabled: typing.Optional[bool]=None, alarm_description: typing.Optional[str]=None, alarm_name: typing.Optional[str]=None, comparison_operator: typing.Optional["ComparisonOperator"]=None, datapoints_to_alarm: typing.Optional[jsii.Number]=None, evaluate_low_sample_count_percentile: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, statistic: typing.Optional[str]=None, treat_missing_data: typing.Optional["TreatMissingData"]=None):
"""Properties needed to make an alarm from a metric.
:param evaluation_periods: The number of periods over which data is compared to the specified threshold.
:param threshold: The value against which the specified statistic is compared.
:param actions_enabled: Whether the actions for this alarm are enabled. Default: true
:param alarm_description: Description for the alarm. Default: No description
:param alarm_name: Name of the alarm. Default: Automatically generated name
:param comparison_operator: Comparison to use to check if metric is breaching. Default: GreaterThanOrEqualToThreshold
:param datapoints_to_alarm: The number of datapoints that must be breaching to trigger the alarm. This is used only if you are setting an "M out of N" alarm. In that case, this value is the M. For more information, see Evaluating an Alarm in the Amazon CloudWatch User Guide. Default: ``evaluationPeriods``
:param evaluate_low_sample_count_percentile: Specifies whether to evaluate the data and potentially change the alarm state if there are too few data points to be statistically significant. Used only for alarms that are based on percentiles. Default: - Not configured.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param treat_missing_data: Sets how this alarm is to handle missing data points. Default: TreatMissingData.Missing
"""
self._values = {
'evaluation_periods': evaluation_periods,
'threshold': threshold,
}
if actions_enabled is not None: self._values["actions_enabled"] = actions_enabled
if alarm_description is not None: self._values["alarm_description"] = alarm_description
if alarm_name is not None: self._values["alarm_name"] = alarm_name
if comparison_operator is not None: self._values["comparison_operator"] = comparison_operator
if datapoints_to_alarm is not None: self._values["datapoints_to_alarm"] = datapoints_to_alarm
if evaluate_low_sample_count_percentile is not None: self._values["evaluate_low_sample_count_percentile"] = evaluate_low_sample_count_percentile
if period is not None: self._values["period"] = period
if statistic is not None: self._values["statistic"] = statistic
if treat_missing_data is not None: self._values["treat_missing_data"] = treat_missing_data
@builtins.property
def evaluation_periods(self) -> jsii.Number:
"""The number of periods over which data is compared to the specified threshold."""
return self._values.get('evaluation_periods')
@builtins.property
def threshold(self) -> jsii.Number:
"""The value against which the specified statistic is compared."""
return self._values.get('threshold')
@builtins.property
def actions_enabled(self) -> typing.Optional[bool]:
"""Whether the actions for this alarm are enabled.
default
:default: true
"""
return self._values.get('actions_enabled')
@builtins.property
def alarm_description(self) -> typing.Optional[str]:
"""Description for the alarm.
default
:default: No description
"""
return self._values.get('alarm_description')
@builtins.property
def alarm_name(self) -> typing.Optional[str]:
"""Name of the alarm.
default
:default: Automatically generated name
"""
return self._values.get('alarm_name')
@builtins.property
def comparison_operator(self) -> typing.Optional["ComparisonOperator"]:
"""Comparison to use to check if metric is breaching.
default
:default: GreaterThanOrEqualToThreshold
"""
return self._values.get('comparison_operator')
@builtins.property
def datapoints_to_alarm(self) -> typing.Optional[jsii.Number]:
"""The number of datapoints that must be breaching to trigger the alarm.
This is used only if you are setting an "M
out of N" alarm. In that case, this value is the M. For more information, see Evaluating an Alarm in the Amazon
CloudWatch User Guide.
default
:default: ``evaluationPeriods``
see
:see: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/AlarmThatSendsEmail.html#alarm-evaluation
"""
return self._values.get('datapoints_to_alarm')
@builtins.property
def evaluate_low_sample_count_percentile(self) -> typing.Optional[str]:
"""Specifies whether to evaluate the data and potentially change the alarm state if there are too few data points to be statistically significant.
Used only for alarms that are based on percentiles.
default
:default: - Not configured.
"""
return self._values.get('evaluate_low_sample_count_percentile')
@builtins.property
def period(self) -> typing.Optional[aws_cdk.core.Duration]:
"""The period over which the specified statistic is applied.
default
:default: Duration.minutes(5)
"""
return self._values.get('period')
@builtins.property
def statistic(self) -> typing.Optional[str]:
"""What function to use for aggregating.
Can be one of the following:
- "Minimum" | "min"
- "Maximum" | "max"
- "Average" | "avg"
- "Sum" | "sum"
- "SampleCount | "n"
- "pNN.NN"
default
:default: Average
"""
return self._values.get('statistic')
@builtins.property
def treat_missing_data(self) -> typing.Optional["TreatMissingData"]:
"""Sets how this alarm is to handle missing data points.
default
:default: TreatMissingData.Missing
"""
return self._values.get('treat_missing_data')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'CreateAlarmOptions(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.AlarmProps", jsii_struct_bases=[CreateAlarmOptions], name_mapping={'evaluation_periods': 'evaluationPeriods', 'threshold': 'threshold', 'actions_enabled': 'actionsEnabled', 'alarm_description': 'alarmDescription', 'alarm_name': 'alarmName', 'comparison_operator': 'comparisonOperator', 'datapoints_to_alarm': 'datapointsToAlarm', 'evaluate_low_sample_count_percentile': 'evaluateLowSampleCountPercentile', 'period': 'period', 'statistic': 'statistic', 'treat_missing_data': 'treatMissingData', 'metric': 'metric'})
class AlarmProps(CreateAlarmOptions):
def __init__(self, *, evaluation_periods: jsii.Number, threshold: jsii.Number, actions_enabled: typing.Optional[bool]=None, alarm_description: typing.Optional[str]=None, alarm_name: typing.Optional[str]=None, comparison_operator: typing.Optional["ComparisonOperator"]=None, datapoints_to_alarm: typing.Optional[jsii.Number]=None, evaluate_low_sample_count_percentile: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, statistic: typing.Optional[str]=None, treat_missing_data: typing.Optional["TreatMissingData"]=None, metric: "IMetric"):
"""Properties for Alarms.
:param evaluation_periods: The number of periods over which data is compared to the specified threshold.
:param threshold: The value against which the specified statistic is compared.
:param actions_enabled: Whether the actions for this alarm are enabled. Default: true
:param alarm_description: Description for the alarm. Default: No description
:param alarm_name: Name of the alarm. Default: Automatically generated name
:param comparison_operator: Comparison to use to check if metric is breaching. Default: GreaterThanOrEqualToThreshold
:param datapoints_to_alarm: The number of datapoints that must be breaching to trigger the alarm. This is used only if you are setting an "M out of N" alarm. In that case, this value is the M. For more information, see Evaluating an Alarm in the Amazon CloudWatch User Guide. Default: ``evaluationPeriods``
:param evaluate_low_sample_count_percentile: Specifies whether to evaluate the data and potentially change the alarm state if there are too few data points to be statistically significant. Used only for alarms that are based on percentiles. Default: - Not configured.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param treat_missing_data: Sets how this alarm is to handle missing data points. Default: TreatMissingData.Missing
:param metric: The metric to add the alarm on. Metric objects can be obtained from most resources, or you can construct custom Metric objects by instantiating one.
"""
self._values = {
'evaluation_periods': evaluation_periods,
'threshold': threshold,
'metric': metric,
}
if actions_enabled is not None: self._values["actions_enabled"] = actions_enabled
if alarm_description is not None: self._values["alarm_description"] = alarm_description
if alarm_name is not None: self._values["alarm_name"] = alarm_name
if comparison_operator is not None: self._values["comparison_operator"] = comparison_operator
if datapoints_to_alarm is not None: self._values["datapoints_to_alarm"] = datapoints_to_alarm
if evaluate_low_sample_count_percentile is not None: self._values["evaluate_low_sample_count_percentile"] = evaluate_low_sample_count_percentile
if period is not None: self._values["period"] = period
if statistic is not None: self._values["statistic"] = statistic
if treat_missing_data is not None: self._values["treat_missing_data"] = treat_missing_data
@builtins.property
def evaluation_periods(self) -> jsii.Number:
"""The number of periods over which data is compared to the specified threshold."""
return self._values.get('evaluation_periods')
@builtins.property
def threshold(self) -> jsii.Number:
"""The value against which the specified statistic is compared."""
return self._values.get('threshold')
@builtins.property
def actions_enabled(self) -> typing.Optional[bool]:
"""Whether the actions for this alarm are enabled.
default
:default: true
"""
return self._values.get('actions_enabled')
@builtins.property
def alarm_description(self) -> typing.Optional[str]:
"""Description for the alarm.
default
:default: No description
"""
return self._values.get('alarm_description')
@builtins.property
def alarm_name(self) -> typing.Optional[str]:
"""Name of the alarm.
default
:default: Automatically generated name
"""
return self._values.get('alarm_name')
@builtins.property
def comparison_operator(self) -> typing.Optional["ComparisonOperator"]:
"""Comparison to use to check if metric is breaching.
default
:default: GreaterThanOrEqualToThreshold
"""
return self._values.get('comparison_operator')
@builtins.property
def datapoints_to_alarm(self) -> typing.Optional[jsii.Number]:
"""The number of datapoints that must be breaching to trigger the alarm.
This is used only if you are setting an "M
out of N" alarm. In that case, this value is the M. For more information, see Evaluating an Alarm in the Amazon
CloudWatch User Guide.
default
:default: ``evaluationPeriods``
see
:see: https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/AlarmThatSendsEmail.html#alarm-evaluation
"""
return self._values.get('datapoints_to_alarm')
@builtins.property
def evaluate_low_sample_count_percentile(self) -> typing.Optional[str]:
"""Specifies whether to evaluate the data and potentially change the alarm state if there are too few data points to be statistically significant.
Used only for alarms that are based on percentiles.
default
:default: - Not configured.
"""
return self._values.get('evaluate_low_sample_count_percentile')
@builtins.property
def period(self) -> typing.Optional[aws_cdk.core.Duration]:
"""The period over which the specified statistic is applied.
default
:default: Duration.minutes(5)
"""
return self._values.get('period')
@builtins.property
def statistic(self) -> typing.Optional[str]:
"""What function to use for aggregating.
Can be one of the following:
- "Minimum" | "min"
- "Maximum" | "max"
- "Average" | "avg"
- "Sum" | "sum"
- "SampleCount | "n"
- "pNN.NN"
default
:default: Average
"""
return self._values.get('statistic')
@builtins.property
def treat_missing_data(self) -> typing.Optional["TreatMissingData"]:
"""Sets how this alarm is to handle missing data points.
default
:default: TreatMissingData.Missing
"""
return self._values.get('treat_missing_data')
@builtins.property
def metric(self) -> "IMetric":
"""The metric to add the alarm on.
Metric objects can be obtained from most resources, or you can construct
custom Metric objects by instantiating one.
"""
return self._values.get('metric')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'AlarmProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
class Dashboard(aws_cdk.core.Resource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.Dashboard"):
"""A CloudWatch dashboard."""
def __init__(self, scope: aws_cdk.core.Construct, id: str, *, dashboard_name: typing.Optional[str]=None, end: typing.Optional[str]=None, period_override: typing.Optional["PeriodOverride"]=None, start: typing.Optional[str]=None, widgets: typing.Optional[typing.List[typing.List["IWidget"]]]=None) -> None:
"""
:param scope: -
:param id: -
:param dashboard_name: Name of the dashboard. If set, must only contain alphanumerics, dash (-) and underscore (_) Default: - automatically generated name
:param end: The end of the time range to use for each widget on the dashboard when the dashboard loads. If you specify a value for end, you must also specify a value for start. Specify an absolute time in the ISO 8601 format. For example, 2018-12-17T06:00:00.000Z. Default: When the dashboard loads, the end date will be the current time.
:param period_override: Use this field to specify the period for the graphs when the dashboard loads. Specifying ``Auto`` causes the period of all graphs on the dashboard to automatically adapt to the time range of the dashboard. Specifying ``Inherit`` ensures that the period set for each graph is always obeyed. Default: Auto
:param start: The start of the time range to use for each widget on the dashboard. You can specify start without specifying end to specify a relative time range that ends with the current time. In this case, the value of start must begin with -P, and you can use M, H, D, W and M as abbreviations for minutes, hours, days, weeks and months. For example, -PT8H shows the last 8 hours and -P3M shows the last three months. You can also use start along with an end field, to specify an absolute time range. When specifying an absolute time range, use the ISO 8601 format. For example, 2018-12-17T06:00:00.000Z. Default: When the dashboard loads, the start time will be the default time range.
:param widgets: Initial set of widgets on the dashboard. One array represents a row of widgets. Default: - No widgets
"""
props = DashboardProps(dashboard_name=dashboard_name, end=end, period_override=period_override, start=start, widgets=widgets)
jsii.create(Dashboard, self, [scope, id, props])
@jsii.member(jsii_name="addWidgets")
def add_widgets(self, *widgets: "IWidget") -> None:
"""Add a widget to the dashboard.
Widgets given in multiple calls to add() will be laid out stacked on
top of each other.
Multiple widgets added in the same call to add() will be laid out next
to each other.
:param widgets: -
"""
return jsii.invoke(self, "addWidgets", [*widgets])
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.DashboardProps", jsii_struct_bases=[], name_mapping={'dashboard_name': 'dashboardName', 'end': 'end', 'period_override': 'periodOverride', 'start': 'start', 'widgets': 'widgets'})
class DashboardProps():
def __init__(self, *, dashboard_name: typing.Optional[str]=None, end: typing.Optional[str]=None, period_override: typing.Optional["PeriodOverride"]=None, start: typing.Optional[str]=None, widgets: typing.Optional[typing.List[typing.List["IWidget"]]]=None):
"""
:param dashboard_name: Name of the dashboard. If set, must only contain alphanumerics, dash (-) and underscore (_) Default: - automatically generated name
:param end: The end of the time range to use for each widget on the dashboard when the dashboard loads. If you specify a value for end, you must also specify a value for start. Specify an absolute time in the ISO 8601 format. For example, 2018-12-17T06:00:00.000Z. Default: When the dashboard loads, the end date will be the current time.
:param period_override: Use this field to specify the period for the graphs when the dashboard loads. Specifying ``Auto`` causes the period of all graphs on the dashboard to automatically adapt to the time range of the dashboard. Specifying ``Inherit`` ensures that the period set for each graph is always obeyed. Default: Auto
:param start: The start of the time range to use for each widget on the dashboard. You can specify start without specifying end to specify a relative time range that ends with the current time. In this case, the value of start must begin with -P, and you can use M, H, D, W and M as abbreviations for minutes, hours, days, weeks and months. For example, -PT8H shows the last 8 hours and -P3M shows the last three months. You can also use start along with an end field, to specify an absolute time range. When specifying an absolute time range, use the ISO 8601 format. For example, 2018-12-17T06:00:00.000Z. Default: When the dashboard loads, the start time will be the default time range.
:param widgets: Initial set of widgets on the dashboard. One array represents a row of widgets. Default: - No widgets
"""
self._values = {
}
if dashboard_name is not None: self._values["dashboard_name"] = dashboard_name
if end is not None: self._values["end"] = end
if period_override is not None: self._values["period_override"] = period_override
if start is not None: self._values["start"] = start
if widgets is not None: self._values["widgets"] = widgets
@builtins.property
def dashboard_name(self) -> typing.Optional[str]:
"""Name of the dashboard.
If set, must only contain alphanumerics, dash (-) and underscore (_)
default
:default: - automatically generated name
"""
return self._values.get('dashboard_name')
@builtins.property
def end(self) -> typing.Optional[str]:
"""The end of the time range to use for each widget on the dashboard when the dashboard loads.
If you specify a value for end, you must also specify a value for start.
Specify an absolute time in the ISO 8601 format. For example, 2018-12-17T06:00:00.000Z.
default
:default: When the dashboard loads, the end date will be the current time.
"""
return self._values.get('end')
@builtins.property
def period_override(self) -> typing.Optional["PeriodOverride"]:
"""Use this field to specify the period for the graphs when the dashboard loads.
Specifying ``Auto`` causes the period of all graphs on the dashboard to automatically adapt to the time range of the dashboard.
Specifying ``Inherit`` ensures that the period set for each graph is always obeyed.
default
:default: Auto
"""
return self._values.get('period_override')
@builtins.property
def start(self) -> typing.Optional[str]:
"""The start of the time range to use for each widget on the dashboard.
You can specify start without specifying end to specify a relative time range that ends with the current time.
In this case, the value of start must begin with -P, and you can use M, H, D, W and M as abbreviations for
minutes, hours, days, weeks and months. For example, -PT8H shows the last 8 hours and -P3M shows the last three months.
You can also use start along with an end field, to specify an absolute time range.
When specifying an absolute time range, use the ISO 8601 format. For example, 2018-12-17T06:00:00.000Z.
default
:default: When the dashboard loads, the start time will be the default time range.
"""
return self._values.get('start')
@builtins.property
def widgets(self) -> typing.Optional[typing.List[typing.List["IWidget"]]]:
"""Initial set of widgets on the dashboard.
One array represents a row of widgets.
default
:default: - No widgets
"""
return self._values.get('widgets')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'DashboardProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.Dimension", jsii_struct_bases=[], name_mapping={'name': 'name', 'value': 'value'})
class Dimension():
def __init__(self, *, name: str, value: typing.Any):
"""Metric dimension.
:param name: Name of the dimension.
:param value: Value of the dimension.
"""
self._values = {
'name': name,
'value': value,
}
@builtins.property
def name(self) -> str:
"""Name of the dimension."""
return self._values.get('name')
@builtins.property
def value(self) -> typing.Any:
"""Value of the dimension."""
return self._values.get('value')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'Dimension(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
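# Illustrative sketch (not generated code): a dimension pinning a metric to a
# hypothetical SQS queue name.
def _example_dimension() -> Dimension:
    """Sketch: a name/value metric dimension with placeholder values."""
    return Dimension(name="QueueName", value="example-queue")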
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.HorizontalAnnotation", jsii_struct_bases=[], name_mapping={'value': 'value', 'color': 'color', 'fill': 'fill', 'label': 'label', 'visible': 'visible'})
class HorizontalAnnotation():
def __init__(self, *, value: jsii.Number, color: typing.Optional[str]=None, fill: typing.Optional["Shading"]=None, label: typing.Optional[str]=None, visible: typing.Optional[bool]=None):
"""Horizontal annotation to be added to a graph.
:param value: The value of the annotation.
:param color: Hex color code to be used for the annotation. Default: Automatic color
:param fill: Add shading above or below the annotation. Default: No shading
:param label: Label for the annotation. Default: No label
:param visible: Whether the annotation is visible. Default: true
"""
self._values = {
'value': value,
}
if color is not None: self._values["color"] = color
if fill is not None: self._values["fill"] = fill
if label is not None: self._values["label"] = label
if visible is not None: self._values["visible"] = visible
@builtins.property
def value(self) -> jsii.Number:
"""The value of the annotation."""
return self._values.get('value')
@builtins.property
def color(self) -> typing.Optional[str]:
"""Hex color code to be used for the annotation.
default
:default: Automatic color
"""
return self._values.get('color')
@builtins.property
def fill(self) -> typing.Optional["Shading"]:
"""Add shading above or below the annotation.
default
:default: No shading
"""
return self._values.get('fill')
@builtins.property
def label(self) -> typing.Optional[str]:
"""Label for the annotation.
default
:default: No label
"""
return self._values.get('label')
@builtins.property
def visible(self) -> typing.Optional[bool]:
"""Whether the annotation is visible.
default
:default: true
"""
return self._values.get('visible')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'HorizontalAnnotation(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
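# Illustrative sketch (not generated code): a labelled threshold line for a
# graph; the value and label are placeholders, and color/fill are left to
# their automatic defaults.
def _example_annotation() -> HorizontalAnnotation:
    """Sketch: a horizontal annotation at y = 1000."""
    return HorizontalAnnotation(value=1000, label="SLA threshold")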
@jsii.interface(jsii_type="@aws-cdk/aws-cloudwatch.IAlarm")
class IAlarm(aws_cdk.core.IResource, jsii.compat.Protocol):
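"""Represents a CloudWatch alarm."""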
@builtins.staticmethod
def __jsii_proxy_class__():
return _IAlarmProxy
@builtins.property
@jsii.member(jsii_name="alarmArn")
def alarm_arn(self) -> str:
"""
attribute:
:attribute:: true
"""
...
@builtins.property
@jsii.member(jsii_name="alarmName")
def alarm_name(self) -> str:
"""
attribute:
:attribute:: true
"""
...
class _IAlarmProxy(jsii.proxy_for(aws_cdk.core.IResource)):
__jsii_type__ = "@aws-cdk/aws-cloudwatch.IAlarm"
@builtins.property
@jsii.member(jsii_name="alarmArn")
def alarm_arn(self) -> str:
"""
attribute:
:attribute:: true
"""
return jsii.get(self, "alarmArn")
@builtins.property
@jsii.member(jsii_name="alarmName")
def alarm_name(self) -> str:
"""
attribute:
:attribute:: true
"""
return jsii.get(self, "alarmName")
@jsii.implements(IAlarm)
class Alarm(aws_cdk.core.Resource, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.Alarm"):
"""An alarm on a CloudWatch metric."""
def __init__(self, scope: aws_cdk.core.Construct, id: str, *, metric: "IMetric", evaluation_periods: jsii.Number, threshold: jsii.Number, actions_enabled: typing.Optional[bool]=None, alarm_description: typing.Optional[str]=None, alarm_name: typing.Optional[str]=None, comparison_operator: typing.Optional["ComparisonOperator"]=None, datapoints_to_alarm: typing.Optional[jsii.Number]=None, evaluate_low_sample_count_percentile: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, statistic: typing.Optional[str]=None, treat_missing_data: typing.Optional["TreatMissingData"]=None) -> None:
"""
:param scope: -
:param id: -
:param metric: The metric to add the alarm on. Metric objects can be obtained from most resources, or you can construct custom Metric objects by instantiating one.
:param evaluation_periods: The number of periods over which data is compared to the specified threshold.
:param threshold: The value against which the specified statistic is compared.
:param actions_enabled: Whether the actions for this alarm are enabled. Default: true
:param alarm_description: Description for the alarm. Default: No description
:param alarm_name: Name of the alarm. Default: Automatically generated name
:param comparison_operator: Comparison to use to check if metric is breaching. Default: GreaterThanOrEqualToThreshold
:param datapoints_to_alarm: The number of datapoints that must be breaching to trigger the alarm. This is used only if you are setting an "M out of N" alarm. In that case, this value is the M. For more information, see Evaluating an Alarm in the Amazon CloudWatch User Guide. Default: ``evaluationPeriods``
:param evaluate_low_sample_count_percentile: Specifies whether to evaluate the data and potentially change the alarm state if there are too few data points to be statistically significant. Used only for alarms that are based on percentiles. Default: - Not configured.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param treat_missing_data: Sets how this alarm is to handle missing data points. Default: TreatMissingData.Missing
"""
props = AlarmProps(metric=metric, evaluation_periods=evaluation_periods, threshold=threshold, actions_enabled=actions_enabled, alarm_description=alarm_description, alarm_name=alarm_name, comparison_operator=comparison_operator, datapoints_to_alarm=datapoints_to_alarm, evaluate_low_sample_count_percentile=evaluate_low_sample_count_percentile, period=period, statistic=statistic, treat_missing_data=treat_missing_data)
jsii.create(Alarm, self, [scope, id, props])
@jsii.member(jsii_name="fromAlarmArn")
@builtins.classmethod
def from_alarm_arn(cls, scope: aws_cdk.core.Construct, id: str, alarm_arn: str) -> "IAlarm":
"""
:param scope: -
:param id: -
:param alarm_arn: -
"""
return jsii.sinvoke(cls, "fromAlarmArn", [scope, id, alarm_arn])
@jsii.member(jsii_name="addAlarmAction")
def add_alarm_action(self, *actions: "IAlarmAction") -> None:
"""Trigger this action if the alarm fires.
Typically the ARN of an SNS topic or ARN of an AutoScaling policy.
:param actions: -
"""
return jsii.invoke(self, "addAlarmAction", [*actions])
@jsii.member(jsii_name="addInsufficientDataAction")
def add_insufficient_data_action(self, *actions: "IAlarmAction") -> None:
"""Trigger this action if there is insufficient data to evaluate the alarm.
Typically the ARN of an SNS topic or ARN of an AutoScaling policy.
:param actions: -
"""
return jsii.invoke(self, "addInsufficientDataAction", [*actions])
@jsii.member(jsii_name="addOkAction")
def add_ok_action(self, *actions: "IAlarmAction") -> None:
"""Trigger this action if the alarm returns from breaching state into ok state.
Typically the ARN of an SNS topic or ARN of an AutoScaling policy.
:param actions: -
"""
return jsii.invoke(self, "addOkAction", [*actions])
@jsii.member(jsii_name="toAnnotation")
def to_annotation(self) -> "HorizontalAnnotation":
"""Turn this alarm into a horizontal annotation.
This is useful if you want to represent an Alarm in a non-AlarmWidget.
An ``AlarmWidget`` can directly show an alarm, but it can only show a
single alarm and no other metrics. Instead, you can convert the alarm to
a HorizontalAnnotation and add it as an annotation to another graph.
This might be useful if:
- You want to show multiple alarms inside a single graph, for example if
you have both a "small margin/long period" alarm as well as a
"large margin/short period" alarm.
- You want to show an Alarm line in a graph with multiple metrics in it.
"""
return jsii.invoke(self, "toAnnotation", [])
@builtins.property
@jsii.member(jsii_name="alarmArn")
def alarm_arn(self) -> str:
"""ARN of this alarm.
attribute:
:attribute:: true
"""
return jsii.get(self, "alarmArn")
@builtins.property
@jsii.member(jsii_name="alarmName")
def alarm_name(self) -> str:
"""Name of this alarm.
attribute:
:attribute:: true
"""
return jsii.get(self, "alarmName")
@builtins.property
@jsii.member(jsii_name="metric")
def metric(self) -> "IMetric":
"""The metric object this alarm was based on."""
return jsii.get(self, "metric")
@jsii.interface(jsii_type="@aws-cdk/aws-cloudwatch.IAlarmAction")
class IAlarmAction(jsii.compat.Protocol):
"""Interface for objects that can be the targets of CloudWatch alarm actions."""
@builtins.staticmethod
def __jsii_proxy_class__():
return _IAlarmActionProxy
@jsii.member(jsii_name="bind")
def bind(self, scope: aws_cdk.core.Construct, alarm: "IAlarm") -> "AlarmActionConfig":
"""
:param scope: -
:param alarm: -
"""
...
class _IAlarmActionProxy():
"""Interface for objects that can be the targets of CloudWatch alarm actions."""
__jsii_type__ = "@aws-cdk/aws-cloudwatch.IAlarmAction"
@jsii.member(jsii_name="bind")
def bind(self, scope: aws_cdk.core.Construct, alarm: "IAlarm") -> "AlarmActionConfig":
"""
:param scope: -
:param alarm: -
"""
return jsii.invoke(self, "bind", [scope, alarm])
@jsii.interface(jsii_type="@aws-cdk/aws-cloudwatch.IMetric")
class IMetric(jsii.compat.Protocol):
"""Interface for metrics."""
@builtins.staticmethod
def __jsii_proxy_class__():
return _IMetricProxy
@jsii.member(jsii_name="toAlarmConfig")
def to_alarm_config(self) -> "MetricAlarmConfig":
"""Turn this metric object into an alarm configuration.
deprecated
:deprecated: Use ``toMetricsConfig()`` instead.
stability
:stability: deprecated
"""
...
@jsii.member(jsii_name="toGraphConfig")
def to_graph_config(self) -> "MetricGraphConfig":
"""Turn this metric object into a graph configuration.
deprecated
:deprecated: Use ``toMetricsConfig()`` instead.
stability
:stability: deprecated
"""
...
@jsii.member(jsii_name="toMetricConfig")
def to_metric_config(self) -> "MetricConfig":
"""Inspect the details of the metric object."""
...
class _IMetricProxy():
"""Interface for metrics."""
__jsii_type__ = "@aws-cdk/aws-cloudwatch.IMetric"
@jsii.member(jsii_name="toAlarmConfig")
def to_alarm_config(self) -> "MetricAlarmConfig":
"""Turn this metric object into an alarm configuration.
deprecated
:deprecated: Use ``toMetricsConfig()`` instead.
stability
:stability: deprecated
"""
return jsii.invoke(self, "toAlarmConfig", [])
@jsii.member(jsii_name="toGraphConfig")
def to_graph_config(self) -> "MetricGraphConfig":
"""Turn this metric object into a graph configuration.
deprecated
:deprecated: Use ``toMetricsConfig()`` instead.
stability
:stability: deprecated
"""
return jsii.invoke(self, "toGraphConfig", [])
@jsii.member(jsii_name="toMetricConfig")
def to_metric_config(self) -> "MetricConfig":
"""Inspect the details of the metric object."""
return jsii.invoke(self, "toMetricConfig", [])
@jsii.interface(jsii_type="@aws-cdk/aws-cloudwatch.IWidget")
class IWidget(jsii.compat.Protocol):
"""A single dashboard widget."""
@builtins.staticmethod
def __jsii_proxy_class__():
return _IWidgetProxy
@builtins.property
@jsii.member(jsii_name="height")
def height(self) -> jsii.Number:
"""The amount of vertical grid units the widget will take up."""
...
@builtins.property
@jsii.member(jsii_name="width")
def width(self) -> jsii.Number:
"""The amount of horizontal grid units the widget will take up."""
...
@jsii.member(jsii_name="position")
def position(self, x: jsii.Number, y: jsii.Number) -> None:
"""Place the widget at a given position.
:param x: -
:param y: -
"""
...
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
...
class _IWidgetProxy():
"""A single dashboard widget."""
__jsii_type__ = "@aws-cdk/aws-cloudwatch.IWidget"
@builtins.property
@jsii.member(jsii_name="height")
def height(self) -> jsii.Number:
"""The amount of vertical grid units the widget will take up."""
return jsii.get(self, "height")
@builtins.property
@jsii.member(jsii_name="width")
def width(self) -> jsii.Number:
"""The amount of horizontal grid units the widget will take up."""
return jsii.get(self, "width")
@jsii.member(jsii_name="position")
def position(self, x: jsii.Number, y: jsii.Number) -> None:
"""Place the widget at a given position.
:param x: -
:param y: -
"""
return jsii.invoke(self, "position", [x, y])
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
@jsii.implements(IWidget)
class Column(metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.Column"):
"""A widget that contains other widgets in a vertical column.
Widgets will be laid out vertically, one below the other.
"""
def __init__(self, *widgets: "IWidget") -> None:
"""
:param widgets: -
"""
jsii.create(Column, self, [*widgets])
@jsii.member(jsii_name="position")
def position(self, x: jsii.Number, y: jsii.Number) -> None:
"""Place the widget at a given position.
:param x: -
:param y: -
"""
return jsii.invoke(self, "position", [x, y])
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
@builtins.property
@jsii.member(jsii_name="height")
def height(self) -> jsii.Number:
"""The amount of vertical grid units the widget will take up."""
return jsii.get(self, "height")
@builtins.property
@jsii.member(jsii_name="width")
def width(self) -> jsii.Number:
"""The amount of horizontal grid units the widget will take up."""
return jsii.get(self, "width")
@jsii.implements(IWidget)
class ConcreteWidget(metaclass=jsii.JSIIAbstractClass, jsii_type="@aws-cdk/aws-cloudwatch.ConcreteWidget"):
"""A real CloudWatch widget that has its own fixed size and remembers its position.
This is in contrast to other widgets which exist for layout purposes.
"""
@builtins.staticmethod
def __jsii_proxy_class__():
return _ConcreteWidgetProxy
def __init__(self, width: jsii.Number, height: jsii.Number) -> None:
"""
:param width: -
:param height: -
"""
jsii.create(ConcreteWidget, self, [width, height])
@jsii.member(jsii_name="position")
def position(self, x: jsii.Number, y: jsii.Number) -> None:
"""Place the widget at a given position.
:param x: -
:param y: -
"""
return jsii.invoke(self, "position", [x, y])
@jsii.member(jsii_name="toJson")
@abc.abstractmethod
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
...
@builtins.property
@jsii.member(jsii_name="height")
def height(self) -> jsii.Number:
"""The amount of vertical grid units the widget will take up."""
return jsii.get(self, "height")
@builtins.property
@jsii.member(jsii_name="width")
def width(self) -> jsii.Number:
"""The amount of horizontal grid units the widget will take up."""
return jsii.get(self, "width")
@builtins.property
@jsii.member(jsii_name="x")
def _x(self) -> typing.Optional[jsii.Number]:
return jsii.get(self, "x")
@_x.setter
def _x(self, value: typing.Optional[jsii.Number]):
jsii.set(self, "x", value)
@builtins.property
@jsii.member(jsii_name="y")
def _y(self) -> typing.Optional[jsii.Number]:
return jsii.get(self, "y")
@_y.setter
def _y(self, value: typing.Optional[jsii.Number]):
jsii.set(self, "y", value)
class _ConcreteWidgetProxy(ConcreteWidget):
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
class AlarmWidget(ConcreteWidget, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.AlarmWidget"):
"""Display the metric associated with an alarm, including the alarm line."""
def __init__(self, *, alarm: "IAlarm", left_y_axis: typing.Optional["YAxisProps"]=None, height: typing.Optional[jsii.Number]=None, region: typing.Optional[str]=None, title: typing.Optional[str]=None, width: typing.Optional[jsii.Number]=None) -> None:
"""
:param alarm: The alarm to show.
:param left_y_axis: Left Y axis.
:param height: Height of the widget. Default: Depends on the type of widget
:param region: The region the metrics of this graph should be taken from. Default: Current region
:param title: Title for the graph.
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
"""
props = AlarmWidgetProps(alarm=alarm, left_y_axis=left_y_axis, height=height, region=region, title=title, width=width)
jsii.create(AlarmWidget, self, [props])
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
class GraphWidget(ConcreteWidget, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.GraphWidget"):
"""A dashboard widget that displays metrics."""
def __init__(self, *, left: typing.Optional[typing.List["IMetric"]]=None, left_annotations: typing.Optional[typing.List["HorizontalAnnotation"]]=None, left_y_axis: typing.Optional["YAxisProps"]=None, right: typing.Optional[typing.List["IMetric"]]=None, right_annotations: typing.Optional[typing.List["HorizontalAnnotation"]]=None, right_y_axis: typing.Optional["YAxisProps"]=None, stacked: typing.Optional[bool]=None, height: typing.Optional[jsii.Number]=None, region: typing.Optional[str]=None, title: typing.Optional[str]=None, width: typing.Optional[jsii.Number]=None) -> None:
"""
:param left: Metrics to display on left Y axis.
:param left_annotations: Annotations for the left Y axis.
:param left_y_axis: Left Y axis.
:param right: Metrics to display on right Y axis.
:param right_annotations: Annotations for the right Y axis.
:param right_y_axis: Right Y axis.
:param stacked: Whether the graph should be shown as stacked lines.
:param height: Height of the widget. Default: Depends on the type of widget
:param region: The region the metrics of this graph should be taken from. Default: Current region
:param title: Title for the graph.
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
"""
props = GraphWidgetProps(left=left, left_annotations=left_annotations, left_y_axis=left_y_axis, right=right, right_annotations=right_annotations, right_y_axis=right_y_axis, stacked=stacked, height=height, region=region, title=title, width=width)
jsii.create(GraphWidget, self, [props])
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
@jsii.implements(IMetric)
class MathExpression(metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.MathExpression"):
"""A math expression built with metric(s) emitted by a service.
    The math expression is a combination of an expression (x+y) and the metrics to apply the expression on.
    It also contains metadata which is used only in graphs, such as color and label.
    It makes sense to embed this here, so that compound constructs can attach
    that metadata to metrics they expose.
    This class does not represent a resource, and hence is not a construct. Instead,
    MathExpression is an abstraction that makes it easy to specify metrics for use in both
    alarms and graphs.
"""
def __init__(self, *, expression: str, using_metrics: typing.Mapping[str,"IMetric"], color: typing.Optional[str]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None) -> None:
"""
:param expression: The expression defining the metric.
:param using_metrics: The metrics used in the expression, in a map. The key is the identifier that represents the given metric in the expression, and the value is the actual Metric object.
:param color: Color for this metric when added to a Graph in a Dashboard. Default: - Automatic color
:param label: Label for this metric when added to a Graph in a Dashboard. Default: - Expression value is used as label
:param period: The period over which the expression's statistics are applied. This period overrides all periods in the metrics used in this math expression. Default: Duration.minutes(5)
"""
props = MathExpressionProps(expression=expression, using_metrics=using_metrics, color=color, label=label, period=period)
jsii.create(MathExpression, self, [props])
@jsii.member(jsii_name="createAlarm")
def create_alarm(self, scope: aws_cdk.core.Construct, id: str, *, evaluation_periods: jsii.Number, threshold: jsii.Number, actions_enabled: typing.Optional[bool]=None, alarm_description: typing.Optional[str]=None, alarm_name: typing.Optional[str]=None, comparison_operator: typing.Optional["ComparisonOperator"]=None, datapoints_to_alarm: typing.Optional[jsii.Number]=None, evaluate_low_sample_count_percentile: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, statistic: typing.Optional[str]=None, treat_missing_data: typing.Optional["TreatMissingData"]=None) -> "Alarm":
"""Make a new Alarm for this metric.
Combines both properties that may adjust the metric (aggregation) as well
as alarm properties.
:param scope: -
:param id: -
:param evaluation_periods: The number of periods over which data is compared to the specified threshold.
:param threshold: The value against which the specified statistic is compared.
:param actions_enabled: Whether the actions for this alarm are enabled. Default: true
:param alarm_description: Description for the alarm. Default: No description
:param alarm_name: Name of the alarm. Default: Automatically generated name
:param comparison_operator: Comparison to use to check if metric is breaching. Default: GreaterThanOrEqualToThreshold
:param datapoints_to_alarm: The number of datapoints that must be breaching to trigger the alarm. This is used only if you are setting an "M out of N" alarm. In that case, this value is the M. For more information, see Evaluating an Alarm in the Amazon CloudWatch User Guide. Default: ``evaluationPeriods``
:param evaluate_low_sample_count_percentile: Specifies whether to evaluate the data and potentially change the alarm state if there are too few data points to be statistically significant. Used only for alarms that are based on percentiles. Default: - Not configured.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
        :param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param treat_missing_data: Sets how this alarm is to handle missing data points. Default: TreatMissingData.Missing
"""
props = CreateAlarmOptions(evaluation_periods=evaluation_periods, threshold=threshold, actions_enabled=actions_enabled, alarm_description=alarm_description, alarm_name=alarm_name, comparison_operator=comparison_operator, datapoints_to_alarm=datapoints_to_alarm, evaluate_low_sample_count_percentile=evaluate_low_sample_count_percentile, period=period, statistic=statistic, treat_missing_data=treat_missing_data)
return jsii.invoke(self, "createAlarm", [scope, id, props])
@jsii.member(jsii_name="toAlarmConfig")
def to_alarm_config(self) -> "MetricAlarmConfig":
"""Turn this metric object into an alarm configuration."""
return jsii.invoke(self, "toAlarmConfig", [])
@jsii.member(jsii_name="toGraphConfig")
def to_graph_config(self) -> "MetricGraphConfig":
"""Turn this metric object into a graph configuration."""
return jsii.invoke(self, "toGraphConfig", [])
@jsii.member(jsii_name="toMetricConfig")
def to_metric_config(self) -> "MetricConfig":
"""Inspect the details of the metric object."""
return jsii.invoke(self, "toMetricConfig", [])
@jsii.member(jsii_name="toString")
def to_string(self) -> str:
"""Returns a string representation of an object."""
return jsii.invoke(self, "toString", [])
@jsii.member(jsii_name="with")
def with_(self, *, color: typing.Optional[str]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None) -> "MathExpression":
"""Return a copy of Metric with properties changed.
All properties except namespace and metricName can be changed.
:param color: Color for this metric when added to a Graph in a Dashboard. Default: - Automatic color
:param label: Label for this metric when added to a Graph in a Dashboard. Default: - Expression value is used as label
:param period: The period over which the expression's statistics are applied. This period overrides all periods in the metrics used in this math expression. Default: Duration.minutes(5)
"""
props = MathExpressionOptions(color=color, label=label, period=period)
return jsii.invoke(self, "with", [props])
@builtins.property
@jsii.member(jsii_name="expression")
def expression(self) -> str:
"""The expression defining the metric."""
return jsii.get(self, "expression")
@builtins.property
@jsii.member(jsii_name="period")
def period(self) -> aws_cdk.core.Duration:
"""Aggregation period of this metric."""
return jsii.get(self, "period")
@builtins.property
@jsii.member(jsii_name="usingMetrics")
def using_metrics(self) -> typing.Mapping[str,"IMetric"]:
"""The metrics used in the expression as KeyValuePair <id, metric>."""
return jsii.get(self, "usingMetrics")
@builtins.property
@jsii.member(jsii_name="color")
def color(self) -> typing.Optional[str]:
"""Color for this metric when added to a Graph."""
return jsii.get(self, "color")
@builtins.property
@jsii.member(jsii_name="label")
def label(self) -> typing.Optional[str]:
"""Label for this metric when added to a Graph."""
return jsii.get(self, "label")
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MathExpressionOptions", jsii_struct_bases=[], name_mapping={'color': 'color', 'label': 'label', 'period': 'period'})
class MathExpressionOptions():
def __init__(self, *, color: typing.Optional[str]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None):
"""Configurable options for MathExpressions.
:param color: Color for this metric when added to a Graph in a Dashboard. Default: - Automatic color
:param label: Label for this metric when added to a Graph in a Dashboard. Default: - Expression value is used as label
:param period: The period over which the expression's statistics are applied. This period overrides all periods in the metrics used in this math expression. Default: Duration.minutes(5)
"""
self._values = {
}
if color is not None: self._values["color"] = color
if label is not None: self._values["label"] = label
if period is not None: self._values["period"] = period
@builtins.property
def color(self) -> typing.Optional[str]:
"""Color for this metric when added to a Graph in a Dashboard.
default
:default: - Automatic color
"""
return self._values.get('color')
@builtins.property
def label(self) -> typing.Optional[str]:
"""Label for this metric when added to a Graph in a Dashboard.
default
:default: - Expression value is used as label
"""
return self._values.get('label')
@builtins.property
def period(self) -> typing.Optional[aws_cdk.core.Duration]:
"""The period over which the expression's statistics are applied.
This period overrides all periods in the metrics used in this
math expression.
default
:default: Duration.minutes(5)
"""
return self._values.get('period')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MathExpressionOptions(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MathExpressionProps", jsii_struct_bases=[MathExpressionOptions], name_mapping={'color': 'color', 'label': 'label', 'period': 'period', 'expression': 'expression', 'using_metrics': 'usingMetrics'})
class MathExpressionProps(MathExpressionOptions):
def __init__(self, *, color: typing.Optional[str]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, expression: str, using_metrics: typing.Mapping[str,"IMetric"]):
"""Properties for a MathExpression.
:param color: Color for this metric when added to a Graph in a Dashboard. Default: - Automatic color
:param label: Label for this metric when added to a Graph in a Dashboard. Default: - Expression value is used as label
:param period: The period over which the expression's statistics are applied. This period overrides all periods in the metrics used in this math expression. Default: Duration.minutes(5)
:param expression: The expression defining the metric.
:param using_metrics: The metrics used in the expression, in a map. The key is the identifier that represents the given metric in the expression, and the value is the actual Metric object.
"""
self._values = {
'expression': expression,
'using_metrics': using_metrics,
}
if color is not None: self._values["color"] = color
if label is not None: self._values["label"] = label
if period is not None: self._values["period"] = period
@builtins.property
def color(self) -> typing.Optional[str]:
"""Color for this metric when added to a Graph in a Dashboard.
default
:default: - Automatic color
"""
return self._values.get('color')
@builtins.property
def label(self) -> typing.Optional[str]:
"""Label for this metric when added to a Graph in a Dashboard.
default
:default: - Expression value is used as label
"""
return self._values.get('label')
@builtins.property
def period(self) -> typing.Optional[aws_cdk.core.Duration]:
"""The period over which the expression's statistics are applied.
This period overrides all periods in the metrics used in this
math expression.
default
:default: Duration.minutes(5)
"""
return self._values.get('period')
@builtins.property
def expression(self) -> str:
"""The expression defining the metric."""
return self._values.get('expression')
@builtins.property
def using_metrics(self) -> typing.Mapping[str,"IMetric"]:
"""The metrics used in the expression, in a map.
The key is the identifier that represents the given metric in the
expression, and the value is the actual Metric object.
"""
return self._values.get('using_metrics')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MathExpressionProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.implements(IMetric)
class Metric(metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.Metric"):
"""A metric emitted by a service.
The metric is a combination of a metric identifier (namespace, name and dimensions)
and an aggregation function (statistic, period and unit).
It also contains metadata which is used only in graphs, such as color and label.
    It makes sense to embed this here, so that compound constructs can attach
    that metadata to metrics they expose.
    This class does not represent a resource, and hence is not a construct. Instead,
    Metric is an abstraction that makes it easy to specify metrics for use in both
    alarms and graphs.
"""
def __init__(self, *, metric_name: str, namespace: str, account: typing.Optional[str]=None, color: typing.Optional[str]=None, dimensions: typing.Optional[typing.Mapping[str,typing.Any]]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, region: typing.Optional[str]=None, statistic: typing.Optional[str]=None, unit: typing.Optional["Unit"]=None) -> None:
"""
:param metric_name: Name of the metric.
:param namespace: Namespace of the metric.
:param account: Account which this metric comes from. Default: Deployment account.
:param color: Color for this metric when added to a Graph in a Dashboard.
:param dimensions: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: Deployment region.
        :param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
        :param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: All metric datums in the given metric stream
"""
props = MetricProps(metric_name=metric_name, namespace=namespace, account=account, color=color, dimensions=dimensions, label=label, period=period, region=region, statistic=statistic, unit=unit)
jsii.create(Metric, self, [props])
@jsii.member(jsii_name="grantPutMetricData")
@builtins.classmethod
def grant_put_metric_data(cls, grantee: aws_cdk.aws_iam.IGrantable) -> aws_cdk.aws_iam.Grant:
"""Grant permissions to the given identity to write metrics.
:param grantee: The IAM identity to give permissions to.
"""
return jsii.sinvoke(cls, "grantPutMetricData", [grantee])
@jsii.member(jsii_name="attachTo")
def attach_to(self, scope: aws_cdk.core.Construct) -> "Metric":
"""Attach the metric object to the given construct scope.
Returns a Metric object that uses the account and region from the Stack
the given construct is defined in. If the metric is subsequently used
in a Dashboard or Alarm in a different Stack defined in a different
account or region, the appropriate 'region' and 'account' fields
will be added to it.
If the scope we attach to is in an environment-agnostic stack,
nothing is done and the same Metric object is returned.
:param scope: -
"""
return jsii.invoke(self, "attachTo", [scope])
@jsii.member(jsii_name="createAlarm")
def create_alarm(self, scope: aws_cdk.core.Construct, id: str, *, evaluation_periods: jsii.Number, threshold: jsii.Number, actions_enabled: typing.Optional[bool]=None, alarm_description: typing.Optional[str]=None, alarm_name: typing.Optional[str]=None, comparison_operator: typing.Optional["ComparisonOperator"]=None, datapoints_to_alarm: typing.Optional[jsii.Number]=None, evaluate_low_sample_count_percentile: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, statistic: typing.Optional[str]=None, treat_missing_data: typing.Optional["TreatMissingData"]=None) -> "Alarm":
"""Make a new Alarm for this metric.
Combines both properties that may adjust the metric (aggregation) as well
as alarm properties.
:param scope: -
:param id: -
:param evaluation_periods: The number of periods over which data is compared to the specified threshold.
:param threshold: The value against which the specified statistic is compared.
:param actions_enabled: Whether the actions for this alarm are enabled. Default: true
:param alarm_description: Description for the alarm. Default: No description
:param alarm_name: Name of the alarm. Default: Automatically generated name
:param comparison_operator: Comparison to use to check if metric is breaching. Default: GreaterThanOrEqualToThreshold
:param datapoints_to_alarm: The number of datapoints that must be breaching to trigger the alarm. This is used only if you are setting an "M out of N" alarm. In that case, this value is the M. For more information, see Evaluating an Alarm in the Amazon CloudWatch User Guide. Default: ``evaluationPeriods``
:param evaluate_low_sample_count_percentile: Specifies whether to evaluate the data and potentially change the alarm state if there are too few data points to be statistically significant. Used only for alarms that are based on percentiles. Default: - Not configured.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
        :param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
:param treat_missing_data: Sets how this alarm is to handle missing data points. Default: TreatMissingData.Missing
"""
props = CreateAlarmOptions(evaluation_periods=evaluation_periods, threshold=threshold, actions_enabled=actions_enabled, alarm_description=alarm_description, alarm_name=alarm_name, comparison_operator=comparison_operator, datapoints_to_alarm=datapoints_to_alarm, evaluate_low_sample_count_percentile=evaluate_low_sample_count_percentile, period=period, statistic=statistic, treat_missing_data=treat_missing_data)
return jsii.invoke(self, "createAlarm", [scope, id, props])
@jsii.member(jsii_name="toAlarmConfig")
def to_alarm_config(self) -> "MetricAlarmConfig":
"""Turn this metric object into an alarm configuration."""
return jsii.invoke(self, "toAlarmConfig", [])
@jsii.member(jsii_name="toGraphConfig")
def to_graph_config(self) -> "MetricGraphConfig":
"""Turn this metric object into a graph configuration."""
return jsii.invoke(self, "toGraphConfig", [])
@jsii.member(jsii_name="toMetricConfig")
def to_metric_config(self) -> "MetricConfig":
"""Inspect the details of the metric object."""
return jsii.invoke(self, "toMetricConfig", [])
@jsii.member(jsii_name="toString")
def to_string(self) -> str:
"""Returns a string representation of an object."""
return jsii.invoke(self, "toString", [])
@jsii.member(jsii_name="with")
def with_(self, *, account: typing.Optional[str]=None, color: typing.Optional[str]=None, dimensions: typing.Optional[typing.Mapping[str,typing.Any]]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, region: typing.Optional[str]=None, statistic: typing.Optional[str]=None, unit: typing.Optional["Unit"]=None) -> "Metric":
"""Return a copy of Metric with properties changed.
All properties except namespace and metricName can be changed.
:param account: Account which this metric comes from. Default: Deployment account.
:param color: Color for this metric when added to a Graph in a Dashboard.
:param dimensions: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: Deployment region.
        :param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
        :param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: All metric datums in the given metric stream
"""
props = MetricOptions(account=account, color=color, dimensions=dimensions, label=label, period=period, region=region, statistic=statistic, unit=unit)
return jsii.invoke(self, "with", [props])
@builtins.property
@jsii.member(jsii_name="metricName")
def metric_name(self) -> str:
return jsii.get(self, "metricName")
@builtins.property
@jsii.member(jsii_name="namespace")
def namespace(self) -> str:
return jsii.get(self, "namespace")
@builtins.property
@jsii.member(jsii_name="period")
def period(self) -> aws_cdk.core.Duration:
return jsii.get(self, "period")
@builtins.property
@jsii.member(jsii_name="statistic")
def statistic(self) -> str:
return jsii.get(self, "statistic")
@builtins.property
@jsii.member(jsii_name="account")
def account(self) -> typing.Optional[str]:
"""Account which this metric comes from.
default
:default: Deployment account.
"""
return jsii.get(self, "account")
@builtins.property
@jsii.member(jsii_name="color")
def color(self) -> typing.Optional[str]:
return jsii.get(self, "color")
@builtins.property
@jsii.member(jsii_name="dimensions")
def dimensions(self) -> typing.Optional[typing.Mapping[str,typing.Any]]:
return jsii.get(self, "dimensions")
@builtins.property
@jsii.member(jsii_name="label")
def label(self) -> typing.Optional[str]:
return jsii.get(self, "label")
@builtins.property
@jsii.member(jsii_name="region")
def region(self) -> typing.Optional[str]:
"""Region which this metric comes from.
default
:default: Deployment region.
"""
return jsii.get(self, "region")
@builtins.property
@jsii.member(jsii_name="unit")
def unit(self) -> typing.Optional["Unit"]:
"""Unit of the metric.
default
:default: None
"""
return jsii.get(self, "unit")
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricAlarmConfig", jsii_struct_bases=[], name_mapping={'metric_name': 'metricName', 'namespace': 'namespace', 'period': 'period', 'dimensions': 'dimensions', 'extended_statistic': 'extendedStatistic', 'statistic': 'statistic', 'unit': 'unit'})
class MetricAlarmConfig():
def __init__(self, *, metric_name: str, namespace: str, period: jsii.Number, dimensions: typing.Optional[typing.List["Dimension"]]=None, extended_statistic: typing.Optional[str]=None, statistic: typing.Optional["Statistic"]=None, unit: typing.Optional["Unit"]=None):
"""Properties used to construct the Metric identifying part of an Alarm.
:param metric_name: Name of the metric.
:param namespace: Namespace of the metric.
:param period: How many seconds to aggregate over.
:param dimensions: The dimensions to apply to the alarm.
:param extended_statistic: Percentile aggregation function to use.
:param statistic: Simple aggregation function to use.
:param unit: The unit of the alarm.
deprecated
:deprecated: Replaced by MetricConfig
stability
:stability: deprecated
"""
self._values = {
'metric_name': metric_name,
'namespace': namespace,
'period': period,
}
if dimensions is not None: self._values["dimensions"] = dimensions
if extended_statistic is not None: self._values["extended_statistic"] = extended_statistic
if statistic is not None: self._values["statistic"] = statistic
if unit is not None: self._values["unit"] = unit
@builtins.property
def metric_name(self) -> str:
"""Name of the metric.
stability
:stability: deprecated
"""
return self._values.get('metric_name')
@builtins.property
def namespace(self) -> str:
"""Namespace of the metric.
stability
:stability: deprecated
"""
return self._values.get('namespace')
@builtins.property
def period(self) -> jsii.Number:
"""How many seconds to aggregate over.
stability
:stability: deprecated
"""
return self._values.get('period')
@builtins.property
def dimensions(self) -> typing.Optional[typing.List["Dimension"]]:
"""The dimensions to apply to the alarm.
stability
:stability: deprecated
"""
return self._values.get('dimensions')
@builtins.property
def extended_statistic(self) -> typing.Optional[str]:
"""Percentile aggregation function to use.
stability
:stability: deprecated
"""
return self._values.get('extended_statistic')
@builtins.property
def statistic(self) -> typing.Optional["Statistic"]:
"""Simple aggregation function to use.
stability
:stability: deprecated
"""
return self._values.get('statistic')
@builtins.property
def unit(self) -> typing.Optional["Unit"]:
"""The unit of the alarm.
stability
:stability: deprecated
"""
return self._values.get('unit')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricAlarmConfig(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricConfig", jsii_struct_bases=[], name_mapping={'math_expression': 'mathExpression', 'metric_stat': 'metricStat', 'rendering_properties': 'renderingProperties'})
class MetricConfig():
def __init__(self, *, math_expression: typing.Optional["MetricExpressionConfig"]=None, metric_stat: typing.Optional["MetricStatConfig"]=None, rendering_properties: typing.Optional[typing.Mapping[str,typing.Any]]=None):
"""Properties of a rendered metric.
:param math_expression: In case the metric is a math expression, the details of the math expression. Default: - unset
:param metric_stat: In case the metric represents a query, the details of the query. Default: - unset
:param rendering_properties: Additional properties which will be rendered if the metric is used in a dashboard. Examples are 'label' and 'color', but any key in here will be added to dashboard graphs. Default: {}
"""
if isinstance(math_expression, dict): math_expression = MetricExpressionConfig(**math_expression)
if isinstance(metric_stat, dict): metric_stat = MetricStatConfig(**metric_stat)
self._values = {
}
if math_expression is not None: self._values["math_expression"] = math_expression
if metric_stat is not None: self._values["metric_stat"] = metric_stat
if rendering_properties is not None: self._values["rendering_properties"] = rendering_properties
@builtins.property
def math_expression(self) -> typing.Optional["MetricExpressionConfig"]:
"""In case the metric is a math expression, the details of the math expression.
default
:default: - unset
"""
return self._values.get('math_expression')
@builtins.property
def metric_stat(self) -> typing.Optional["MetricStatConfig"]:
"""In case the metric represents a query, the details of the query.
default
:default: - unset
"""
return self._values.get('metric_stat')
@builtins.property
def rendering_properties(self) -> typing.Optional[typing.Mapping[str,typing.Any]]:
"""Additional properties which will be rendered if the metric is used in a dashboard.
Examples are 'label' and 'color', but any key in here will be
added to dashboard graphs.
default
:default: {}
"""
return self._values.get('rendering_properties')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricConfig(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
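# Hedged usage sketch: inspecting a rendered metric. Exactly one of
# ``math_expression`` / ``metric_stat`` is expected to be set, depending on
# whether the IMetric is a MathExpression or a plain Metric; ``some_metric``
# is a hypothetical IMetric.
#
#   config = some_metric.to_metric_config()
#   if config.metric_stat is not None:
#       print(config.metric_stat.namespace, config.metric_stat.statistic)
#   elif config.math_expression is not None:
#       print(config.math_expression.expression)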
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricExpressionConfig", jsii_struct_bases=[], name_mapping={'expression': 'expression', 'using_metrics': 'usingMetrics'})
class MetricExpressionConfig():
def __init__(self, *, expression: str, using_metrics: typing.Mapping[str,"IMetric"]):
"""Properties for a concrete metric.
:param expression: Math expression for the metric.
:param using_metrics: Metrics used in the math expression.
"""
self._values = {
'expression': expression,
'using_metrics': using_metrics,
}
@builtins.property
def expression(self) -> str:
"""Math expression for the metric."""
return self._values.get('expression')
@builtins.property
def using_metrics(self) -> typing.Mapping[str,"IMetric"]:
"""Metrics used in the math expression."""
return self._values.get('using_metrics')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricExpressionConfig(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricGraphConfig", jsii_struct_bases=[], name_mapping={'metric_name': 'metricName', 'namespace': 'namespace', 'period': 'period', 'rendering_properties': 'renderingProperties', 'color': 'color', 'dimensions': 'dimensions', 'label': 'label', 'statistic': 'statistic', 'unit': 'unit'})
class MetricGraphConfig():
def __init__(self, *, metric_name: str, namespace: str, period: jsii.Number, rendering_properties: "MetricRenderingProperties", color: typing.Optional[str]=None, dimensions: typing.Optional[typing.List["Dimension"]]=None, label: typing.Optional[str]=None, statistic: typing.Optional[str]=None, unit: typing.Optional["Unit"]=None):
"""Properties used to construct the Metric identifying part of a Graph.
:param metric_name: Name of the metric.
:param namespace: Namespace of the metric.
:param period: How many seconds to aggregate over.
        :param rendering_properties: Rendering properties that override the yAxis parameter of the widget object.
:param color: Color for the graph line.
:param dimensions: The dimensions to apply to the alarm.
:param label: Label for the metric.
:param statistic: Aggregation function to use (can be either simple or a percentile).
:param unit: The unit of the alarm.
deprecated
:deprecated: Replaced by MetricConfig
stability
:stability: deprecated
"""
if isinstance(rendering_properties, dict): rendering_properties = MetricRenderingProperties(**rendering_properties)
self._values = {
'metric_name': metric_name,
'namespace': namespace,
'period': period,
'rendering_properties': rendering_properties,
}
if color is not None: self._values["color"] = color
if dimensions is not None: self._values["dimensions"] = dimensions
if label is not None: self._values["label"] = label
if statistic is not None: self._values["statistic"] = statistic
if unit is not None: self._values["unit"] = unit
@builtins.property
def metric_name(self) -> str:
"""Name of the metric.
stability
:stability: deprecated
"""
return self._values.get('metric_name')
@builtins.property
def namespace(self) -> str:
"""Namespace of the metric.
stability
:stability: deprecated
"""
return self._values.get('namespace')
@builtins.property
def period(self) -> jsii.Number:
"""How many seconds to aggregate over.
deprecated
:deprecated: Use ``period`` in ``renderingProperties``
stability
:stability: deprecated
"""
return self._values.get('period')
@builtins.property
def rendering_properties(self) -> "MetricRenderingProperties":
"""Rendering properties override yAxis parameter of the widget object.
stability
:stability: deprecated
"""
return self._values.get('rendering_properties')
@builtins.property
def color(self) -> typing.Optional[str]:
"""Color for the graph line.
deprecated
:deprecated: Use ``color`` in ``renderingProperties``
stability
:stability: deprecated
"""
return self._values.get('color')
@builtins.property
def dimensions(self) -> typing.Optional[typing.List["Dimension"]]:
"""The dimensions to apply to the alarm.
stability
:stability: deprecated
"""
return self._values.get('dimensions')
@builtins.property
def label(self) -> typing.Optional[str]:
"""Label for the metric.
deprecated
:deprecated: Use ``label`` in ``renderingProperties``
stability
:stability: deprecated
"""
return self._values.get('label')
@builtins.property
def statistic(self) -> typing.Optional[str]:
"""Aggregation function to use (can be either simple or a percentile).
deprecated
:deprecated: Use ``stat`` in ``renderingProperties``
stability
:stability: deprecated
"""
return self._values.get('statistic')
@builtins.property
def unit(self) -> typing.Optional["Unit"]:
"""The unit of the alarm.
deprecated
:deprecated: not used in dashboard widgets
stability
:stability: deprecated
"""
return self._values.get('unit')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricGraphConfig(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricOptions", jsii_struct_bases=[CommonMetricOptions], name_mapping={'account': 'account', 'color': 'color', 'dimensions': 'dimensions', 'label': 'label', 'period': 'period', 'region': 'region', 'statistic': 'statistic', 'unit': 'unit'})
class MetricOptions(CommonMetricOptions):
def __init__(self, *, account: typing.Optional[str]=None, color: typing.Optional[str]=None, dimensions: typing.Optional[typing.Mapping[str,typing.Any]]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, region: typing.Optional[str]=None, statistic: typing.Optional[str]=None, unit: typing.Optional["Unit"]=None):
"""Properties of a metric that can be changed.
:param account: Account which this metric comes from. Default: Deployment account.
:param color: Color for this metric when added to a Graph in a Dashboard.
:param dimensions: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: Deployment region.
        :param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
        :param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: All metric datums in the given metric stream
"""
self._values = {
}
if account is not None: self._values["account"] = account
if color is not None: self._values["color"] = color
if dimensions is not None: self._values["dimensions"] = dimensions
if label is not None: self._values["label"] = label
if period is not None: self._values["period"] = period
if region is not None: self._values["region"] = region
if statistic is not None: self._values["statistic"] = statistic
if unit is not None: self._values["unit"] = unit
@builtins.property
def account(self) -> typing.Optional[str]:
"""Account which this metric comes from.
default
:default: Deployment account.
"""
return self._values.get('account')
@builtins.property
def color(self) -> typing.Optional[str]:
"""Color for this metric when added to a Graph in a Dashboard."""
return self._values.get('color')
@builtins.property
def dimensions(self) -> typing.Optional[typing.Mapping[str,typing.Any]]:
"""Dimensions of the metric.
default
:default: - No dimensions.
"""
return self._values.get('dimensions')
@builtins.property
def label(self) -> typing.Optional[str]:
"""Label for this metric when added to a Graph in a Dashboard."""
return self._values.get('label')
@builtins.property
def period(self) -> typing.Optional[aws_cdk.core.Duration]:
"""The period over which the specified statistic is applied.
default
:default: Duration.minutes(5)
"""
return self._values.get('period')
@builtins.property
def region(self) -> typing.Optional[str]:
"""Region which this metric comes from.
default
:default: Deployment region.
"""
return self._values.get('region')
@builtins.property
def statistic(self) -> typing.Optional[str]:
"""What function to use for aggregating.
Can be one of the following:
- "Minimum" | "min"
- "Maximum" | "max"
- "Average" | "avg"
- "Sum" | "sum"
- "SampleCount | "n"
- "pNN.NN"
default
:default: Average
"""
return self._values.get('statistic')
@builtins.property
def unit(self) -> typing.Optional["Unit"]:
"""Unit used to filter the metric stream.
Only refer to datums emitted to the metric stream with the given unit and
ignore all others. Only useful when datums are being emitted to the same
metric stream under different units.
        The default is to use all metric datums in the stream, regardless of unit,
which is recommended in nearly all cases.
CloudWatch does not honor this property for graphs.
default
:default: All metric datums in the given metric stream
"""
return self._values.get('unit')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricOptions(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricProps", jsii_struct_bases=[CommonMetricOptions], name_mapping={'account': 'account', 'color': 'color', 'dimensions': 'dimensions', 'label': 'label', 'period': 'period', 'region': 'region', 'statistic': 'statistic', 'unit': 'unit', 'metric_name': 'metricName', 'namespace': 'namespace'})
class MetricProps(CommonMetricOptions):
def __init__(self, *, account: typing.Optional[str]=None, color: typing.Optional[str]=None, dimensions: typing.Optional[typing.Mapping[str,typing.Any]]=None, label: typing.Optional[str]=None, period: typing.Optional[aws_cdk.core.Duration]=None, region: typing.Optional[str]=None, statistic: typing.Optional[str]=None, unit: typing.Optional["Unit"]=None, metric_name: str, namespace: str):
"""Properties for a metric.
:param account: Account which this metric comes from. Default: Deployment account.
:param color: Color for this metric when added to a Graph in a Dashboard.
:param dimensions: Dimensions of the metric. Default: - No dimensions.
:param label: Label for this metric when added to a Graph in a Dashboard.
:param period: The period over which the specified statistic is applied. Default: Duration.minutes(5)
:param region: Region which this metric comes from. Default: Deployment region.
        :param statistic: What function to use for aggregating. Can be one of the following: - "Minimum" | "min" - "Maximum" | "max" - "Average" | "avg" - "Sum" | "sum" - "SampleCount" | "n" - "pNN.NN" Default: Average
        :param unit: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. The default is to use all metric datums in the stream, regardless of unit, which is recommended in nearly all cases. CloudWatch does not honor this property for graphs. Default: All metric datums in the given metric stream
:param metric_name: Name of the metric.
:param namespace: Namespace of the metric.
"""
self._values = {
'metric_name': metric_name,
'namespace': namespace,
}
if account is not None: self._values["account"] = account
if color is not None: self._values["color"] = color
if dimensions is not None: self._values["dimensions"] = dimensions
if label is not None: self._values["label"] = label
if period is not None: self._values["period"] = period
if region is not None: self._values["region"] = region
if statistic is not None: self._values["statistic"] = statistic
if unit is not None: self._values["unit"] = unit
@builtins.property
def account(self) -> typing.Optional[str]:
"""Account which this metric comes from.
default
:default: Deployment account.
"""
return self._values.get('account')
@builtins.property
def color(self) -> typing.Optional[str]:
"""Color for this metric when added to a Graph in a Dashboard."""
return self._values.get('color')
@builtins.property
def dimensions(self) -> typing.Optional[typing.Mapping[str,typing.Any]]:
"""Dimensions of the metric.
default
:default: - No dimensions.
"""
return self._values.get('dimensions')
@builtins.property
def label(self) -> typing.Optional[str]:
"""Label for this metric when added to a Graph in a Dashboard."""
return self._values.get('label')
@builtins.property
def period(self) -> typing.Optional[aws_cdk.core.Duration]:
"""The period over which the specified statistic is applied.
default
:default: Duration.minutes(5)
"""
return self._values.get('period')
@builtins.property
def region(self) -> typing.Optional[str]:
"""Region which this metric comes from.
default
:default: Deployment region.
"""
return self._values.get('region')
@builtins.property
def statistic(self) -> typing.Optional[str]:
"""What function to use for aggregating.
Can be one of the following:
- "Minimum" | "min"
- "Maximum" | "max"
- "Average" | "avg"
- "Sum" | "sum"
- "SampleCount | "n"
- "pNN.NN"
default
:default: Average
"""
return self._values.get('statistic')
@builtins.property
def unit(self) -> typing.Optional["Unit"]:
"""Unit used to filter the metric stream.
Only refer to datums emitted to the metric stream with the given unit and
ignore all others. Only useful when datums are being emitted to the same
metric stream under different units.
        The default is to use all metric datums in the stream, regardless of unit,
which is recommended in nearly all cases.
CloudWatch does not honor this property for graphs.
default
:default: All metric datums in the given metric stream
"""
return self._values.get('unit')
@builtins.property
def metric_name(self) -> str:
"""Name of the metric."""
return self._values.get('metric_name')
@builtins.property
def namespace(self) -> str:
"""Namespace of the metric."""
return self._values.get('namespace')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricRenderingProperties", jsii_struct_bases=[], name_mapping={'period': 'period', 'color': 'color', 'label': 'label', 'stat': 'stat'})
class MetricRenderingProperties():
def __init__(self, *, period: jsii.Number, color: typing.Optional[str]=None, label: typing.Optional[str]=None, stat: typing.Optional[str]=None):
"""Custom rendering properties that override the default rendering properties specified in the yAxis parameter of the widget object.
:param period: How many seconds to aggregate over.
:param color: Color for the graph line.
:param label: Label for the metric.
:param stat: Aggregation function to use (can be either simple or a percentile).
deprecated
:deprecated: Replaced by MetricConfig.
stability
:stability: deprecated
"""
self._values = {
'period': period,
}
if color is not None: self._values["color"] = color
if label is not None: self._values["label"] = label
if stat is not None: self._values["stat"] = stat
@builtins.property
def period(self) -> jsii.Number:
"""How many seconds to aggregate over.
stability
:stability: deprecated
"""
return self._values.get('period')
@builtins.property
def color(self) -> typing.Optional[str]:
"""Color for the graph line.
stability
:stability: deprecated
"""
return self._values.get('color')
@builtins.property
def label(self) -> typing.Optional[str]:
"""Label for the metric.
stability
:stability: deprecated
"""
return self._values.get('label')
@builtins.property
def stat(self) -> typing.Optional[str]:
"""Aggregation function to use (can be either simple or a percentile).
stability
:stability: deprecated
"""
return self._values.get('stat')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricRenderingProperties(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricStatConfig", jsii_struct_bases=[], name_mapping={'metric_name': 'metricName', 'namespace': 'namespace', 'period': 'period', 'statistic': 'statistic', 'account': 'account', 'dimensions': 'dimensions', 'region': 'region', 'unit_filter': 'unitFilter'})
class MetricStatConfig():
def __init__(self, *, metric_name: str, namespace: str, period: aws_cdk.core.Duration, statistic: str, account: typing.Optional[str]=None, dimensions: typing.Optional[typing.List["Dimension"]]=None, region: typing.Optional[str]=None, unit_filter: typing.Optional["Unit"]=None):
"""Properties for a concrete metric.
NOTE: ``unit`` is no longer on this object since it is only used for ``Alarms``, and doesn't mean what one
would expect it to mean there anyway. It is most likely to be misused.
:param metric_name: Name of the metric.
:param namespace: Namespace of the metric.
:param period: How many seconds to aggregate over.
:param statistic: Aggregation function to use (can be either simple or a percentile).
:param account: Account which this metric comes from. Default: Deployment account.
:param dimensions: The dimensions to apply to the alarm. Default: []
:param region: Region which this metric comes from. Default: Deployment region.
:param unit_filter: Unit used to filter the metric stream. Only refer to datums emitted to the metric stream with the given unit and ignore all others. Only useful when datums are being emitted to the same metric stream under different units. This field has been renamed from plain ``unit`` to clearly communicate its purpose. Default: - Refer to all metric datums
"""
self._values = {
'metric_name': metric_name,
'namespace': namespace,
'period': period,
'statistic': statistic,
}
if account is not None: self._values["account"] = account
if dimensions is not None: self._values["dimensions"] = dimensions
if region is not None: self._values["region"] = region
if unit_filter is not None: self._values["unit_filter"] = unit_filter
@builtins.property
def metric_name(self) -> str:
"""Name of the metric."""
return self._values.get('metric_name')
@builtins.property
def namespace(self) -> str:
"""Namespace of the metric."""
return self._values.get('namespace')
@builtins.property
def period(self) -> aws_cdk.core.Duration:
"""How many seconds to aggregate over."""
return self._values.get('period')
@builtins.property
def statistic(self) -> str:
"""Aggregation function to use (can be either simple or a percentile)."""
return self._values.get('statistic')
@builtins.property
def account(self) -> typing.Optional[str]:
"""Account which this metric comes from.
default
:default: Deployment account.
"""
return self._values.get('account')
@builtins.property
def dimensions(self) -> typing.Optional[typing.List["Dimension"]]:
"""The dimensions to apply to the alarm.
default
:default: []
"""
return self._values.get('dimensions')
@builtins.property
def region(self) -> typing.Optional[str]:
"""Region which this metric comes from.
default
:default: Deployment region.
"""
return self._values.get('region')
@builtins.property
def unit_filter(self) -> typing.Optional["Unit"]:
"""Unit used to filter the metric stream.
Only refer to datums emitted to the metric stream with the given unit and
ignore all others. Only useful when datums are being emitted to the same
metric stream under different units.
This field has been renamed from plain ``unit`` to clearly communicate
its purpose.
default
:default: - Refer to all metric datums
"""
return self._values.get('unit_filter')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricStatConfig(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.MetricWidgetProps", jsii_struct_bases=[], name_mapping={'height': 'height', 'region': 'region', 'title': 'title', 'width': 'width'})
class MetricWidgetProps():
def __init__(self, *, height: typing.Optional[jsii.Number]=None, region: typing.Optional[str]=None, title: typing.Optional[str]=None, width: typing.Optional[jsii.Number]=None):
"""Basic properties for widgets that display metrics.
:param height: Height of the widget. Default: Depends on the type of widget
:param region: The region the metrics of this graph should be taken from. Default: Current region
:param title: Title for the graph.
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
"""
self._values = {
}
if height is not None: self._values["height"] = height
if region is not None: self._values["region"] = region
if title is not None: self._values["title"] = title
if width is not None: self._values["width"] = width
@builtins.property
def height(self) -> typing.Optional[jsii.Number]:
"""Height of the widget.
default
:default: Depends on the type of widget
"""
return self._values.get('height')
@builtins.property
def region(self) -> typing.Optional[str]:
"""The region the metrics of this graph should be taken from.
default
:default: Current region
"""
return self._values.get('region')
@builtins.property
def title(self) -> typing.Optional[str]:
"""Title for the graph."""
return self._values.get('title')
@builtins.property
def width(self) -> typing.Optional[jsii.Number]:
"""Width of the widget, in a grid of 24 units wide.
default
:default: 6
"""
return self._values.get('width')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'MetricWidgetProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.AlarmWidgetProps", jsii_struct_bases=[MetricWidgetProps], name_mapping={'height': 'height', 'region': 'region', 'title': 'title', 'width': 'width', 'alarm': 'alarm', 'left_y_axis': 'leftYAxis'})
class AlarmWidgetProps(MetricWidgetProps):
def __init__(self, *, height: typing.Optional[jsii.Number]=None, region: typing.Optional[str]=None, title: typing.Optional[str]=None, width: typing.Optional[jsii.Number]=None, alarm: "IAlarm", left_y_axis: typing.Optional["YAxisProps"]=None):
"""Properties for an AlarmWidget.
:param height: Height of the widget. Default: Depends on the type of widget
:param region: The region the metrics of this graph should be taken from. Default: Current region
:param title: Title for the graph.
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
:param alarm: The alarm to show.
:param left_y_axis: Left Y axis.
"""
if isinstance(left_y_axis, dict): left_y_axis = YAxisProps(**left_y_axis)
self._values = {
'alarm': alarm,
}
if height is not None: self._values["height"] = height
if region is not None: self._values["region"] = region
if title is not None: self._values["title"] = title
if width is not None: self._values["width"] = width
if left_y_axis is not None: self._values["left_y_axis"] = left_y_axis
@builtins.property
def height(self) -> typing.Optional[jsii.Number]:
"""Height of the widget.
default
:default: Depends on the type of widget
"""
return self._values.get('height')
@builtins.property
def region(self) -> typing.Optional[str]:
"""The region the metrics of this graph should be taken from.
default
:default: Current region
"""
return self._values.get('region')
@builtins.property
def title(self) -> typing.Optional[str]:
"""Title for the graph."""
return self._values.get('title')
@builtins.property
def width(self) -> typing.Optional[jsii.Number]:
"""Width of the widget, in a grid of 24 units wide.
default
:default: 6
"""
return self._values.get('width')
@builtins.property
def alarm(self) -> "IAlarm":
"""The alarm to show."""
return self._values.get('alarm')
@builtins.property
def left_y_axis(self) -> typing.Optional["YAxisProps"]:
"""Left Y axis."""
return self._values.get('left_y_axis')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'AlarmWidgetProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.GraphWidgetProps", jsii_struct_bases=[MetricWidgetProps], name_mapping={'height': 'height', 'region': 'region', 'title': 'title', 'width': 'width', 'left': 'left', 'left_annotations': 'leftAnnotations', 'left_y_axis': 'leftYAxis', 'right': 'right', 'right_annotations': 'rightAnnotations', 'right_y_axis': 'rightYAxis', 'stacked': 'stacked'})
class GraphWidgetProps(MetricWidgetProps):
def __init__(self, *, height: typing.Optional[jsii.Number]=None, region: typing.Optional[str]=None, title: typing.Optional[str]=None, width: typing.Optional[jsii.Number]=None, left: typing.Optional[typing.List["IMetric"]]=None, left_annotations: typing.Optional[typing.List["HorizontalAnnotation"]]=None, left_y_axis: typing.Optional["YAxisProps"]=None, right: typing.Optional[typing.List["IMetric"]]=None, right_annotations: typing.Optional[typing.List["HorizontalAnnotation"]]=None, right_y_axis: typing.Optional["YAxisProps"]=None, stacked: typing.Optional[bool]=None):
"""Properties for a GraphWidget.
:param height: Height of the widget. Default: Depends on the type of widget
:param region: The region the metrics of this graph should be taken from. Default: Current region
:param title: Title for the graph.
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
:param left: Metrics to display on left Y axis.
:param left_annotations: Annotations for the left Y axis.
:param left_y_axis: Left Y axis.
:param right: Metrics to display on right Y axis.
:param right_annotations: Annotations for the right Y axis.
:param right_y_axis: Right Y axis.
:param stacked: Whether the graph should be shown as stacked lines.
"""
if isinstance(left_y_axis, dict): left_y_axis = YAxisProps(**left_y_axis)
if isinstance(right_y_axis, dict): right_y_axis = YAxisProps(**right_y_axis)
self._values = {
}
if height is not None: self._values["height"] = height
if region is not None: self._values["region"] = region
if title is not None: self._values["title"] = title
if width is not None: self._values["width"] = width
if left is not None: self._values["left"] = left
if left_annotations is not None: self._values["left_annotations"] = left_annotations
if left_y_axis is not None: self._values["left_y_axis"] = left_y_axis
if right is not None: self._values["right"] = right
if right_annotations is not None: self._values["right_annotations"] = right_annotations
if right_y_axis is not None: self._values["right_y_axis"] = right_y_axis
if stacked is not None: self._values["stacked"] = stacked
@builtins.property
def height(self) -> typing.Optional[jsii.Number]:
"""Height of the widget.
default
:default: Depends on the type of widget
"""
return self._values.get('height')
@builtins.property
def region(self) -> typing.Optional[str]:
"""The region the metrics of this graph should be taken from.
default
:default: Current region
"""
return self._values.get('region')
@builtins.property
def title(self) -> typing.Optional[str]:
"""Title for the graph."""
return self._values.get('title')
@builtins.property
def width(self) -> typing.Optional[jsii.Number]:
"""Width of the widget, in a grid of 24 units wide.
default
:default: 6
"""
return self._values.get('width')
@builtins.property
def left(self) -> typing.Optional[typing.List["IMetric"]]:
"""Metrics to display on left Y axis."""
return self._values.get('left')
@builtins.property
def left_annotations(self) -> typing.Optional[typing.List["HorizontalAnnotation"]]:
"""Annotations for the left Y axis."""
return self._values.get('left_annotations')
@builtins.property
def left_y_axis(self) -> typing.Optional["YAxisProps"]:
"""Left Y axis."""
return self._values.get('left_y_axis')
@builtins.property
def right(self) -> typing.Optional[typing.List["IMetric"]]:
"""Metrics to display on right Y axis."""
return self._values.get('right')
@builtins.property
def right_annotations(self) -> typing.Optional[typing.List["HorizontalAnnotation"]]:
"""Annotations for the right Y axis."""
return self._values.get('right_annotations')
@builtins.property
def right_y_axis(self) -> typing.Optional["YAxisProps"]:
"""Right Y axis."""
return self._values.get('right_y_axis')
@builtins.property
def stacked(self) -> typing.Optional[bool]:
"""Whether the graph should be shown as stacked lines."""
return self._values.get('stacked')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'GraphWidgetProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
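# Hedged usage sketch (typical CDK dashboard composition; `some_metric` and
# `dashboard` are illustrative, not defined in this file):
#   graph = GraphWidget(title="Latency", left=[some_metric], width=12)
#   dashboard.add_widgets(Row(graph))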
@jsii.enum(jsii_type="@aws-cdk/aws-cloudwatch.PeriodOverride")
class PeriodOverride(enum.Enum):
AUTO = "AUTO"
INHERIT = "INHERIT"
@jsii.implements(IWidget)
class Row(metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.Row"):
"""A widget that contains other widgets in a horizontal row.
Widgets will be laid out next to each other
"""
def __init__(self, *widgets: "IWidget") -> None:
"""
:param widgets: -
"""
jsii.create(Row, self, [*widgets])
@jsii.member(jsii_name="position")
def position(self, x: jsii.Number, y: jsii.Number) -> None:
"""Place the widget at a given position.
:param x: -
:param y: -
"""
return jsii.invoke(self, "position", [x, y])
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
@builtins.property
@jsii.member(jsii_name="height")
def height(self) -> jsii.Number:
"""The amount of vertical grid units the widget will take up."""
return jsii.get(self, "height")
@builtins.property
@jsii.member(jsii_name="width")
def width(self) -> jsii.Number:
"""The amount of horizontal grid units the widget will take up."""
return jsii.get(self, "width")
@jsii.enum(jsii_type="@aws-cdk/aws-cloudwatch.Shading")
class Shading(enum.Enum):
NONE = "NONE"
"""Don't add shading."""
ABOVE = "ABOVE"
"""Add shading above the annotation."""
BELOW = "BELOW"
"""Add shading below the annotation."""
class SingleValueWidget(ConcreteWidget, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.SingleValueWidget"):
"""A dashboard widget that displays the most recent value for every metric."""
def __init__(self, *, metrics: typing.List["IMetric"], set_period_to_time_range: typing.Optional[bool]=None, height: typing.Optional[jsii.Number]=None, region: typing.Optional[str]=None, title: typing.Optional[str]=None, width: typing.Optional[jsii.Number]=None) -> None:
"""
:param metrics: Metrics to display.
:param set_period_to_time_range: Whether to show the value from the entire time range. Default: false
:param height: Height of the widget. Default: Depends on the type of widget
:param region: The region the metrics of this graph should be taken from. Default: Current region
:param title: Title for the graph.
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
"""
props = SingleValueWidgetProps(metrics=metrics, set_period_to_time_range=set_period_to_time_range, height=height, region=region, title=title, width=width)
jsii.create(SingleValueWidget, self, [props])
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.SingleValueWidgetProps", jsii_struct_bases=[MetricWidgetProps], name_mapping={'height': 'height', 'region': 'region', 'title': 'title', 'width': 'width', 'metrics': 'metrics', 'set_period_to_time_range': 'setPeriodToTimeRange'})
class SingleValueWidgetProps(MetricWidgetProps):
def __init__(self, *, height: typing.Optional[jsii.Number]=None, region: typing.Optional[str]=None, title: typing.Optional[str]=None, width: typing.Optional[jsii.Number]=None, metrics: typing.List["IMetric"], set_period_to_time_range: typing.Optional[bool]=None):
"""Properties for a SingleValueWidget.
:param height: Height of the widget. Default: Depends on the type of widget
:param region: The region the metrics of this graph should be taken from. Default: Current region
:param title: Title for the graph.
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
:param metrics: Metrics to display.
:param set_period_to_time_range: Whether to show the value from the entire time range. Default: false
"""
self._values = {
'metrics': metrics,
}
if height is not None: self._values["height"] = height
if region is not None: self._values["region"] = region
if title is not None: self._values["title"] = title
if width is not None: self._values["width"] = width
if set_period_to_time_range is not None: self._values["set_period_to_time_range"] = set_period_to_time_range
@builtins.property
def height(self) -> typing.Optional[jsii.Number]:
"""Height of the widget.
default
:default: Depends on the type of widget
"""
return self._values.get('height')
@builtins.property
def region(self) -> typing.Optional[str]:
"""The region the metrics of this graph should be taken from.
default
:default: Current region
"""
return self._values.get('region')
@builtins.property
def title(self) -> typing.Optional[str]:
"""Title for the graph."""
return self._values.get('title')
@builtins.property
def width(self) -> typing.Optional[jsii.Number]:
"""Width of the widget, in a grid of 24 units wide.
default
:default: 6
"""
return self._values.get('width')
@builtins.property
def metrics(self) -> typing.List["IMetric"]:
"""Metrics to display."""
return self._values.get('metrics')
@builtins.property
def set_period_to_time_range(self) -> typing.Optional[bool]:
"""Whether to show the value from the entire time range.
default
:default: false
"""
return self._values.get('set_period_to_time_range')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'SingleValueWidgetProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.implements(IWidget)
class Spacer(metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.Spacer"):
"""A widget that doesn't display anything but takes up space."""
def __init__(self, *, height: typing.Optional[jsii.Number]=None, width: typing.Optional[jsii.Number]=None) -> None:
"""
        :param height: Height of the spacer. Default: 1
:param width: Width of the spacer. Default: 1
"""
props = SpacerProps(height=height, width=width)
jsii.create(Spacer, self, [props])
@jsii.member(jsii_name="position")
def position(self, _x: jsii.Number, _y: jsii.Number) -> None:
"""Place the widget at a given position.
:param _x: -
:param _y: -
"""
return jsii.invoke(self, "position", [_x, _y])
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
@builtins.property
@jsii.member(jsii_name="height")
def height(self) -> jsii.Number:
"""The amount of vertical grid units the widget will take up."""
return jsii.get(self, "height")
@builtins.property
@jsii.member(jsii_name="width")
def width(self) -> jsii.Number:
"""The amount of horizontal grid units the widget will take up."""
return jsii.get(self, "width")
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.SpacerProps", jsii_struct_bases=[], name_mapping={'height': 'height', 'width': 'width'})
class SpacerProps():
def __init__(self, *, height: typing.Optional[jsii.Number]=None, width: typing.Optional[jsii.Number]=None):
"""Props of the spacer.
        :param height: Height of the spacer. Default: 1
:param width: Width of the spacer. Default: 1
"""
self._values = {
}
if height is not None: self._values["height"] = height
if width is not None: self._values["width"] = width
@builtins.property
def height(self) -> typing.Optional[jsii.Number]:
"""Height of the spacer.
default
        :default: 1
"""
return self._values.get('height')
@builtins.property
def width(self) -> typing.Optional[jsii.Number]:
"""Width of the spacer.
default
:default: 1
"""
return self._values.get('width')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'SpacerProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.enum(jsii_type="@aws-cdk/aws-cloudwatch.Statistic")
class Statistic(enum.Enum):
"""Statistic to use over the aggregation period."""
SAMPLE_COUNT = "SAMPLE_COUNT"
AVERAGE = "AVERAGE"
SUM = "SUM"
MINIMUM = "MINIMUM"
MAXIMUM = "MAXIMUM"
class TextWidget(ConcreteWidget, metaclass=jsii.JSIIMeta, jsii_type="@aws-cdk/aws-cloudwatch.TextWidget"):
"""A dashboard widget that displays MarkDown."""
def __init__(self, *, markdown: str, height: typing.Optional[jsii.Number]=None, width: typing.Optional[jsii.Number]=None) -> None:
"""
:param markdown: The text to display, in MarkDown format.
:param height: Height of the widget. Default: 2
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
"""
props = TextWidgetProps(markdown=markdown, height=height, width=width)
jsii.create(TextWidget, self, [props])
@jsii.member(jsii_name="position")
def position(self, x: jsii.Number, y: jsii.Number) -> None:
"""Place the widget at a given position.
:param x: -
:param y: -
"""
return jsii.invoke(self, "position", [x, y])
@jsii.member(jsii_name="toJson")
def to_json(self) -> typing.List[typing.Any]:
"""Return the widget JSON for use in the dashboard."""
return jsii.invoke(self, "toJson", [])
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.TextWidgetProps", jsii_struct_bases=[], name_mapping={'markdown': 'markdown', 'height': 'height', 'width': 'width'})
class TextWidgetProps():
def __init__(self, *, markdown: str, height: typing.Optional[jsii.Number]=None, width: typing.Optional[jsii.Number]=None):
"""Properties for a Text widget.
:param markdown: The text to display, in MarkDown format.
:param height: Height of the widget. Default: 2
:param width: Width of the widget, in a grid of 24 units wide. Default: 6
"""
self._values = {
'markdown': markdown,
}
if height is not None: self._values["height"] = height
if width is not None: self._values["width"] = width
@builtins.property
def markdown(self) -> str:
"""The text to display, in MarkDown format."""
return self._values.get('markdown')
@builtins.property
def height(self) -> typing.Optional[jsii.Number]:
"""Height of the widget.
default
:default: 2
"""
return self._values.get('height')
@builtins.property
def width(self) -> typing.Optional[jsii.Number]:
"""Width of the widget, in a grid of 24 units wide.
default
:default: 6
"""
return self._values.get('width')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'TextWidgetProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
@jsii.enum(jsii_type="@aws-cdk/aws-cloudwatch.TreatMissingData")
class TreatMissingData(enum.Enum):
"""Specify how missing data points are treated during alarm evaluation."""
BREACHING = "BREACHING"
"""Missing data points are treated as breaching the threshold."""
NOT_BREACHING = "NOT_BREACHING"
"""Missing data points are treated as being within the threshold."""
IGNORE = "IGNORE"
"""The current alarm state is maintained."""
MISSING = "MISSING"
"""The alarm does not consider missing data points when evaluating whether to change state."""
@jsii.enum(jsii_type="@aws-cdk/aws-cloudwatch.Unit")
class Unit(enum.Enum):
"""Unit for metric."""
SECONDS = "SECONDS"
MICROSECONDS = "MICROSECONDS"
MILLISECONDS = "MILLISECONDS"
BYTES = "BYTES"
KILOBYTES = "KILOBYTES"
MEGABYTES = "MEGABYTES"
GIGABYTES = "GIGABYTES"
TERABYTES = "TERABYTES"
BITS = "BITS"
KILOBITS = "KILOBITS"
MEGABITS = "MEGABITS"
GIGABITS = "GIGABITS"
TERABITS = "TERABITS"
PERCENT = "PERCENT"
COUNT = "COUNT"
BYTES_PER_SECOND = "BYTES_PER_SECOND"
KILOBYTES_PER_SECOND = "KILOBYTES_PER_SECOND"
MEGABYTES_PER_SECOND = "MEGABYTES_PER_SECOND"
GIGABYTES_PER_SECOND = "GIGABYTES_PER_SECOND"
TERABYTES_PER_SECOND = "TERABYTES_PER_SECOND"
BITS_PER_SECOND = "BITS_PER_SECOND"
KILOBITS_PER_SECOND = "KILOBITS_PER_SECOND"
MEGABITS_PER_SECOND = "MEGABITS_PER_SECOND"
GIGABITS_PER_SECOND = "GIGABITS_PER_SECOND"
TERABITS_PER_SECOND = "TERABITS_PER_SECOND"
COUNT_PER_SECOND = "COUNT_PER_SECOND"
NONE = "NONE"
@jsii.data_type(jsii_type="@aws-cdk/aws-cloudwatch.YAxisProps", jsii_struct_bases=[], name_mapping={'label': 'label', 'max': 'max', 'min': 'min', 'show_units': 'showUnits'})
class YAxisProps():
def __init__(self, *, label: typing.Optional[str]=None, max: typing.Optional[jsii.Number]=None, min: typing.Optional[jsii.Number]=None, show_units: typing.Optional[bool]=None):
"""Properties for a Y-Axis.
:param label: The label. Default: No label
:param max: The max value. Default: No maximum value
:param min: The min value. Default: 0
:param show_units: Whether to show units. Default: true
"""
self._values = {
}
if label is not None: self._values["label"] = label
if max is not None: self._values["max"] = max
if min is not None: self._values["min"] = min
if show_units is not None: self._values["show_units"] = show_units
@builtins.property
def label(self) -> typing.Optional[str]:
"""The label.
default
:default: No label
"""
return self._values.get('label')
@builtins.property
def max(self) -> typing.Optional[jsii.Number]:
"""The max value.
default
:default: No maximum value
"""
return self._values.get('max')
@builtins.property
def min(self) -> typing.Optional[jsii.Number]:
"""The min value.
default
:default: 0
"""
return self._values.get('min')
@builtins.property
def show_units(self) -> typing.Optional[bool]:
"""Whether to show units.
default
:default: true
"""
return self._values.get('show_units')
def __eq__(self, rhs) -> bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs) -> bool:
return not (rhs == self)
def __repr__(self) -> str:
return 'YAxisProps(%s)' % ', '.join(k + '=' + repr(v) for k, v in self._values.items())
__all__ = ["Alarm", "AlarmActionConfig", "AlarmProps", "AlarmWidget", "AlarmWidgetProps", "CfnAlarm", "CfnAlarmProps", "CfnAnomalyDetector", "CfnAnomalyDetectorProps", "CfnDashboard", "CfnDashboardProps", "CfnInsightRule", "CfnInsightRuleProps", "Column", "CommonMetricOptions", "ComparisonOperator", "ConcreteWidget", "CreateAlarmOptions", "Dashboard", "DashboardProps", "Dimension", "GraphWidget", "GraphWidgetProps", "HorizontalAnnotation", "IAlarm", "IAlarmAction", "IMetric", "IWidget", "MathExpression", "MathExpressionOptions", "MathExpressionProps", "Metric", "MetricAlarmConfig", "MetricConfig", "MetricExpressionConfig", "MetricGraphConfig", "MetricOptions", "MetricProps", "MetricRenderingProperties", "MetricStatConfig", "MetricWidgetProps", "PeriodOverride", "Row", "Shading", "SingleValueWidget", "SingleValueWidgetProps", "Spacer", "SpacerProps", "Statistic", "TextWidget", "TextWidgetProps", "TreatMissingData", "Unit", "YAxisProps", "__jsii_assembly__"]
publication.publish()
| [
"[email protected]"
]
| |
2f970ad0404436ed1c8ea829b0d349c8cf94ea3e | 09bb17f07b127ac7d7d44969d55c92fa979111e2 | /.PyCharmCE2019.2/system/python_stubs/-1448197883/PIL/_imagingcms.py | 627fb7996c066f82fdbeb37c708d6041179e88c7 | []
| no_license | Tusharec82/ROCKY | ea19201275f853603870a68275bdf7d1e64a6084 | d9812b0fefde8c3d9ffe7947cf331af12dd25ab5 | refs/heads/master | 2020-08-09T00:34:50.707405 | 2019-10-09T18:13:26 | 2019-10-09T18:13:26 | 213,957,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,810 | py | # encoding: utf-8
# module PIL._imagingcms
# from /usr/lib/python3/dist-packages/PIL/_imagingcms.cpython-36m-x86_64-linux-gnu.so
# by generator 1.147
# no doc
# no imports
# Variables with simple values
littlecms_version = '20.90'
# functions
def buildProofTransform(*args, **kwargs): # real signature unknown
pass
def buildTransform(*args, **kwargs): # real signature unknown
pass
def createProfile(*args, **kwargs): # real signature unknown
pass
def profile_frombytes(*args, **kwargs): # real signature unknown
pass
def profile_fromstring(*args, **kwargs): # real signature unknown
pass
def profile_open(*args, **kwargs): # real signature unknown
pass
def profile_tobytes(*args, **kwargs): # real signature unknown
pass
# classes
class CmsProfile(object):
# no doc
def is_intent_supported(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
attributes = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
blue_colorant = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
blue_primary = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
chromaticity = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
chromatic_adaptation = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
clut = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
colorant_table = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
colorant_table_out = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
colorimetric_intent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
color_space = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
connection_space = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
copyright = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
creation_date = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
device_class = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
green_colorant = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
green_primary = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
header_flags = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
header_manufacturer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
header_model = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
icc_measurement_condition = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
icc_version = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
icc_viewing_condition = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
intent_supported = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
is_matrix_shaper = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
luminance = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
manufacturer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
media_black_point = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
media_white_point = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
media_white_point_temperature = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
model = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
pcs = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
perceptual_rendering_intent_gamut = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
product_copyright = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
product_desc = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
product_description = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
product_manufacturer = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
product_model = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
profile_description = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
profile_id = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
red_colorant = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
red_primary = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
rendering_intent = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
saturation_rendering_intent_gamut = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
screening_description = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
target = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
technology = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
version = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
viewing_condition = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
xcolor_space = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
# variables with complex values
__loader__ = None # (!) real value is '<_frozen_importlib_external.ExtensionFileLoader object at 0x7f610258f860>'
__spec__ = None # (!) real value is "ModuleSpec(name='PIL._imagingcms', loader=<_frozen_importlib_external.ExtensionFileLoader object at 0x7f610258f860>, origin='/usr/lib/python3/dist-packages/PIL/_imagingcms.cpython-36m-x86_64-linux-gnu.so')"
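# Hedged note: these C-level entry points are normally reached through the
# PIL.ImageCms wrapper rather than imported directly, e.g.
#   from PIL import ImageCms
#   srgb = ImageCms.createProfile("sRGB")   # delegates to createProfile() above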
| [
"[email protected]"
]
| |
73d8ba35ec791beffc2e3f13c980754b8a857f23 | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part003405.py | 5a684c947584fd60604a2244ef648e9a15ca31ab | []
| no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,655 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher37493(CommutativeMatcher):
_instance = None
patterns = {
0: (0, Multiset({0: 1}), [
(VariableWithCount('i2.2.1.2.2.2.1.0', 1, 1, S(1)), Mul)
])
}
subjects = {}
subjects_by_id = {}
bipartite = BipartiteGraph()
associative = Mul
max_optional_count = 1
anonymous_patterns = set()
def __init__(self):
self.add_subject(None)
@staticmethod
def get():
if CommutativeMatcher37493._instance is None:
CommutativeMatcher37493._instance = CommutativeMatcher37493()
return CommutativeMatcher37493._instance
@staticmethod
def get_match_iter(subject):
subjects = deque([subject]) if subject is not None else deque()
subst0 = Substitution()
# State 37492
if len(subjects) >= 1 and isinstance(subjects[0], Pow):
tmp1 = subjects.popleft()
subjects2 = deque(tmp1._args)
# State 37494
if len(subjects2) >= 1:
tmp3 = subjects2.popleft()
subst1 = Substitution(subst0)
try:
subst1.try_add_variable('i2.2.1.1', tmp3)
except ValueError:
pass
else:
pass
# State 37495
if len(subjects2) >= 1:
tmp5 = subjects2.popleft()
subst2 = Substitution(subst1)
try:
subst2.try_add_variable('i2.2.1.2', tmp5)
except ValueError:
pass
else:
pass
# State 37496
if len(subjects2) == 0:
pass
# State 37497
if len(subjects) == 0:
pass
# 0: x**j
yield 0, subst2
subjects2.appendleft(tmp5)
subjects2.appendleft(tmp3)
subjects.appendleft(tmp1)
return
yield
from collections import deque | [
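# Hedged note: this module is auto-generated by matchpy for sympy's Rubi
# integration rules; matching is normally driven through the singleton, e.g.
#   matcher = CommutativeMatcher37493.get()
#   for pattern_id, subst in matcher.get_match_iter(subject):  # subject: part of a sympy expression
#       ...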
"[email protected]"
]
| |
82c1c52804b41ca95993b01dc43ebd395f50a466 | 7ae44f6975561ff5542cd369dcd04d53093db887 | /Data Structures and Algorithms in Python/11_Linked_List_2/test.py | 23bb821782d5d54fb6567da6bc799dcb4f68d659 | []
| no_license | ashisharora24/learning_tutorials_practice | 89208a77ad162265c6573ca4559ebf6f4a6f8f18 | 57f461908d0c4d58d831ec375c428179fa69cb3f | refs/heads/master | 2020-05-21T05:32:26.397725 | 2019-07-23T10:36:06 | 2019-07-23T10:36:06 | 185,923,049 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | from time import *
t2 = perf_counter()
t1 = perf_counter()
counter = 0
for i in range(10000):
counter += 1
print(i, end=" ")
# r = req...
    if counter == 61:
        # 60 iterations done in this window; check how much of the 10 s elapsed
        t2 = perf_counter()
        # sleep out the remainder of the window (clamped so sleep() never gets a negative value)
        wait = max(0, 10 - (t2 - t1))
        print("\n sleeping \n", wait)
        sleep(wait)
        counter = 1
        t1 = perf_counter()
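# Design note (hedged): the guard above caps the loop at roughly 60 iterations
# per rolling 10-second window -- the usual shape for throttling calls to a
# rate-limited API (the commented-out `r = req...` suggests a request here).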
| [
"[email protected]"
]
| |
40f837d965c2f999a465970c392b061e920b701f | c808be39b3c2cb89cc8214a9ce979381a8c2e9e9 | /kosh/exec_graphs/__init__.py | 8af6b4730ee8b1efa4cc5de97c239afa15c54a99 | [
"MIT"
]
| permissive | LLNL/kosh | aff088e0907873f29fd5b5e07c6b44e295a65314 | d2fda80cf8ecea9e49b44658127538ce89b20301 | refs/heads/stable | 2023-05-10T19:09:24.149298 | 2023-05-05T21:06:42 | 2023-05-05T21:06:42 | 299,369,568 | 7 | 2 | MIT | 2022-03-15T14:39:31 | 2020-09-28T16:35:09 | Python | UTF-8 | Python | false | false | 74 | py | from .core import KoshExecutionGraph, populate, find_network_ends # noqa
| [
"[email protected]"
]
| |
996eb7912cb82e037205e02ff650c6061318278d | 266947fd84eed629ed0c21f6d91134239512afd9 | /BeginnerContest_B/147.py | 1181ff0ecd9ce31a917fcf62e9df63d19fa0c9c8 | []
| no_license | SkiMsyk/AtCoder | c86adeec4fa470ec14c1be7400c9fc8b3fb301cd | 8102b99cf0fb6d7fa304edb942d21cf7016cba7d | refs/heads/master | 2022-09-03T01:23:10.748038 | 2022-08-15T01:19:55 | 2022-08-15T01:19:55 | 239,656,752 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 207 | py | # Palindrome-philia
# input
S = input()
# solve
if len(S) % 2 != 0:
S = S[:len(S)//2] + S[len(S)//2 + 1:]
res = sum([i != j for i, j in zip(S[:len(S)//2], S[len(S)//2:][-1::-1])])
# output
print(res) | [
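# Worked examples (hypothetical inputs): "redder" -> 0 (already a palindrome);
# "abcdef" -> 3 (pairs a/f, b/e and c/d all mismatch; one change fixes each pair).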
"[email protected]"
]
| |
8a2ff5c31ecbccf07f06b151a8aae5d8e32aa02a | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/21/usersdata/69/7600/submittedfiles/exercicio24.py | 7231844c7a7561c81bde56eb4f4805952f045a09 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | # -*- coding: utf-8 -*-
from __future__ import division
import math
a=int(input('Digite o valor de a:'))
b=int(input('Digite o valor de b:'))
i=1
c=0
# test every divisor up to the smaller of a and b, keeping the largest common one
if a>=b:
    menor=b
else:
    menor=a
while i<=menor:
    if a%i==0 and b%i==0:
        c=i
    i=i+1
print c
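# Worked example (hypothetical): a=12, b=8 -> common divisors 1, 2, 4 -> prints 4 (the GCD).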
| [
"[email protected]"
]
| |
aee318c6b0606346facce9b35398fcde1d8aefe0 | 08ee36e0bb1c250f7f2dfda12c1a73d1984cd2bc | /src/mnistk/networks/conv3dtanh_20.py | 5ceab17b3a130f554d3b8d621403d7c98fcfe4f2 | []
| no_license | ahgamut/mnistk | 58dadffad204602d425b18549e9b3d245dbf5486 | 19a661185e6d82996624fc6fcc03de7ad9213eb0 | refs/heads/master | 2021-11-04T07:36:07.394100 | 2021-10-27T18:37:12 | 2021-10-27T18:37:12 | 227,103,881 | 2 | 1 | null | 2020-02-19T22:07:24 | 2019-12-10T11:33:09 | Python | UTF-8 | Python | false | false | 1,600 | py | # -*- coding: utf-8 -*-
"""
conv3dtanh_20.py
:copyright: (c) 2019 by Gautham Venkatasubramanian.
:license: MIT
"""
import torch
from torch import nn
class Conv3dTanh_20(nn.Module):
def __init__(self):
nn.Module.__init__(self)
self.f0 = nn.Conv3d(in_channels=1, out_channels=13, kernel_size=(7, 7, 7), stride=(1, 1, 1), padding=(0, 0, 0), dilation=(1, 1, 1), groups=1, bias=True, padding_mode='zeros')
self.f1 = nn.Tanh()
self.f2 = nn.Conv3d(in_channels=13, out_channels=25, kernel_size=(1, 1, 1), stride=(1, 1, 1), padding=(0, 0, 0), dilation=(1, 1, 1), groups=1, bias=True, padding_mode='zeros')
self.f3 = nn.Conv3d(in_channels=25, out_channels=64, kernel_size=(1, 1, 1), stride=(1, 1, 1), padding=(0, 0, 0), dilation=(1, 1, 1), groups=1, bias=False, padding_mode='zeros')
self.f4 = nn.Conv3d(in_channels=64, out_channels=28, kernel_size=(1, 1, 1), stride=(1, 1, 1), padding=(0, 0, 0), dilation=(1, 1, 1), groups=1, bias=False, padding_mode='zeros')
self.f5 = nn.Tanh()
self.f6 = nn.Conv3d(in_channels=28, out_channels=10, kernel_size=(10, 1, 1), stride=(1, 1, 1), padding=(0, 0, 0), dilation=(1, 1, 1), groups=1, bias=False, padding_mode='zeros')
self.f7 = nn.LogSoftmax(dim=1)
def forward(self, *inputs):
x = inputs[0]
x = x.view(x.shape[0],1,16,7,7)
x = self.f0(x)
x = self.f1(x)
x = self.f2(x)
x = self.f3(x)
x = self.f4(x)
x = self.f5(x)
x = self.f6(x)
x = x.view(x.shape[0],10)
x = self.f7(x)
return x
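# Hedged shape check: inputs are flattened 784-dim vectors (16*7*7 = 784),
# reshaped internally to (N, 1, 16, 7, 7), e.g.
#   net = Conv3dTanh_20()
#   out = net(torch.zeros(4, 784))   # -> (4, 10) log-probabilities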
| [
"[email protected]"
]
| |
86298f50d044ed13be501c49ad6bcf4ba3d5ffaf | b99f518e5289da9d068154cefd7b9f13c8d6564f | /VSCode_work/chapter16/chapter16_16_5.py | 98addd58a6f830d481ce26f5bae44635ea5d202a | [
"MIT"
]
| permissive | yangyahu-1994/Python-Crash-Course | e6a81feba4bdbef297387577ea36b7027bf61c8f | 6f8ef7fe8466d88931a0d3cc423ba5d966663b9d | refs/heads/master | 2023-07-29T22:29:06.151253 | 2021-10-05T14:04:32 | 2021-10-05T14:04:32 | 313,314,086 | 18 | 12 | null | null | null | null | UTF-8 | Python | false | false | 1,423 | py | # Import the required modules
import json
import pygal_maps_world.maps
from chapter16_16_5_country_codes import get_country_code
from pygal.style import RotateStyle
# Load the data into a list
filename = '/home/yyh/Documents/VSCode_work/chapter16/population_data.json'
with open(filename) as f:
pop_data = json.load(f)
# Make a dictionary containing population counts
cc_populations = {}
for pop_dict in pop_data: # store each country's dict in pop_dict in turn
if pop_dict['Year'] == '2010':
country = pop_dict['Country Name']
population = int(float(pop_dict['Value']))
code = get_country_code(country)
if code:
cc_populations[code] = population
# Split all the countries into three groups according to population
cc_pops_1, cc_pops_2, cc_pops_3 = {}, {}, {}
for cc, pop in cc_populations.items():
if pop < 10000000:
cc_pops_1[cc] = pop
elif pop < 1000000000:
cc_pops_2[cc] = pop
else:
cc_pops_3[cc] = pop
# See how many countries are in each group
print(len(cc_pops_1), len(cc_pops_2), len(cc_pops_3))
wm = pygal_maps_world.maps.World()
wm_style = RotateStyle('#336699') # create a style instance
wm = pygal_maps_world.maps.World(style=wm_style)
wm.title = 'World Population in 2010, by Country'
wm.add('0-10m', cc_pops_1)
wm.add('10m-1bn', cc_pops_2)
wm.add('>1bn', cc_pops_3)
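# Note: the three legend labels above ('0-10m', '10m-1bn', '>1bn') mirror the
# grouping thresholds used earlier (<10 million, <1 billion, >=1 billion).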
wm.render_to_file('/home/yyh/Documents/VSCode_work/chapter16/world_population_16_5.svg') | [
"[email protected]"
]
| |
c7e7f24927496acf6bd5567a6c1947d2a8713bd1 | 100fdb7bc25acda0953f238c6d1104fda786ee7a | /probreg/callbacks.py | 17c2290b8cc732327fb23d0422177f6903656178 | [
"MIT"
]
| permissive | Yellowshuohahaha/probreg | 1f219821fe4706b58aaf70f040aea3bc660bc103 | 31cade406af7f20a20a85e2e3764c55e5b9e986f | refs/heads/master | 2022-11-16T08:54:45.610504 | 2020-07-18T02:30:28 | 2020-07-18T02:30:58 | 283,975,360 | 1 | 0 | MIT | 2020-07-31T07:46:06 | 2020-07-31T07:46:06 | null | UTF-8 | Python | false | false | 3,385 | py | import copy
import open3d as o3
import matplotlib.pyplot as plt
class Plot2DCallback(object):
"""Display the 2D registration result of each iteration.
Args:
source (numpy.ndarray): Source point cloud data.
target (numpy.ndarray): Target point cloud data.
        save (bool, optional): If this flag is True,
            each iteration's image is saved with a sequential number.
"""
def __init__(self, source, target, save=False,
keep_window=True):
self._source = source
self._target = target
self._result = copy.deepcopy(self._source)
self._save = save
self._cnt = 0
plt.axis('equal')
plt.plot(self._source[:, 0], self._source[:, 1], 'ro', label='source')
        plt.plot(self._target[:, 0], self._target[:, 1], 'g^', label='target')
plt.plot(self._result[:, 0], self._result[:, 1], 'bo', label='result')
plt.legend()
plt.draw()
def __call__(self, transformation):
self._result = transformation.transform(self._source)
plt.cla()
plt.axis('equal')
plt.plot(self._source[:, 0], self._source[:, 1], 'ro', label='source')
        plt.plot(self._target[:, 0], self._target[:, 1], 'g^', label='target')
plt.plot(self._result[:, 0], self._result[:, 1], 'bo', label='result')
plt.legend()
if self._save:
plt.savefig('image_%04d.png' % self._cnt)
plt.draw()
plt.pause(0.001)
self._cnt += 1
class Open3dVisualizerCallback(object):
"""Display the 3D registration result of each iteration.
Args:
source (numpy.ndarray): Source point cloud data.
target (numpy.ndarray): Target point cloud data.
        save (bool, optional): If this flag is True,
            each iteration's image is saved with a sequential number.
keep_window (bool, optional): If this flag is True,
the drawing window blocks after registration is finished.
fov: Field of view (degree).
"""
def __init__(self, source, target, save=False,
keep_window=True, fov=None):
self._vis = o3.visualization.Visualizer()
self._vis.create_window()
self._source = source
self._target = target
self._result = copy.deepcopy(self._source)
self._save = save
self._keep_window = keep_window
self._source.paint_uniform_color([1, 0, 0])
self._target.paint_uniform_color([0, 1, 0])
self._result.paint_uniform_color([0, 0, 1])
self._vis.add_geometry(self._source)
self._vis.add_geometry(self._target)
self._vis.add_geometry(self._result)
        if fov is not None:
ctr = self._vis.get_view_control()
ctr.change_field_of_view(step=fov)
self._cnt = 0
def __del__(self):
if self._keep_window:
self._vis.run()
self._vis.destroy_window()
def __call__(self, transformation):
self._result.points = transformation.transform(self._source.points)
self._vis.update_geometry(self._source)
self._vis.update_geometry(self._target)
self._vis.update_geometry(self._result)
self._vis.poll_events()
self._vis.update_renderer()
if self._save:
self._vis.capture_screen_image("image_%04d.jpg" % self._cnt)
self._cnt += 1
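# Hedged usage sketch (assumes probreg's CPD API and open3d point clouds
# `source`/`target`; names illustrative):
#   from probreg import cpd
#   cbs = [Open3dVisualizerCallback(source, target)]
#   tf_param, _, _ = cpd.registration_cpd(source, target, callbacks=cbs)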
| [
"[email protected]"
]
| |
b848fd06d86f2f7fb6045ef2703e6d9bcb764ab3 | c208954de92470c0144fad2e07a92ed1822edd59 | /visit_wd/out_dis.py | 72e792a11c66f6d1ddcd7225401dccb0a3f16ed6 | [
"MIT"
]
| permissive | rendy026/reverse-enginnering | 4217f3b723569fb792bac0f22a56a305199db1dc | f04cec0bf518a2617fc4fd7155f755fafc2af799 | refs/heads/master | 2023-01-07T15:49:15.791052 | 2020-10-13T09:22:02 | 2020-10-13T09:22:02 | 303,575,571 | 0 | 0 | MIT | 2020-10-13T09:41:59 | 2020-10-13T03:17:42 | Python | UTF-8 | Python | false | false | 39,462 | py | # FileNames : <zen_ezz>
# Python Bytecode : 3.8.5
# Selector <module> In Line 1 file out.pyc
# Timestamp In Code : (2020-08-24 02:27:49)
# Method Name: <module>
# Filename: <zen_ezz>
# Argument count: 0
# Position-only argument count: 0
# Keyword-only arguments: 0
# Number of locals: 0
# Stack size: 9
# Flags: 0x00000040 (NOFREE)
# First Line: 1
# Constants:
# 0: 0
# 1: ('TelegramClient', 'sync', 'events')
# 2: ('GetHistoryRequest', 'GetBotCallbackAnswerRequest')
# 3: ('SessionPasswordNeededError',)
# 4: ('sleep',)
# 5: ('BeautifulSoup',)
# 6: None
# 7: 'cfg.json'
# 8: 'r'
# 9: 'walletltc'
# 10: 'walletdoge'
# 11: 'walletzec'
# 12: 'walletbch'
# 13: 'walletbtc'
# 14: <code object mengetik at 0xae3a56a0, file "<zen_ezz>", line 16>
# 15: 'mengetik'
# 16: <code object tunggu at 0xae5a7df0, file "<zen_ezz>", line 22>
# 17: 'tunggu'
# 18: '\x1b[1;35m\n \neeeee eeeee e e eeee eeeee e eeeee \n8 8 8 8 8 8 8 " 8 8 8 8 \n8eee8e 8eee8 8eeee8 8eee eeee8 8e 8e 8 \n88 8 88 8 88 88 88 88 88 8 \n88 8 88 8 88 88ee 88ee8 88 88ee8 \n eeeee\n\x1b[1;36m=============================================\n\x1b[1;32m ~ [email protected] - t.me/rayez_id \n\x1b[1;36m=============================================\n'
# 19: 'clear'
# 20: '\x1b[1;35mCONFIGURATION SETTING \x1b[1;36m: AUTO VISIT & WD CLICKBOT'
# 21: <code object password at 0xae33b708, file "<zen_ezz>", line 48>
# 22: 'password'
# 23: '\x1b[1;35mINPUT CURRENCY \x1b[1;36m: '
# 24: 'doge'
# 25: 'ltc'
# 26: 'zec'
# 27: 'btc'
# 28: 'bch'
# 29: '\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n'
# 30: '\x1b[1;35mSTATUS \x1b[1;36m: Wrong Input \n'
# 31: False
# 32: <code object animate at 0xae33b840, file "<zen_ezz>", line 81>
# 33: 'animate'
# 34: ('target',)
# 35: 2
# 36: True
# 37: ' Jangan lupa bahagia, tersenyumlah =)'
# 38: ''
# 39: ('end',)
# 40: 0.1
# 41: '\n'
# 42: 'session'
# 43: 'https://ipinfo.io/json'
# 44: '\x1b[1;35mYOUR LOCATION\t: \x1b[1;36m{1}\t({2})\n\x1b[1;35mYOUR OWN IP\t: \x1b[1;36m{0}'
# 45: 'ip'
# 46: '[NO DATA]'
# 47: 'region'
# 48: 'country'
# 49: '\x1b[1;35mWELCOME TO BOT :\x1b[1;36m Visit and Withdraw Clickbot\n\x1b[1;35mNOTE :\x1b[1;36m This Script Is not for sale !\n'
# 50: 'User-Agent'
# 51: 'Mozilla/5.0 (Windows NT 10.0; Win32; x86) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'
# 52: 800812
# 53: 'db55ad67a98df35667ca788b97f771f5'
# 54: <code object withdraw at 0xae33b8a8, file "<zen_ezz>", line 118>
# 55: 'withdraw'
# 56: <code object visit at 0xae33b910, file "<zen_ezz>", line 144>
# 57: 'visit'
# 58: 'list.txt'
# 59: '\x1b[1;36m===============================================\n'
# 60: '\x1b[1;35mYOUR NUMBER : \x1b[1;36m'
# 61: 'session/'
# 62: '\x1b[1;35mVERIF CODE : \x1b[1;36m'
# 63: '\x1b[1;35m2FA CODE : \x1b[1;36m'
# 64: '\r\x1b[1;35mSTATUS : \x1b[1;36mSomething Wrong happened !\n'
# 65: '@Dogecoin_click_bot'
# 66: 'DOGE'
# 67: '@Litecoin_click_bot'
# 68: 'LTC'
# 69: '@Zcash_click_bot'
# 70: 'ZEC'
# 71: '@BCH_clickbot'
# 72: 'BCH'
# 73: '@BitcoinClick_bot'
# 74: 'BTC'
# 75: '\r\x1b[1;35mSTATUS AKUN : \x1b[1;36mSomething Wrong happened !\n\n'
# 76: 1
# Names:
# 0: telethon
# 1: TelegramClient
# 2: sync
# 3: events
# 4: telethon.tl.functions.messages
# 5: GetHistoryRequest
# 6: GetBotCallbackAnswerRequest
# 7: telethon.errors
# 8: SessionPasswordNeededError
# 9: time
# 10: sleep
# 11: bs4
# 12: BeautifulSoup
# 13: re
# 14: sys
# 15: os
# 16: colorama
# 17: random
# 18: json
# 19: threading
# 20: itertools
# 21: requests
# 22: open
# 23: fh
# 24: read
# 25: json_str
# 26: loads
# 27: json_value
# 28: ltcwallet
# 29: dogewallet
# 30: zecwallet
# 31: bchwallet
# 32: btcwallet
# 33: mengetik
# 34: tunggu
# 35: banner
# 36: system
# 37: print
# 38: password
# 39: input
# 40: currency
# 41: exit
# 42: ansi
# 43: clear_screen
# 44: done
# 45: animate
# 46: Thread
# 47: t
# 48: start
# 49: lines
# 50: line
# 51: c
# 52: stdout
# 53: flush
# 54: path
# 55: exists
# 56: makedirs
# 57: get
# 58: r
# 59: text
# 60: data
# 61: format
# 62: ua
# 63: api_id
# 64: api_hash
# 65: withdraw
# 66: visit
# 67: f
# 68: len
# 69: readlines
# 70: baris
# 71: close
# 72: range
# 73: i
# 74: strip
# 75: phone_number
# 76: client
# 77: connect
# 78: is_user_authorized
# 79: send_code_request
# 80: sign_in
# 81: me
# 82: passw
# 83: Exception
# 84: write
# 85: get_me
# 86: myself
# 87: disconnect
# 88: get_entity
# 89: channel_entity
# 90: channel_username
# 91: cr
# 92: wallet
1 0 LOAD_CONST 0 (0)
2 LOAD_CONST 1 (('TelegramClient', 'sync', 'events'))
4 IMPORT_NAME 0 (telethon)
6 IMPORT_FROM 1 (TelegramClient)
8 STORE_NAME 1 (TelegramClient)
10 IMPORT_FROM 2 (sync)
12 STORE_NAME 2 (sync)
14 IMPORT_FROM 3 (events)
16 STORE_NAME 3 (events)
18 POP_TOP
2 20 LOAD_CONST 0 (0)
22 LOAD_CONST 2 (('GetHistoryRequest', 'GetBotCallbackAnswerRequest'))
24 IMPORT_NAME 4 (telethon.tl.functions.messages)
26 IMPORT_FROM 5 (GetHistoryRequest)
28 STORE_NAME 5 (GetHistoryRequest)
30 IMPORT_FROM 6 (GetBotCallbackAnswerRequest)
32 STORE_NAME 6 (GetBotCallbackAnswerRequest)
34 POP_TOP
3 36 LOAD_CONST 0 (0)
38 LOAD_CONST 3 (('SessionPasswordNeededError',))
40 IMPORT_NAME 7 (telethon.errors)
42 IMPORT_FROM 8 (SessionPasswordNeededError)
44 STORE_NAME 8 (SessionPasswordNeededError)
46 POP_TOP
4 48 LOAD_CONST 0 (0)
50 LOAD_CONST 4 (('sleep',))
52 IMPORT_NAME 9 (time)
54 IMPORT_FROM 10 (sleep)
56 STORE_NAME 10 (sleep)
58 POP_TOP
5 60 LOAD_CONST 0 (0)
62 LOAD_CONST 5 (('BeautifulSoup',))
64 IMPORT_NAME 11 (bs4)
66 IMPORT_FROM 12 (BeautifulSoup)
68 STORE_NAME 12 (BeautifulSoup)
70 POP_TOP
6 72 LOAD_CONST 0 (0)
74 LOAD_CONST 6 (None)
76 IMPORT_NAME 13 (re)
78 STORE_NAME 13 (re)
80 LOAD_CONST 0 (0)
82 LOAD_CONST 6 (None)
84 IMPORT_NAME 14 (sys)
86 STORE_NAME 14 (sys)
88 LOAD_CONST 0 (0)
90 LOAD_CONST 6 (None)
92 IMPORT_NAME 15 (os)
94 STORE_NAME 15 (os)
96 LOAD_CONST 0 (0)
98 LOAD_CONST 6 (None)
100 IMPORT_NAME 9 (time)
102 STORE_NAME 9 (time)
104 LOAD_CONST 0 (0)
106 LOAD_CONST 6 (None)
108 IMPORT_NAME 16 (colorama)
110 STORE_NAME 16 (colorama)
112 LOAD_CONST 0 (0)
114 LOAD_CONST 6 (None)
116 IMPORT_NAME 17 (random)
118 STORE_NAME 17 (random)
120 LOAD_CONST 0 (0)
122 LOAD_CONST 6 (None)
124 IMPORT_NAME 18 (json)
126 STORE_NAME 18 (json)
128 LOAD_CONST 0 (0)
130 LOAD_CONST 6 (None)
132 IMPORT_NAME 19 (threading)
134 STORE_NAME 19 (threading)
136 LOAD_CONST 0 (0)
138 LOAD_CONST 6 (None)
140 IMPORT_NAME 20 (itertools)
142 STORE_NAME 20 (itertools)
144 LOAD_CONST 0 (0)
146 LOAD_CONST 6 (None)
148 IMPORT_NAME 21 (requests)
150 STORE_NAME 21 (requests)
7 152 LOAD_NAME 22 (open)
154 LOAD_CONST 7 ('cfg.json')
156 LOAD_CONST 8 ('r')
158 CALL_FUNCTION 2
160 SETUP_WITH 64 (to 226)
162 STORE_NAME 23 (fh)
8 164 LOAD_NAME 23 (fh)
166 LOAD_METHOD 24 (read)
168 CALL_METHOD 0
170 STORE_NAME 25 (json_str)
9 172 LOAD_NAME 18 (json)
174 LOAD_METHOD 26 (loads)
176 LOAD_NAME 25 (json_str)
178 CALL_METHOD 1
180 STORE_NAME 27 (json_value)
10 182 LOAD_NAME 27 (json_value)
184 LOAD_CONST 9 ('walletltc')
186 BINARY_SUBSCR
188 STORE_NAME 28 (ltcwallet)
11 190 LOAD_NAME 27 (json_value)
192 LOAD_CONST 10 ('walletdoge')
194 BINARY_SUBSCR
196 STORE_NAME 29 (dogewallet)
12 198 LOAD_NAME 27 (json_value)
200 LOAD_CONST 11 ('walletzec')
202 BINARY_SUBSCR
204 STORE_NAME 30 (zecwallet)
13 206 LOAD_NAME 27 (json_value)
208 LOAD_CONST 12 ('walletbch')
210 BINARY_SUBSCR
212 STORE_NAME 31 (bchwallet)
14 214 LOAD_NAME 27 (json_value)
216 LOAD_CONST 13 ('walletbtc')
218 BINARY_SUBSCR
220 STORE_NAME 32 (btcwallet)
222 POP_BLOCK
224 BEGIN_FINALLY
>> 226 WITH_CLEANUP_START
228 WITH_CLEANUP_FINISH
230 END_FINALLY
from telethon import TelegramClient, sync, events
from telethon.tl.functions.messages import GetHistoryRequest, GetBotCallbackAnswerRequest
from telethon.errors import SessionPasswordNeededError
from time import sleep
from bs4 import BeautifulSoup
import re, sys, os, time, colorama, random, json, threading, itertools, requests
with open('cfg.json', 'r') as fh:
json_str = fh.read()
json_value = json.loads(json_str)
ltcwallet = json_value['walletltc']
dogewallet = json_value['walletdoge']
zecwallet = json_value['walletzec']
bchwallet = json_value['walletbch']
btcwallet = json_value['walletbtc']
16 232 LOAD_CONST 14 (<code object mengetik at 0xae3a56a0, file "<zen_ezz>", line 16>)
234 LOAD_CONST 15 ('mengetik')
236 MAKE_FUNCTION 0
238 STORE_NAME 33 (mengetik)
def mengetik():
22 240 LOAD_CONST 16 (<code object tunggu at 0xae5a7df0, file "<zen_ezz>", line 22>)
242 LOAD_CONST 17 ('tunggu')
244 MAKE_FUNCTION 0
246 STORE_NAME 34 (tunggu)
def tunggu():
32 248 LOAD_CONST 18 ('\x1b[1;35m\n \neeeee eeeee e e eeee eeeee e eeeee \n8 8 8 8 8 8 8 " 8 8 8 8 \n8eee8e 8eee8 8eeee8 8eee eeee8 8e 8e 8 \n88 8 88 8 88 88 88 88 88 8 \n88 8 88 8 88 88ee 88ee8 88 88ee8 \n eeeee\n\x1b[1;36m=============================================\n\x1b[1;32m ~ [email protected] - t.me/rayez_id \n\x1b[1;36m=============================================\n')
250 STORE_NAME 35 (banner)
44 252 LOAD_NAME 15 (os)
254 LOAD_METHOD 36 (system)
256 LOAD_CONST 19 ('clear')
258 CALL_METHOD 1
260 POP_TOP
45 262 LOAD_NAME 37 (print)
264 LOAD_NAME 35 (banner)
266 CALL_FUNCTION 1
268 POP_TOP
46 270 LOAD_NAME 37 (print)
272 LOAD_CONST 20 ('\x1b[1;35mCONFIGURATION SETTING \x1b[1;36m: AUTO VISIT & WD CLICKBOT')
274 CALL_FUNCTION 1
276 POP_TOP
banner = '\x1b[1;35m\n \neeeee eeeee e e eeee eeeee e eeeee \n8 8 8 8 8 8 8 " 8 8 8 8 \n8eee8e 8eee8 8eeee8 8eee eeee8 8e 8e 8 \n88 8 88 8 88 88 88 88 88 8 \n88 8 88 8 88 88ee 88ee8 88 88ee8 \n eeeee\n\x1b[1;36m=============================================\n\x1b[1;32m ~ [email protected] - t.me/rayez_id \n\x1b[1;36m=============================================\n'
os.system('clear')
print(banner)
print('\x1b[1;35mCONFIGURATION SETTING \x1b[1;36m: AUTO VISIT & WD CLICKBOT')
48 278 LOAD_CONST 21 (<code object password at 0xae33b708, file "<zen_ezz>", line 48>)
280 LOAD_CONST 22 ('password')
282 MAKE_FUNCTION 0
284 STORE_NAME 38 (password)
def password():
71 286 LOAD_NAME 38 (password)
288 CALL_FUNCTION 0
290 POP_TOP
password()
72 292 LOAD_NAME 39 (input)
294 LOAD_CONST 23 ('\x1b[1;35mINPUT CURRENCY \x1b[1;36m: ')
296 CALL_FUNCTION 1
298 STORE_NAME 40 (currency)
73 300 LOAD_NAME 40 (currency)
302 LOAD_CONST 24 ('doge')
304 COMPARE_OP 2 (==)
306 EXTENDED_ARG 1
308 POP_JUMP_IF_TRUE 350
310 LOAD_NAME 40 (currency)
312 LOAD_CONST 25 ('ltc')
314 COMPARE_OP 2 (==)
316 EXTENDED_ARG 1
318 POP_JUMP_IF_TRUE 350
320 LOAD_NAME 40 (currency)
322 LOAD_CONST 26 ('zec')
324 COMPARE_OP 2 (==)
326 EXTENDED_ARG 1
328 POP_JUMP_IF_TRUE 350
330 LOAD_NAME 40 (currency)
332 LOAD_CONST 27 ('btc')
334 COMPARE_OP 2 (==)
336 EXTENDED_ARG 1
338 POP_JUMP_IF_TRUE 350
340 LOAD_NAME 40 (currency)
342 LOAD_CONST 28 ('bch')
344 COMPARE_OP 2 (==)
346 EXTENDED_ARG 1
348 POP_JUMP_IF_FALSE 360
74 >> 350 LOAD_NAME 37 (print)
352 LOAD_CONST 29 ('\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n')
354 CALL_FUNCTION 1
356 POP_TOP
358 JUMP_FORWARD 16 (to 376)
76 >> 360 LOAD_NAME 37 (print)
362 LOAD_CONST 30 ('\x1b[1;35mSTATUS \x1b[1;36m: Wrong Input \n')
364 CALL_FUNCTION 1
366 POP_TOP
77 368 LOAD_NAME 14 (sys)
370 LOAD_METHOD 41 (exit)
372 CALL_METHOD 0
374 POP_TOP
78 >> 376 LOAD_NAME 37 (print)
378 LOAD_NAME 16 (colorama)
380 LOAD_ATTR 42 (ansi)
382 LOAD_METHOD 43 (clear_screen)
384 CALL_METHOD 0
386 CALL_FUNCTION 1
388 POP_TOP
79 390 LOAD_CONST 31 (False)
392 STORE_NAME 44 (done)
currency = input('\x1b[1;35mINPUT CURRENCY \x1b[1;36m: ')
if currency == 'doge' or currency == 'ltc' or currency == 'zec' or currency == 'btc' or currency == 'bch':
print('\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n')
else:
print('\x1b[1;35mSTATUS \x1b[1;36m: Wrong Input \n')
sys.exit()
print(colorama.ansi.clear_screen())
done = False
81 394 LOAD_CONST 32 (<code object animate at 0xae33b840, file "<zen_ezz>", line 81>)
396 LOAD_CONST 33 ('animate')
398 MAKE_FUNCTION 0
400 STORE_NAME 45 (animate)
def animate():
89 402 LOAD_NAME 19 (threading)
404 LOAD_ATTR 46 (Thread)
406 LOAD_NAME 45 (animate)
408 LOAD_CONST 34 (('target',))
410 CALL_FUNCTION_KW 1
412 STORE_NAME 47 (t)
90 414 LOAD_NAME 47 (t)
416 LOAD_METHOD 48 (start)
418 CALL_METHOD 0
420 POP_TOP
91 422 LOAD_NAME 9 (time)
424 LOAD_METHOD 10 (sleep)
426 LOAD_CONST 35 (2)
428 CALL_METHOD 1
430 POP_TOP
92 432 LOAD_CONST 36 (True)
434 STORE_NAME 44 (done)
94 436 LOAD_CONST 37 (' Jangan lupa bahagia, tersenyumlah =)')
93 438 BUILD_LIST 1
440 STORE_NAME 49 (lines)
95 442 LOAD_NAME 49 (lines)
444 GET_ITER
>> 446 FOR_ITER 66 (to 514)
448 STORE_NAME 50 (line)
96 450 LOAD_NAME 50 (line)
452 GET_ITER
>> 454 FOR_ITER 38 (to 494)
456 STORE_NAME 51 (c)
97 458 LOAD_NAME 37 (print)
460 LOAD_NAME 51 (c)
462 LOAD_CONST 38 ('')
464 LOAD_CONST 39 (('end',))
466 CALL_FUNCTION_KW 2
468 POP_TOP
98 470 LOAD_NAME 14 (sys)
472 LOAD_ATTR 52 (stdout)
474 LOAD_METHOD 53 (flush)
476 CALL_METHOD 0
478 POP_TOP
99 480 LOAD_NAME 9 (time)
482 LOAD_METHOD 10 (sleep)
484 LOAD_CONST 40 (0.1)
486 CALL_METHOD 1
488 POP_TOP
490 EXTENDED_ARG 1
492 JUMP_ABSOLUTE 454
t = threading.Thread(target=animate)
t.start()
time.sleep(2)
done = True
lines = [
' Jangan lupa bahagia, tersenyumlah =)']
for line in lines:
for c in line:
print(c, end='')
sys.stdout.flush()
time.sleep(0.1)
100 >> 494 LOAD_NAME 37 (print)
496 LOAD_CONST 41 ('\n')
498 CALL_FUNCTION 1
500 POP_TOP
101 502 LOAD_NAME 10 (sleep)
504 LOAD_CONST 35 (2)
506 CALL_FUNCTION 1
508 POP_TOP
510 EXTENDED_ARG 1
512 JUMP_ABSOLUTE 446
print('\n')
sleep(2)
103 >> 514 LOAD_NAME 15 (os)
516 LOAD_ATTR 54 (path)
518 LOAD_METHOD 55 (exists)
520 LOAD_CONST 42 ('session')
522 CALL_METHOD 1
524 EXTENDED_ARG 2
526 POP_JUMP_IF_TRUE 538
104 528 LOAD_NAME 15 (os)
530 LOAD_METHOD 56 (makedirs)
532 LOAD_CONST 42 ('session')
534 CALL_METHOD 1
536 POP_TOP
106 >> 538 LOAD_NAME 15 (os)
540 LOAD_METHOD 36 (system)
542 LOAD_CONST 19 ('clear')
544 CALL_METHOD 1
546 POP_TOP
107 548 LOAD_NAME 21 (requests)
550 LOAD_METHOD 57 (get)
552 LOAD_CONST 43 ('https://ipinfo.io/json')
554 CALL_METHOD 1
556 STORE_NAME 58 (r)
108 558 LOAD_NAME 18 (json)
560 LOAD_METHOD 26 (loads)
562 LOAD_NAME 58 (r)
564 LOAD_ATTR 59 (text)
566 CALL_METHOD 1
568 STORE_NAME 60 (data)
109 570 LOAD_CONST 44 ('\x1b[1;35mYOUR LOCATION\t: \x1b[1;36m{1}\t({2})\n\x1b[1;35mYOUR OWN IP\t: \x1b[1;36m{0}')
572 STORE_NAME 59 (text)
110 574 LOAD_NAME 59 (text)
576 LOAD_METHOD 61 (format)
578 LOAD_NAME 60 (data)
580 LOAD_METHOD 57 (get)
582 LOAD_CONST 45 ('ip')
584 LOAD_CONST 46 ('[NO DATA]')
586 CALL_METHOD 2
588 LOAD_NAME 60 (data)
590 LOAD_METHOD 57 (get)
592 LOAD_CONST 47 ('region')
594 LOAD_CONST 46 ('[NO DATA]')
596 CALL_METHOD 2
598 LOAD_NAME 60 (data)
600 LOAD_METHOD 57 (get)
602 LOAD_CONST 48 ('country')
604 LOAD_CONST 46 ('[NO DATA]')
606 CALL_METHOD 2
608 CALL_METHOD 3
610 STORE_NAME 59 (text)
111 612 LOAD_NAME 37 (print)
614 LOAD_NAME 35 (banner)
616 CALL_FUNCTION 1
618 POP_TOP
112 620 LOAD_NAME 33 (mengetik)
622 LOAD_NAME 59 (text)
624 CALL_FUNCTION 1
626 POP_TOP
113 628 LOAD_NAME 33 (mengetik)
630 LOAD_CONST 49 ('\x1b[1;35mWELCOME TO BOT :\x1b[1;36m Visit and Withdraw Clickbot\n\x1b[1;35mNOTE :\x1b[1;36m This Script Is not for sale !\n')
632 CALL_FUNCTION 1
634 POP_TOP
114 636 LOAD_CONST 50 ('User-Agent')
638 LOAD_CONST 51 ('Mozilla/5.0 (Windows NT 10.0; Win32; x86) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36')
640 BUILD_MAP 1
642 STORE_NAME 62 (ua)
115 644 LOAD_CONST 52 (800812)
646 STORE_NAME 63 (api_id)
116 648 LOAD_CONST 53 ('db55ad67a98df35667ca788b97f771f5')
650 STORE_NAME 64 (api_hash)
os.path.exists('session') or os.makedirs('session')
os.system('clear')
r = requests.get('https://ipinfo.io/json')
data = json.loads(r.text)
text = '\x1b[1;35mYOUR LOCATION\t: \x1b[1;36m{1}\t({2})\n\x1b[1;35mYOUR OWN IP\t: \x1b[1;36m{0}'
text = text.format(data.get('ip', '[NO DATA]'), data.get('region', '[NO DATA]'), data.get('country', '[NO DATA]'))
print(banner)
mengetik(text)
mengetik('\x1b[1;35mWELCOME TO BOT :\x1b[1;36m Visit and Withdraw Clickbot\n\x1b[1;35mNOTE :\x1b[1;36m This Script Is not for sale !\n')
ua = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win32; x86) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'}
api_id = 800812
api_hash = 'db55ad67a98df35667ca788b97f771f5'
118 652 LOAD_CONST 54 (<code object withdraw at 0xae33b8a8, file "<zen_ezz>", line 118>)
654 LOAD_CONST 55 ('withdraw')
656 MAKE_FUNCTION 0
658 STORE_NAME 65 (withdraw)
def withdraw():
144 660 LOAD_CONST 56 (<code object visit at 0xae33b910, file "<zen_ezz>", line 144>)
662 LOAD_CONST 57 ('visit')
664 MAKE_FUNCTION 0
666 STORE_NAME 66 (visit)
def visit():
221 668 LOAD_NAME 22 (open)
670 LOAD_CONST 58 ('list.txt')
672 CALL_FUNCTION 1
674 STORE_NAME 67 (f)
222 676 LOAD_NAME 68 (len)
678 LOAD_NAME 67 (f)
680 LOAD_METHOD 69 (readlines)
682 CALL_METHOD 0
684 CALL_FUNCTION 1
686 STORE_NAME 70 (baris)
223 688 LOAD_NAME 67 (f)
690 LOAD_METHOD 71 (close)
692 CALL_METHOD 0
694 POP_TOP
225 696 LOAD_NAME 72 (range)
698 LOAD_NAME 70 (baris)
700 CALL_FUNCTION 1
702 GET_ITER
>> 704 EXTENDED_ARG 1
706 FOR_ITER 460 (to 1168)
708 STORE_NAME 73 (i)
226 710 LOAD_NAME 22 (open)
712 LOAD_CONST 58 ('list.txt')
714 CALL_FUNCTION 1
716 STORE_NAME 67 (f)
227 718 LOAD_NAME 67 (f)
720 LOAD_METHOD 69 (readlines)
722 CALL_METHOD 0
724 LOAD_NAME 73 (i)
726 BINARY_SUBSCR
728 LOAD_METHOD 74 (strip)
730 CALL_METHOD 0
732 STORE_NAME 75 (phone_number)
228 734 LOAD_NAME 67 (f)
736 LOAD_METHOD 71 (close)
738 CALL_METHOD 0
740 POP_TOP
f = open('list.txt')
baris = len(f.readlines())
f.close()
for i in range(baris):
f = open('list.txt')
phone_number = f.readlines()[i].strip()
f.close()
229 742 LOAD_NAME 33 (mengetik)
744 LOAD_CONST 59 ('\x1b[1;36m===============================================\n')
746 CALL_FUNCTION 1
748 POP_TOP
230 750 LOAD_NAME 37 (print)
752 LOAD_CONST 60 ('\x1b[1;35mYOUR NUMBER : \x1b[1;36m')
754 LOAD_NAME 75 (phone_number)
756 FORMAT_VALUE 0
758 BUILD_STRING 2
760 CALL_FUNCTION 1
762 POP_TOP
231 764 LOAD_NAME 1 (TelegramClient)
766 LOAD_CONST 61 ('session/')
768 LOAD_NAME 75 (phone_number)
770 BINARY_ADD
772 LOAD_NAME 63 (api_id)
774 LOAD_NAME 64 (api_hash)
776 CALL_FUNCTION 3
778 STORE_NAME 76 (client)
232 780 LOAD_NAME 76 (client)
782 LOAD_METHOD 77 (connect)
784 CALL_METHOD 0
786 POP_TOP
233 788 LOAD_NAME 76 (client)
790 LOAD_METHOD 78 (is_user_authorized)
792 CALL_METHOD 0
794 EXTENDED_ARG 3
796 POP_JUMP_IF_TRUE 912
mengetik('\x1b[1;36m===============================================\n')
print(f"\x1b[1;35mYOUR NUMBER : \x1b[1;36m{phone_number}")
client = TelegramClient('session/' + phone_number, api_id, api_hash)
client.connect()
client.is_user_authorized()
234 798 SETUP_FINALLY 30 (to 830)
try:
235 800 LOAD_NAME 76 (client)
802 LOAD_METHOD 79 (send_code_request)
804 LOAD_NAME 75 (phone_number)
806 CALL_METHOD 1
808 POP_TOP
236 810 LOAD_NAME 76 (client)
812 LOAD_METHOD 80 (sign_in)
814 LOAD_NAME 75 (phone_number)
816 LOAD_NAME 39 (input)
818 LOAD_CONST 62 ('\x1b[1;35mVERIF CODE : \x1b[1;36m')
820 CALL_FUNCTION 1
822 CALL_METHOD 2
824 STORE_NAME 81 (me)
826 POP_BLOCK
828 JUMP_FORWARD 82 (to 912)
client.send_code_request(phone_number)
me = client.sign_in(phone_number, input('\x1b[1;35mVERIF CODE : \x1b[1;36m'))
237 >> 830 DUP_TOP
832 LOAD_NAME 8 (SessionPasswordNeededError)
834 COMPARE_OP 10 (exception match)
836 EXTENDED_ARG 3
838 POP_JUMP_IF_FALSE 870
840 POP_TOP
842 POP_TOP
844 POP_TOP
238 846 LOAD_NAME 39 (input)
848 LOAD_CONST 63 ('\x1b[1;35m2FA CODE : \x1b[1;36m')
850 CALL_FUNCTION 1
852 STORE_NAME 82 (passw)
239 854 LOAD_NAME 76 (client)
856 LOAD_METHOD 48 (start)
858 LOAD_NAME 75 (phone_number)
860 LOAD_NAME 82 (passw)
862 CALL_METHOD 2
864 STORE_NAME 81 (me)
866 POP_EXCEPT
868 JUMP_FORWARD 42 (to 912)
except (SessionPasswordNeededError):
passw = input('\x1b[1;35m2FA CODE : \x1b[1;36m')
me = client.start(phone_number, passw)
240 >> 870 DUP_TOP
872 LOAD_NAME 83 (Exception)
874 COMPARE_OP 10 (exception match)
876 EXTENDED_ARG 3
878 POP_JUMP_IF_FALSE 910
880 POP_TOP
882 POP_TOP
884 POP_TOP
except Exception:
241 886 LOAD_NAME 14 (sys)
888 LOAD_ATTR 52 (stdout)
890 LOAD_METHOD 84 (write)
892 LOAD_CONST 64 ('\r\x1b[1;35mSTATUS : \x1b[1;36mSomething Wrong happened !\n')
894 CALL_METHOD 1
896 POP_TOP
sys.stdout.write('\r\x1b[1;35mSTATUS : \x1b[1;36mSomething Wrong happened !\n')
242 898 POP_EXCEPT
900 POP_TOP
902 EXTENDED_ARG 4
904 JUMP_ABSOLUTE 1168
906 POP_EXCEPT
908 JUMP_FORWARD 2 (to 912)
>> 910 END_FINALLY
243 >> 912 LOAD_NAME 76 (client)
914 LOAD_METHOD 85 (get_me)
916 CALL_METHOD 0
918 STORE_NAME 86 (myself)
myself = client.get_me()
244 920 SETUP_FINALLY 232 (to 1154)
922 SETUP_FINALLY 184 (to 1108)
try:
245 924 LOAD_NAME 40 (currency)
926 LOAD_CONST 24 ('doge')
928 COMPARE_OP 2 (==)
930 EXTENDED_ARG 3
932 POP_JUMP_IF_FALSE 958
246 934 LOAD_NAME 76 (client)
936 LOAD_METHOD 88 (get_entity)
938 LOAD_CONST 65 ('@Dogecoin_click_bot')
940 CALL_METHOD 1
942 STORE_NAME 89 (channel_entity)
247 944 LOAD_CONST 65 ('@Dogecoin_click_bot')
946 STORE_NAME 90 (channel_username)
248 948 LOAD_CONST 66 ('DOGE')
950 STORE_NAME 91 (cr)
249 952 LOAD_NAME 29 (dogewallet)
954 STORE_NAME 92 (wallet)
956 JUMP_FORWARD 134 (to 1092)
if currency == 'doge':
channel_entity = client.get_entity('@Dogecoin_click_bot')
channel_username = '@Dogecoin_click_bot'
cr = 'DOGE'
wallet = dogewallet
250 >> 958 LOAD_NAME 40 (currency)
960 LOAD_CONST 25 ('ltc')
962 COMPARE_OP 2 (==)
964 EXTENDED_ARG 3
966 POP_JUMP_IF_FALSE 992
251 968 LOAD_NAME 76 (client)
970 LOAD_METHOD 88 (get_entity)
972 LOAD_CONST 67 ('@Litecoin_click_bot')
974 CALL_METHOD 1
976 STORE_NAME 89 (channel_entity)
252 978 LOAD_CONST 67 ('@Litecoin_click_bot')
980 STORE_NAME 90 (channel_username)
253 982 LOAD_CONST 68 ('LTC')
984 STORE_NAME 91 (cr)
254 986 LOAD_NAME 28 (ltcwallet)
988 STORE_NAME 92 (wallet)
990 JUMP_FORWARD 100 (to 1092)
elif currency == 'ltc':
channel_entity = client.get_entity('@Litecoin_click_bot')
channel_username = '@Litecoin_click_bot'
cr = 'LTC'
wallet = ltcwallet
255 >> 992 LOAD_NAME 40 (currency)
994 LOAD_CONST 26 ('zec')
996 COMPARE_OP 2 (==)
998 EXTENDED_ARG 4
1000 POP_JUMP_IF_FALSE 1026
256 1002 LOAD_NAME 76 (client)
1004 LOAD_METHOD 88 (get_entity)
1006 LOAD_CONST 69 ('@Zcash_click_bot')
1008 CALL_METHOD 1
1010 STORE_NAME 89 (channel_entity)
257 1012 LOAD_CONST 69 ('@Zcash_click_bot')
1014 STORE_NAME 90 (channel_username)
258 1016 LOAD_CONST 70 ('ZEC')
1018 STORE_NAME 91 (cr)
259 1020 LOAD_NAME 30 (zecwallet)
1022 STORE_NAME 92 (wallet)
1024 JUMP_FORWARD 66 (to 1092)
elif currency == 'zec':
channel_entity = client.get_entity('@Zcash_click_bot')
channel_username = '@Zcash_click_bot'
cr = 'ZEC'
wallet = zecwallet
260 >> 1026 LOAD_NAME 40 (currency)
1028 LOAD_CONST 28 ('bch')
1030 COMPARE_OP 2 (==)
1032 EXTENDED_ARG 4
1034 POP_JUMP_IF_FALSE 1060
261 1036 LOAD_NAME 76 (client)
1038 LOAD_METHOD 88 (get_entity)
1040 LOAD_CONST 71 ('@BCH_clickbot')
1042 CALL_METHOD 1
1044 STORE_NAME 89 (channel_entity)
262 1046 LOAD_CONST 71 ('@BCH_clickbot')
1048 STORE_NAME 90 (channel_username)
263 1050 LOAD_CONST 72 ('BCH')
1052 STORE_NAME 91 (cr)
264 1054 LOAD_NAME 31 (bchwallet)
1056 STORE_NAME 92 (wallet)
1058 JUMP_FORWARD 32 (to 1092)
elif currency == 'bch':
channel_entity = client.get_entity('@BCH_clickbot')
channel_username = '@BCH_clickbot'
cr = 'BCH'
wallet = bchwallet
265 >> 1060 LOAD_NAME 40 (currency)
1062 LOAD_CONST 27 ('btc')
1064 COMPARE_OP 2 (==)
1066 EXTENDED_ARG 4
1068 POP_JUMP_IF_FALSE 1092
266 1070 LOAD_NAME 76 (client)
1072 LOAD_METHOD 88 (get_entity)
1074 LOAD_CONST 73 ('@BitcoinClick_bot')
1076 CALL_METHOD 1
1078 STORE_NAME 89 (channel_entity)
267 1080 LOAD_CONST 73 ('@BitcoinClick_bot')
1082 STORE_NAME 90 (channel_username)
268 1084 LOAD_CONST 74 ('BTC')
1086 STORE_NAME 91 (cr)
269 1088 LOAD_NAME 32 (btcwallet)
1090 STORE_NAME 92 (wallet)
elif currency == 'btc':
channel_entity = client.get_entity('@BitcoinClick_bot')
channel_username = '@BitcoinClick_bot'
cr = 'BTC'
wallet = btcwallet
270 >> 1092 LOAD_NAME 66 (visit)
1094 CALL_FUNCTION 0
1096 POP_TOP
visit()
271 1098 LOAD_NAME 65 (withdraw)
1100 CALL_FUNCTION 0
1102 POP_TOP
1104 POP_BLOCK
1106 JUMP_FORWARD 42 (to 1150)
withdraw()
272 >> 1108 DUP_TOP
1110 LOAD_NAME 83 (Exception)
1112 COMPARE_OP 10 (exception match)
1114 EXTENDED_ARG 4
1116 POP_JUMP_IF_FALSE 1148
1118 POP_TOP
1120 POP_TOP
1122 POP_TOP
273 1124 LOAD_NAME 14 (sys)
1126 LOAD_ATTR 52 (stdout)
1128 LOAD_METHOD 84 (write)
1130 LOAD_CONST 75 ('\r\x1b[1;35mSTATUS AKUN : \x1b[1;36mSomething Wrong happened !\n\n')
1132 CALL_METHOD 1
1134 POP_TOP
274 1136 LOAD_NAME 10 (sleep)
1138 LOAD_CONST 76 (1)
1140 CALL_FUNCTION 1
1142 POP_TOP
1144 POP_EXCEPT
1146 JUMP_FORWARD 2 (to 1150)
>> 1148 END_FINALLY
>> 1150 POP_BLOCK
1152 BEGIN_FINALLY
else:
pass
276 >> 1154 LOAD_NAME 76 (client)
1156 LOAD_METHOD 87 (disconnect)
1158 CALL_METHOD 0
1160 POP_TOP
1162 END_FINALLY # <- pass?
1164 EXTENDED_ARG 2
1166 JUMP_ABSOLUTE 704
>> 1168 LOAD_CONST 6 (None)
1170 RETURN_VALUE
except Exception:
sys.stdout.write('\r\x1b[1;35mSTATUS AKUN : \x1b[1;36mSomething Wrong happened !\n\n')
sleep(1)
client.disconnect()
| [
"[email protected]"
]
| |
02c7a58b6b502e705b6ff5ff30e1577c75b11799 | b4cc597f7efa74d3bdea48aaeca7c675235351d6 | /param_files/0.0eV/reps_files/file_format_Hz.py | 39e4383d427caaa7151dd4a352dab909becdb16d | []
| no_license | franciscovillaescusa/Euclid_neutrino_comparison | 52de9fc72b1240319bd63341b94abb82471c721a | 9483de6512693d7303c51b371e8746425ea9c3aa | refs/heads/master | 2023-04-19T21:45:42.932627 | 2021-04-29T17:54:32 | 2021-04-29T17:54:32 | 303,197,191 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,072 | py | import numpy as np
import sys
########################## create Hz file ###################################
f_in = '0.0eV_hubble.txt' #file provided by Matteo
f_out = 'Hz.txt' #file to be used by Gadget
bins = 1000 #number of bins in the new file
z_max = 127.0 #maximum redshift of the new file
z_min = 0.0 #minimum redshift of the new file
#read original file; H(z) in km/s/Mpc
z,Hz = np.loadtxt(f_in,unpack=True)
#create a new (1+z) array
z_new = np.logspace(np.log10(1.0+z_min),np.log10(1.0+z_max),bins)
#interpolate from the original table. The H(z) has to be in Gadget units:
#km/s/(kpc/h), thus we need to multiply the Hz of Matteo by 1.0/(1000.0*h)
Hz_new = np.interp(z_new-1,z,Hz)/(Hz[0]/100.0)/1000.0
#create the w=-1 array
w = np.ones(bins)*(-1.0)
#the format of the H(z) file is 1+z,w,H(z), where 1+z should be decreasing
np.savetxt(f_out,np.transpose([z_new[::-1],w,Hz_new[::-1]]))
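# Illustrative sanity check (added for clarity; not part of the original
# script): Gadget expects the first column, 1+z, to be strictly decreasing
# and the w column to be -1 everywhere.
check = np.loadtxt(f_out)
assert np.all(np.diff(check[:, 0]) < 0.0), '1+z must decrease along the file'
assert np.allclose(check[:, 1], -1.0), 'w must be -1 everywhere'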
#############################################################################
| [
"[email protected]"
]
| |
a245e763e652cab4a89be27f2d41294d4526d33a | a9dc42e9f54b549fcdd695817e347abfd8f2869f | /old/old_bin/cru_ts323_prism_update_launcher.py | 9f4c80bba7d44a96d914f80d42d3712c30f99745 | [
"MIT"
]
| permissive | yusheng-wang/downscale | 2e77d070115ead3034c154d29f1c533976228f13 | 3fe8ea1774cf82149d19561ce5f19b25e6cba6fb | refs/heads/master | 2023-04-10T03:25:08.806859 | 2019-09-21T17:34:35 | 2019-09-21T17:34:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,713 | py |
# SCRIPT TO RUN THE CRU TS3.1 BUILT SCRIPT OVER THE CRU TS3.2.3 UPDATE (SEPT.2015)
# WHICH EXTENDS THE SERIES TO 12/2014.
# THIS IS CURRENTLY WORKING FOR CLD, TMP, VAP, AND MORE TO COME!
# # # # #
# Author: Michael Lindgren ([email protected])
# # # # #
# CURRENTLY SETUP TO RUN ON EOS.
# # CLD
# import os
# os.chdir( '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/CODE/tem_ar5_inputs/downscale_cmip5/bin' )
# ncores = '14'
# base_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323_prism'
# cru_ts31 = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323/cru_ts3.23.1901.2014.cld.dat.nc'
# cl20_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_october_final/cru_cl20/cld/akcan'
# template_raster_fn = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/templates/tas_mean_C_AR5_GFDL-CM3_historical_01_1860.tif'
# anomalies_calc_type = 'relative'
# downscaling_operation = 'mult'
# climatology_begin = '1961'
# climatology_end = '1990'
# year_begin = '1901'
# year_end = '2014'
# variable = 'cld'
# metric = 'pct'
# args_tuples = [ ('hi', cru_ts31), ('ci', cl20_path), ('tr', template_raster_fn),
# ('base', base_path), ('bt', year_begin), ('et', year_end),
# ('cbt', climatology_begin), ('cet', climatology_end),
# ('nc', ncores), ('at', anomalies_calc_type), ('m', metric),
# ('dso', downscaling_operation), ('v', variable) ]
# args = ''.join([ ' -'+flag+' '+value for flag, value in args_tuples ])
# os.system( 'ipython2.7 -- tas_cld_cru_ts31_to_cl20_downscaling.py ' + args )
# TAS
import os
os.chdir( '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/CODE/tem_ar5_inputs/downscale_cmip5/bin' )
ncores = '14'
base_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323_prism'
cru_ts31 = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323/cru_ts3.23.1901.2014.tmp.dat.nc'
cl20_path = '/Data/Base_Data/Climate/AK_CAN_2km/historical/singleBand/prism/AK_CAN_2km_PRISM/AK_CAN_geotiffs/tas/ak83albers'
template_raster_fn = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/templates/tas_mean_C_AR5_GFDL-CM3_historical_01_1860.tif'
anomalies_calc_type = 'absolute'
downscaling_operation = 'add'
climatology_begin = '1971'
climatology_end = '2000'
year_begin = '1901'
year_end = '2014'
variable = 'tas'
metric = 'C'
args_tuples = [ ('hi', cru_ts31), ('ci', cl20_path), ('tr', template_raster_fn),
('base', base_path), ('bt', year_begin), ('et', year_end),
('cbt', climatology_begin), ('cet', climatology_end),
('nc', ncores), ('at', anomalies_calc_type), ('m', metric),
('dso', downscaling_operation), ('v', variable) ]
args = ''.join([ ' -'+flag+' '+value for flag, value in args_tuples ])
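# For reference, the join above yields a flag string of the form
# ' -hi <netcdf file> -ci <climatology dir> -tr <template tif> ... -v tas',
# which is appended to the ipython invocation below.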
os.system( 'ipython2.7 -- tas_cld_cru_ts31_to_cl20_downscaling.py ' + args )
# VAP (HUR)
import os
os.chdir( '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/CODE/tem_ar5_inputs/downscale_cmip5/bin' )
ncores = '14'
base_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323'
cru_ts31_vap = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323/cru_ts3.23.1901.2014.vap.dat.nc'
cru_ts31_tas = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323/cru_ts3.23.1901.2014.tmp.dat.nc'
cl20_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_october_final/cru_cl20/hur/akcan' # hur
template_raster_fn = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/templates/tas_mean_C_AR5_GFDL-CM3_historical_01_1860.tif'
anomalies_calc_type = 'relative'
downscaling_operation = 'mult'
climatology_begin = '1961'
climatology_end = '1990'
year_begin = '1901'
year_end = '2014'
variable = 'hur'
metric = 'pct'
args_tuples = [ ('hhi', cru_ts31_vap), ('thi', cru_ts31_tas), ('ci', cl20_path), ('tr', template_raster_fn),
('base', base_path), ('bt', year_begin), ('et', year_end),
('cbt', climatology_begin), ('cet', climatology_end),
('nc', ncores), ('at', anomalies_calc_type), ('m', metric),
('dso', downscaling_operation), ('v', variable) ]
args = ''.join([ ' -'+flag+' '+value for flag, value in args_tuples ])
os.system( 'ipython2.7 -i -- hur_cru_ts31_to_cl20_downscaling.py ' + args )
# PRECIP
import os
os.chdir( '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/CODE/tem_ar5_inputs/downscale_cmip5/bin' )
ncores = '14'
base_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323'
cru_ts31 = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_ts323/cru_ts3.23.1901.2014.pre.dat.nc'
cl20_path = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/cru_october_final/cru_cl20/pre/akcan'
template_raster_fn = '/workspace/Shared/Tech_Projects/ALFRESCO_Inputs/project_data/TEM_Data/templates/tas_mean_C_AR5_GFDL-CM3_historical_01_1860.tif'
anomalies_calc_type = 'relative'
downscaling_operation = 'mult'
climatology_begin = '1961'
climatology_end = '1990'
year_begin = '1901'
year_end = '2014'
variable = 'pre'
metric = 'mm'
args_tuples = [ ('hi', cru_ts31), ('ci', cl20_path), ('tr', template_raster_fn),
('base', base_path), ('bt', year_begin), ('et', year_end),
('cbt', climatology_begin), ('cet', climatology_end),
('nc', ncores), ('at', anomalies_calc_type), ('m', metric),
('dso', downscaling_operation), ('v', variable) ]
args = ''.join([ ' -'+flag+' '+value for flag, value in args_tuples ])
os.system( 'ipython2.7 -- tas_cld_cru_ts31_to_cl20_downscaling.py ' + args )
| [
"[email protected]"
]
| |
3bb4882df0c8b66b1d985109247e95926e438367 | 28dbe47aba287ed94ef7bba734203736bcc06249 | /.history/dmac_20200713134926.py | e0c5592f917f60de87f93354003a2b45895781c6 | []
| no_license | ntung88/Trading_Algorithms | 242fd816b19df95e02e9fcd8c5c91c862d2ede40 | d96488b1754e3751f739d9c3f094a8f8dc54a0a9 | refs/heads/master | 2022-11-19T16:04:07.800344 | 2020-07-17T21:14:10 | 2020-07-17T21:14:10 | 276,239,640 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,330 | py | import yfinance as yf
import numpy as np
import pandas as pd
from pandasgui import show
from scipy.optimize import minimize
import matplotlib.pyplot as plt
'''
A library for running the Dual Moving Average Crossover trading strategy, with backtesting,
period optimization, and visualization tools.
'''
#Period of time (in years) that we look back when optimizing in return calculation
HINDSIGHT = 2
def clean_data(data):
'''
Removes row (days) with no data from dataframe or series
'''
incomplete_idxs = False
if isinstance(data, pd.DataFrame):
for col in data.columns:
incomplete_idxs |= np.isnan(data[col])
else:
incomplete_idxs |= np.isnan(data)
return data[~incomplete_idxs]
def calc_crossovers(sma, lma):
'''
    Returns a series containing only the rows where a crossover of the sma and lma
    is detected. 1 indicates a buy point (sma moving above lma), -1 a sell point
'''
num_points = len(clean_data(lma))
high = (sma > lma)[-num_points:]
crossovers = high.astype(int).diff()[1:]
trimmed = crossovers[crossovers != 0]
return trimmed
def profit(data, crossovers):
'''
Calculates profit assuming data covers a continuous time period with the given crossovers
'''
if len(crossovers) == 0:
return 0
total = 0
# If first crossover is a sell point assume implicit buy point at very start of data
if crossovers.iloc[0] == -1:
total += data.loc[crossovers.index[0]] - data.iloc[0]
# Add the difference between value at sell points and value at buy points to our profit
for i in range(1,len(crossovers)):
left_bound = crossovers.index[i-1]
if crossovers.loc[left_bound] == 1:
right_bound = crossovers.index[i]
total += data.loc[right_bound] - data.loc[left_bound]
# If last crossover is a buy point assume implicit sell point at end of data (include
# profit we have made on current holding)
if crossovers.iloc[-1] == 1:
total += data.iloc[-1] - data.loc[crossovers.index[-1]]
return total
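# Worked toy example (illustrative only, not part of the original module):
# with prices [10, 12, 15] and crossovers {0: buy, 2: sell}, the loop adds
# data.loc[2] - data.loc[0] = 5.0 and neither implicit trade applies, so:
#   profit(pd.Series([10.0, 12.0, 15.0]), pd.Series({0: 1, 2: -1}))  # -> 5.0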
def optimize(data):
'''
Uses scipy's convex minimization library to find optimal short period and long period
for moving averages. Because the profit certainly isn't a truly convex function I use a
    wide range of seeds as initial guesses in hopes of detecting all the local minima
    and comparing them to get a good guess of the global minimum
'''
cons = ({'type': 'ineq', 'fun': lambda x: x[1] - x[0]},
{'type': 'ineq', 'fun': lambda x: x[0] - 5})
# Ranges of initial guesses for short and long periods
#30 and 40 step size for max accuracy, larger for faster runtime
short_seeds = range(5, 300, 50)
long_seeds = range(20, 800, 70)
# short_seeds = [100]
# long_seeds = [750]
minimum = float('inf')
best_short = 0
best_long = 0
for short_seed in short_seeds:
for long_seed in long_seeds:
# Use all combinations of ranges where long_seed > short_seed as initial guesses
if long_seed > short_seed:
res = minimize(lambda x, data: -1 * run_analysis(x, data), [short_seed, long_seed], args=(data,), method='COBYLA', constraints=cons, options={'rhobeg': 10.0, 'catol': 0.0})
if res.fun < minimum:
best_short = res.x[0]
best_long = res.x[1]
minimum = res.fun
return (int(round(best_short)), int(round(best_long)), minimum)
def run_analysis(periods, data):
'''
Objective function for minimization, runs profit calculation with given periods and data
Returns negative profit for minimization (maximization of profit)
'''
short_period = int(round(periods[0]))
long_period = int(round(periods[1]))
sma = data.rolling(short_period).mean()
lma = data.rolling(long_period).mean()
crossovers = calc_crossovers(sma, lma)
return profit(data, crossovers)
def visualize(data, short_period, long_period):
'''
Useful for visualizing the algorithm's decisions. Plots the stock price with colored
vertical bars at buy and sell points
'''
sma = data.rolling(short_period).mean()
lma = data.rolling(long_period).mean()
crossovers = calc_crossovers(sma, lma)
buys = pd.DataFrame(crossovers[crossovers == 1.0])
sells = pd.DataFrame(crossovers[crossovers == -1.0])
data.plot(color='black')
for buy in buys.index:
plt.axvline(buy, color="green")
for sell in sells.index:
plt.axvline(sell, color="red")
plt.show()
def split_year(data):
'''
Split dataframe into a list of dataframes, each corresponding to the data for each year
'''
years = np.unique(data.index.year)
split = []
for year in years:
split.append(data[data.index.year == year])
return split
def calc_returns(split_data):
'''
Calculate annual returns for periods optimized over slices (of size HINDSIGHT) of past data. Gives an idea of what kind of results to realistically expect
'''
annual_returns = []
max_return = float('-inf')
min_return = float('inf')
for i in range(2, len(split_data)):
test_year = split_data[i]
optimize_period = pd.DataFrame(np.concatenate(split_data[i-HINDSIGHT:i]))
periods = optimize(optimize_period)
profit = run_analysis(periods, test_year)
annual_returns.append(profit)
if profit > max_return: max_return = profit
if profit < min_return: min_return = profit
return annual_returns, max_return, min_return
def main():
'''
    Main's current functionality: compute and print expected annual returns for the chosen ticker (currently PSV), using windows optimized over the preceding HINDSIGHT years of data
'''
ticker = yf.Ticker('PSV')
# data = yf.download(tickers, period='max', group_by='ticker')
data = ticker.history(period="max")
dirty = pd.DataFrame(data)
#Currently using only closing prices
frame = clean_data(dirty)['Close']
# split_year(frame)
# periods = optimize(frame)
print('optimizing')
results = calc_returns(split_year(frame))
print(results)
# visualize(frame, periods[0], periods[1])
if __name__ == "__main__":
main()
'''
how to quantify number of shares you want to buy (steepness of trend, volatility, top 20 stocks?)
''' | [
"[email protected]"
]
| |
c6a8b0b84a6a412e24430e7c5dcbbac95110038a | e4e1d12269cea35451bbe552af3262a9a428faa1 | /trainer/trainer_lm.py | 2da3166825d2ea23258bb8cdeaf7dff9f50eedd5 | [
"Apache-2.0"
]
| permissive | 12341123/pylon | 286e033623b13846ca9dd5d9b84c7788f6c70a1c | e26202b2c1cfbb8b5c444f840763f0ce839f048a | refs/heads/master | 2023-05-07T20:25:22.134413 | 2021-06-08T03:51:29 | 2021-06-08T03:51:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 994 | py | import torch.nn.functional as F
from .trainer_base import *
from .callbacks.start import *
class LMTrainer(BaseTrainer):
"""language modeling trainer
"""
def on_ep_begin(self, **kwargs):
self.hidden = None
def forward_pass(self, data):
x, y = data
pred, hidden = self.net(x, self.hidden)
self.hidden = detach(hidden)
# flatten pred and target before loss
t, b, n_token = pred.shape
loss = F.cross_entropy(pred.view(-1, n_token), y.view(-1))
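        # Perplexity is exp(mean token cross-entropy); computed under no_grad
        # because it is a monitoring metric, not a training signal.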
with torch.no_grad():
ppl = torch.exp(loss)
return {
'x': x,
'y': y,
'pred': pred,
'loss': loss,
'ppl': ppl,
'hidden': self.hidden,
'n': y.shape[0] * y.shape[1]
}
def make_default_callbacks(self):
return super().make_default_callbacks() + [MovingAvgCb(['loss', 'ppl'])]
def __repr__(self):
return f'<LMTrainer {self.looper.state}>'
| [
"[email protected]"
]
| |
8f5086512825812a077bc0c8c1f29b3a00095003 | a14af281ab0c6189d5b6c48a25732fb286f34c8d | /rbac/__init__.py | c51b6a735b7d3bac790f423527d6132e53670915 | [
"MIT"
]
| permissive | jackeyGao/simple-rbac | 62112301fbad8d8bdc6639b4a3038bc0de3a7638 | 967dee94be4ad6c93999ce52655eada2477c9046 | refs/heads/master | 2020-04-14T15:31:43.655283 | 2019-01-03T06:20:22 | 2019-01-03T06:20:22 | 163,929,777 | 2 | 0 | MIT | 2019-01-03T06:01:11 | 2019-01-03T06:01:10 | null | UTF-8 | Python | false | false | 161 | py | from __future__ import absolute_import
"""Simple RBAC
This is a simple role based access control utility in Python.
"""
__all__ = ["acl", "context", "proxy"]
| [
"[email protected]"
]
| |
4c633e20b0ec5f68658ba51e9e74fa3d3f7c6cd4 | e6a8793b1b12d47e57f00485350d122946618245 | /services/models.py | 8dad81c83ffca9da8fcc511e7630e1ee93c018e7 | []
| no_license | Fabricourt/school | 70b2eba2c0b8ff9b9290eb0f68d730698a6d3a63 | dad80c36be34b432dfadef195eb9e867f82cafff | refs/heads/main | 2023-01-01T15:48:43.760288 | 2020-10-26T11:15:32 | 2020-10-26T11:15:32 | 305,829,630 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,678 | py | from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.urls import reverse
from ckeditor.fields import RichTextField
from django.utils.html import mark_safe
from PIL import Image
class Service(models.Model):
title1 = models.CharField(max_length=200, blank=False, null=True)
title2 = models.CharField(max_length=200, blank=False, null=True)
title3 = models.CharField(max_length=200, blank=False, null=True)
title4 = models.CharField(max_length=200, blank=False, null=True)
subtitle1 = models.CharField(max_length=200, blank=False, null=True)
subtitle2 = models.CharField(max_length=200, blank=False, null=True)
subtitle3 = models.CharField(max_length=200, blank=False, null=True)
subtitle4 = models.CharField(max_length=200, blank=False, null=True)
statement1 = models.CharField(max_length=50, blank=False, null=True)
statement2 = models.CharField(max_length=50, blank=False, null=True)
statement3 = models.CharField(max_length=50, blank=False, null=True)
statement4 = models.CharField(max_length=50, blank=False, null=True)
icon1 = models.CharField(max_length=200, blank=False, null=True)
icon2 = models.CharField(max_length=200, blank=False, null=True)
icon3 = models.CharField(max_length=200, blank=False, null=True)
icon4 = models.CharField(max_length=200, blank=False, null=True)
date_posted = models.DateTimeField(default=timezone.now)
is_published = models.BooleanField(default=True)
#image = models.ImageField(upload_to='Service/%Y/%m/%d/', null=True, blank=False)
def __str__(self):
return str(self.title1)
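# Illustrative ORM usage (an assumption based on standard Django, not part of
# the original file): Service.objects.filter(is_published=True) returns only
# the rows flagged for display.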
| [
"[email protected]"
]
| |
c3a894ab35a0f429bdb265a780ff7a2714455edc | ef998914b19f34f8f85cd784b6470147d73a93e0 | /worldometers/worldometers/settings.py | 7b61c87b7bbe8488246874f4ebaf1191a148c970 | []
| no_license | dilipksahu/Web_Scraping_Using_Python | 9f586704a779fda551c5e3d15902591648663870 | e6d3031f89b1b5d4bfa0377c11e87037772c460d | refs/heads/master | 2023-01-30T14:36:00.702440 | 2020-12-17T16:10:25 | 2020-12-17T16:10:25 | 316,200,913 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,123 | py | # Scrapy settings for worldometers project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# https://docs.scrapy.org/en/latest/topics/settings.html
# https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
# https://docs.scrapy.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'worldometers'
SPIDER_MODULES = ['worldometers.spiders']
NEWSPIDER_MODULE = 'worldometers.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'worldometers (+http://www.yourdomain.com)'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See https://docs.scrapy.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
#COOKIES_ENABLED = False
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See https://docs.scrapy.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'worldometers.middlewares.WorldometersSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'worldometers.middlewares.WorldometersDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See https://docs.scrapy.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See https://docs.scrapy.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'worldometers.pipelines.WorldometersPipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See https://docs.scrapy.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"[email protected]"
]
| |
c799710f27d097dea81aebba892ed99561c9b17d | 9aa7d3c6d563a434595141f5b4dd8c54252a4d40 | /tweets/migrations/0013_post_image.py | 93841f7f324a2610760c5b2d94d0924b9898e049 | []
| no_license | akhad97/Test-Web-App | 9e200fc6d394cf6d52e72cb5f360d013e777fa9c | eb9b3480732c86f836748967bcfd6201dac6a6ee | refs/heads/master | 2023-07-19T12:24:26.400998 | 2021-09-06T11:55:20 | 2021-09-06T11:55:20 | 402,657,324 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 485 | py | # Generated by Django 2.2.13 on 2021-07-31 05:30
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('tweets', '0012_auto_20210730_2226'),
]
operations = [
migrations.AddField(
model_name='post',
name='image',
field=models.ImageField(default=django.utils.timezone.now, upload_to='post'),
preserve_default=False,
),
]
| [
"[email protected]"
]
| |
811e71674e45e86b8f44ddac7576883ae69df87c | b33ad1c8560fc22a7e4ae9dec7f3c778b70941fa | /abc180/c.py | 95d7109f8ff66c38bc7f9687872ddbacdf95636a | []
| no_license | Tommy-somen/atcoder_record | 36f226ffe6465dd5f8ae4986195510d00da46ffb | 0e549a72cec3b87accefc52d5cd56420251361b9 | refs/heads/master | 2023-08-16T10:23:39.609512 | 2021-09-27T11:38:57 | 2021-09-27T11:38:57 | 410,585,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | n = int(input())
front,back = [],[]
i = 1
while i*i <= n:
if n%i == 0:
front.append(i)
if i != n//i:
back.append(n//i)
i += 1
front.extend(back[::-1])
for k in front:
print(k)
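# Worked example (illustrative): for n = 12 the loop collects front = [1, 2, 3]
# and back = [12, 6, 4]; extending front with back reversed prints the divisors
# in ascending order: 1 2 3 4 6 12.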
| [
"[email protected]"
]
| |
f8c09a385fefe6b5064760cdcaa4f6f4794a0e07 | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/new_20200709211421.py | be8abece3db7727532968727ef71c47945c6b598 | []
| no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 737 | py | # [3,4] [1,2,7,7,0]
# --> new set {1,2,7,7}
# 4-3 = 1
# [5,9] [1,2,6,7]
# smaller = 5
#
# ---> 3
# left = {1:4,2:5,7:10,0:3}
# ---> 4
# right = {1:5,2:6,7:11,0:4}
#
# newArr = sorted(min[[1,0],[2,1]])
# newArr =[2,6]
# if newArr[0] == 0:
#
# 2,6
# "not possible"
# 13,4 - [1,2,3,6,14]
'''
smaller = 4
answer = 4+1+2 = 7
4+1+3 = 8
4+1+6 = 11
4+1+14 = 19
answer = 13+1=14
13+2 = 15
13+3 = 16
13+6 = 19
continue
4+2+3 = 9
4+2+6 = 12
4+2+14 = 20
4+3+6 = 13
[6,2] [1,10,6,5]
2-6 = 4
# sort -->[1,5,6,10]
2 + 1 = 3
2+5 = 7
'''
# bigger = scale[0]
# smaller = scale[1]
# diff = 9
# find two values that add up to 9
# --> weight[i] + weights[i+1] == diff:
# newArr
# return "not possible"
| [
"[email protected]"
]
| |
4c4af7c1fc3dd62c467f8350dc9f109165a94352 | 82fce9aae9e855a73f4e92d750e6a8df2ef877a5 | /Lab/venv/lib/python3.8/site-packages/OpenGL/raw/GL/SGIX/shadow_ambient.py | 883eef870fb602d8947fba66023b3d22c72e48fc | []
| no_license | BartoszRudnik/GK | 1294f7708902e867dacd7da591b9f2e741bfe9e5 | 6dc09184a3af07143b9729e42a6f62f13da50128 | refs/heads/main | 2023-02-20T19:02:12.408974 | 2021-01-22T10:51:14 | 2021-01-22T10:51:14 | 307,847,589 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 468 | py | '''Autogenerated by xml_generate script, do not edit!'''
from OpenGL import platform as _p
from OpenGL.constant import Constant as _C
# Code generation uses this
# End users want this...
from OpenGL.raw.GL import _errors
_EXTENSION_NAME = 'GL_SGIX_shadow_ambient'
def _f(function):
return _p.createFunction(function, _p.PLATFORM.GL, 'GL_SGIX_shadow_ambient', error_checker=_errors._error_checker)
GL_SHADOW_AMBIENT_SGIX = _C('GL_SHADOW_AMBIENT_SGIX', 0x80BF)
| [
"[email protected]"
]
| |
ca0d87e4e07ec3f3cdd5b7c6d054ff6ef5957b87 | 325fde42058b2b82f8a4020048ff910cfdf737d7 | /src/storage-blob-preview/azext_storage_blob_preview/vendored_sdks/azure_storage_blob/v2019_12_12/aio/_blob_service_client_async.py | ab2e8a0defc727617d464dcde63e9049d06c006a | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | ebencarek/azure-cli-extensions | 46b0d18fe536fe5884b00d7ffa30f54c7d6887d1 | 42491b284e38f8853712a5af01836f83b04a1aa8 | refs/heads/master | 2023-04-12T00:28:44.828652 | 2021-03-30T22:34:13 | 2021-03-30T22:34:13 | 261,621,934 | 2 | 5 | MIT | 2020-10-09T18:21:52 | 2020-05-06T01:25:58 | Python | UTF-8 | Python | false | false | 30,285 | py | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
# pylint: disable=invalid-overridden-method
import functools
from typing import ( # pylint: disable=unused-import
Union, Optional, Any, Iterable, Dict, List,
TYPE_CHECKING
)
from azure.core.tracing.decorator import distributed_trace
from azure.core.pipeline import AsyncPipeline
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.async_paging import AsyncItemPaged
from .._shared.models import LocationMode
from .._shared.policies_async import ExponentialRetry
from .._shared.base_client_async import AsyncStorageAccountHostsMixin, AsyncTransportWrapper
from .._shared.response_handlers import return_response_headers, process_storage_error
from .._shared.parser import _to_utc_datetime
from .._shared.response_handlers import parse_to_internal_user_delegation_key
from .._generated import VERSION
from .._generated.aio import AzureBlobStorage
from .._generated.models import StorageErrorException, StorageServiceProperties, KeyInfo
from .._blob_service_client import BlobServiceClient as BlobServiceClientBase
from ._container_client_async import ContainerClient
from ._blob_client_async import BlobClient
from .._models import ContainerProperties
from .._deserialize import service_stats_deserialize, service_properties_deserialize
from .._serialize import get_api_version
from ._models import ContainerPropertiesPaged, FilteredBlobPaged
if TYPE_CHECKING:
from datetime import datetime
from azure.core.pipeline.transport import HttpTransport
from azure.core.pipeline.policies import HTTPPolicy
from .._shared.models import AccountSasPermissions, ResourceTypes, UserDelegationKey
from ._lease_async import BlobLeaseClient
from .._models import (
BlobProperties,
PublicAccess,
BlobAnalyticsLogging,
Metrics,
CorsRule,
RetentionPolicy,
StaticWebsite,
)
class BlobServiceClient(AsyncStorageAccountHostsMixin, BlobServiceClientBase):
"""A client to interact with the Blob Service at the account level.
This client provides operations to retrieve and configure the account properties
as well as list, create and delete containers within the account.
For operations relating to a specific container or blob, clients for those entities
can also be retrieved using the `get_client` functions.
:param str account_url:
The URL to the blob storage account. Any other entities included
in the URL path (e.g. container or blob) will be discarded. This URL can be optionally
authenticated with a SAS token.
:param credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be a SAS token string, an account
shared access key, or an instance of a TokenCredentials class from azure.identity.
If the URL already has a SAS token, specifying an explicit credential will take priority.
:keyword str api_version:
The Storage API version to use for requests. Default value is '2019-07-07'.
Setting to an older version may result in reduced feature compatibility.
.. versionadded:: 12.2.0
:keyword str secondary_hostname:
The hostname of the secondary endpoint.
:keyword int max_block_size: The maximum chunk size for uploading a block blob in chunks.
Defaults to 4*1024*1024, or 4MB.
    :keyword int max_single_put_size: If the blob size is less than or equal to max_single_put_size, then the blob will be
uploaded with only one http PUT request. If the blob size is larger than max_single_put_size,
the blob will be uploaded in chunks. Defaults to 64*1024*1024, or 64MB.
:keyword int min_large_block_upload_threshold: The minimum chunk size required to use the memory efficient
algorithm when uploading a block blob. Defaults to 4*1024*1024+1.
:keyword bool use_byte_buffer: Use a byte buffer for block blob uploads. Defaults to False.
:keyword int max_page_size: The maximum chunk size for uploading a page blob. Defaults to 4*1024*1024, or 4MB.
:keyword int max_single_get_size: The maximum size for a blob to be downloaded in a single call,
the exceeded part will be downloaded in chunks (could be parallel). Defaults to 32*1024*1024, or 32MB.
:keyword int max_chunk_get_size: The maximum chunk size used for downloading a blob. Defaults to 4*1024*1024,
or 4MB.
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_authentication_async.py
:start-after: [START create_blob_service_client]
:end-before: [END create_blob_service_client]
:language: python
:dedent: 8
:caption: Creating the BlobServiceClient with account url and credential.
.. literalinclude:: ../samples/blob_samples_authentication_async.py
:start-after: [START create_blob_service_client_oauth]
:end-before: [END create_blob_service_client_oauth]
:language: python
:dedent: 8
:caption: Creating the BlobServiceClient with Azure Identity credentials.
"""
def __init__(
self, account_url, # type: str
credential=None, # type: Optional[Any]
**kwargs # type: Any
):
# type: (...) -> None
kwargs['retry_policy'] = kwargs.get('retry_policy') or ExponentialRetry(**kwargs)
super(BlobServiceClient, self).__init__(
account_url,
credential=credential,
**kwargs)
self._client = AzureBlobStorage(url=self.url, pipeline=self._pipeline)
self._client._config.version = get_api_version(kwargs, VERSION) # pylint: disable=protected-access
self._loop = kwargs.get('loop', None)
@distributed_trace_async
async def get_user_delegation_key(self, key_start_time, # type: datetime
key_expiry_time, # type: datetime
**kwargs # type: Any
):
# type: (...) -> UserDelegationKey
"""
Obtain a user delegation key for the purpose of signing SAS tokens.
A token credential must be present on the service object for this request to succeed.
:param ~datetime.datetime key_start_time:
A DateTime value. Indicates when the key becomes valid.
:param ~datetime.datetime key_expiry_time:
A DateTime value. Indicates when the key stops being valid.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:return: The user delegation key.
:rtype: ~azure.storage.blob.UserDelegationKey
"""
key_info = KeyInfo(start=_to_utc_datetime(key_start_time), expiry=_to_utc_datetime(key_expiry_time))
timeout = kwargs.pop('timeout', None)
try:
user_delegation_key = await self._client.service.get_user_delegation_key(key_info=key_info,
timeout=timeout,
**kwargs) # type: ignore
except StorageErrorException as error:
process_storage_error(error)
return parse_to_internal_user_delegation_key(user_delegation_key) # type: ignore
@distributed_trace_async
async def get_account_information(self, **kwargs):
# type: (Any) -> Dict[str, str]
"""Gets information related to the storage account.
The information can also be retrieved if the user has a SAS to a container or blob.
The keys in the returned dictionary include 'sku_name' and 'account_kind'.
:returns: A dict of account information (SKU and account type).
:rtype: dict(str, str)
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START get_blob_service_account_info]
:end-before: [END get_blob_service_account_info]
:language: python
:dedent: 12
:caption: Getting account information for the blob service.
"""
try:
return await self._client.service.get_account_info(cls=return_response_headers, **kwargs) # type: ignore
except StorageErrorException as error:
process_storage_error(error)
@distributed_trace_async
async def get_service_stats(self, **kwargs):
# type: (Any) -> Dict[str, Any]
"""Retrieves statistics related to replication for the Blob service.
It is only available when read-access geo-redundant replication is enabled for
the storage account.
With geo-redundant replication, Azure Storage maintains your data durable
in two locations. In both locations, Azure Storage constantly maintains
multiple healthy replicas of your data. The location where you read,
create, update, or delete data is the primary storage account location.
The primary location exists in the region you choose at the time you
        create an account via the Azure classic portal, for
example, North Central US. The location to which your data is replicated
is the secondary location. The secondary location is automatically
determined based on the location of the primary; it is in a second data
center that resides in the same region as the primary location. Read-only
access is available from the secondary location, if read-access geo-redundant
replication is enabled for your storage account.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:return: The blob service stats.
:rtype: Dict[str, Any]
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START get_blob_service_stats]
:end-before: [END get_blob_service_stats]
:language: python
:dedent: 12
:caption: Getting service stats for the blob service.
"""
timeout = kwargs.pop('timeout', None)
try:
stats = await self._client.service.get_statistics( # type: ignore
timeout=timeout, use_location=LocationMode.SECONDARY, **kwargs)
return service_stats_deserialize(stats)
except StorageErrorException as error:
process_storage_error(error)
@distributed_trace_async
async def get_service_properties(self, **kwargs):
# type: (Any) -> Dict[str, Any]
"""Gets the properties of a storage account's Blob service, including
Azure Storage Analytics.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:returns: An object containing blob service properties such as
analytics logging, hour/minute metrics, cors rules, etc.
:rtype: Dict[str, Any]
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START get_blob_service_properties]
:end-before: [END get_blob_service_properties]
:language: python
:dedent: 12
:caption: Getting service properties for the blob service.
"""
timeout = kwargs.pop('timeout', None)
try:
service_props = await self._client.service.get_properties(timeout=timeout, **kwargs)
return service_properties_deserialize(service_props)
except StorageErrorException as error:
process_storage_error(error)
@distributed_trace_async
async def set_service_properties(
self, analytics_logging=None, # type: Optional[BlobAnalyticsLogging]
hour_metrics=None, # type: Optional[Metrics]
minute_metrics=None, # type: Optional[Metrics]
cors=None, # type: Optional[List[CorsRule]]
target_version=None, # type: Optional[str]
delete_retention_policy=None, # type: Optional[RetentionPolicy]
static_website=None, # type: Optional[StaticWebsite]
**kwargs
):
# type: (...) -> None
"""Sets the properties of a storage account's Blob service, including
Azure Storage Analytics.
If an element (e.g. analytics_logging) is left as None, the
existing settings on the service for that functionality are preserved.
:param analytics_logging:
Groups the Azure Analytics Logging settings.
:type analytics_logging: ~azure.storage.blob.BlobAnalyticsLogging
:param hour_metrics:
The hour metrics settings provide a summary of request
statistics grouped by API in hourly aggregates for blobs.
:type hour_metrics: ~azure.storage.blob.Metrics
:param minute_metrics:
The minute metrics settings provide request statistics
for each minute for blobs.
:type minute_metrics: ~azure.storage.blob.Metrics
:param cors:
You can include up to five CorsRule elements in the
list. If an empty list is specified, all CORS rules will be deleted,
and CORS will be disabled for the service.
:type cors: list[~azure.storage.blob.CorsRule]
:param str target_version:
Indicates the default version to use for requests if an incoming
request's version is not specified.
:param delete_retention_policy:
The delete retention policy specifies whether to retain deleted blobs.
It also specifies the number of days and versions of blob to keep.
:type delete_retention_policy: ~azure.storage.blob.RetentionPolicy
:param static_website:
Specifies whether the static website feature is enabled,
and if yes, indicates the index document and 404 error document to use.
:type static_website: ~azure.storage.blob.StaticWebsite
:keyword int timeout:
The timeout parameter is expressed in seconds.
:rtype: None
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START set_blob_service_properties]
:end-before: [END set_blob_service_properties]
:language: python
:dedent: 12
:caption: Setting service properties for the blob service.
"""
props = StorageServiceProperties(
logging=analytics_logging,
hour_metrics=hour_metrics,
minute_metrics=minute_metrics,
cors=cors,
default_service_version=target_version,
delete_retention_policy=delete_retention_policy,
static_website=static_website
)
timeout = kwargs.pop('timeout', None)
try:
await self._client.service.set_properties(props, timeout=timeout, **kwargs)
except StorageErrorException as error:
process_storage_error(error)
@distributed_trace
def list_containers(
self, name_starts_with=None, # type: Optional[str]
include_metadata=False, # type: Optional[bool]
**kwargs
):
# type: (...) -> AsyncItemPaged[ContainerProperties]
"""Returns a generator to list the containers under the specified account.
The generator will lazily follow the continuation tokens returned by
the service and stop when all containers have been returned.
:param str name_starts_with:
Filters the results to return only containers whose names
begin with the specified prefix.
:param bool include_metadata:
Specifies that container metadata to be returned in the response.
The default value is `False`.
:keyword int results_per_page:
The maximum number of container names to retrieve per API
call. If the request does not specify the server will return up to 5,000 items.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:returns: An iterable (auto-paging) of ContainerProperties.
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.ContainerProperties]
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START bsc_list_containers]
:end-before: [END bsc_list_containers]
:language: python
:dedent: 16
:caption: Listing the containers in the blob service.
"""
include = ['metadata'] if include_metadata else []
timeout = kwargs.pop('timeout', None)
results_per_page = kwargs.pop('results_per_page', None)
command = functools.partial(
self._client.service.list_containers_segment,
prefix=name_starts_with,
include=include,
timeout=timeout,
**kwargs)
return AsyncItemPaged(
command,
prefix=name_starts_with,
results_per_page=results_per_page,
page_iterator_class=ContainerPropertiesPaged
)
@distributed_trace
def find_blobs_by_tags(self, filter_expression, **kwargs):
# type: (str, **Any) -> AsyncItemPaged[FilteredBlob]
"""The Filter Blobs operation enables callers to list blobs across all
containers whose tags match a given search expression. Filter blobs
searches across all containers within a storage account but can be
scoped within the expression to a single container.
:param str filter_expression:
The expression to find blobs whose tags matches the specified condition.
eg. "\"yourtagname\"='firsttag' and \"yourtagname2\"='secondtag'"
To specify a container, eg. "@container='containerName' and \"Name\"='C'"
:keyword int results_per_page:
The max result per page when paginating.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:returns: An iterable (auto-paging) response of BlobProperties.
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.storage.blob.FilteredBlob]
"""
results_per_page = kwargs.pop('results_per_page', None)
timeout = kwargs.pop('timeout', None)
command = functools.partial(
self._client.service.filter_blobs,
where=filter_expression,
timeout=timeout,
**kwargs)
return AsyncItemPaged(
command, results_per_page=results_per_page,
page_iterator_class=FilteredBlobPaged)
@distributed_trace_async
async def create_container(
self, name, # type: str
metadata=None, # type: Optional[Dict[str, str]]
public_access=None, # type: Optional[Union[PublicAccess, str]]
**kwargs
):
# type: (...) -> ContainerClient
"""Creates a new container under the specified account.
If the container with the same name already exists, a ResourceExistsError will
be raised. This method returns a client with which to interact with the newly
created container.
:param str name: The name of the container to create.
:param metadata:
A dict with name-value pairs to associate with the
container as metadata. Example: `{'Category':'test'}`
:type metadata: dict(str, str)
:param public_access:
Possible values include: 'container', 'blob'.
:type public_access: str or ~azure.storage.blob.PublicAccess
:keyword container_encryption_scope:
Specifies the default encryption scope to set on the container and use for
all future writes.
.. versionadded:: 12.2.0
:paramtype container_encryption_scope: dict or ~azure.storage.blob.ContainerEncryptionScope
:keyword int timeout:
The timeout parameter is expressed in seconds.
:rtype: ~azure.storage.blob.aio.ContainerClient
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START bsc_create_container]
:end-before: [END bsc_create_container]
:language: python
:dedent: 16
:caption: Creating a container in the blob service.
"""
container = self.get_container_client(name)
timeout = kwargs.pop('timeout', None)
kwargs.setdefault('merge_span', True)
await container.create_container(
metadata=metadata, public_access=public_access, timeout=timeout, **kwargs)
return container
@distributed_trace_async
async def delete_container(
self, container, # type: Union[ContainerProperties, str]
lease=None, # type: Optional[Union[BlobLeaseClient, str]]
**kwargs
):
# type: (...) -> None
"""Marks the specified container for deletion.
The container and any blobs contained within it are later deleted during garbage collection.
If the container is not found, a ResourceNotFoundError will be raised.
:param container:
The container to delete. This can either be the name of the container,
or an instance of ContainerProperties.
:type container: str or ~azure.storage.blob.ContainerProperties
:param lease:
If specified, delete_container only succeeds if the
container's lease is active and matches this ID.
Required if the container has an active lease.
:paramtype lease: ~azure.storage.blob.aio.BlobLeaseClient or str
:keyword ~datetime.datetime if_modified_since:
A DateTime value. Azure expects the date value passed in to be UTC.
If timezone is included, any non-UTC datetimes will be converted to UTC.
If a date is passed in without timezone info, it is assumed to be UTC.
Specify this header to perform the operation only
if the resource has been modified since the specified time.
:keyword ~datetime.datetime if_unmodified_since:
A DateTime value. Azure expects the date value passed in to be UTC.
If timezone is included, any non-UTC datetimes will be converted to UTC.
If a date is passed in without timezone info, it is assumed to be UTC.
Specify this header to perform the operation only if
the resource has not been modified since the specified date/time.
:keyword str etag:
An ETag value, or the wildcard character (*). Used to check if the resource has changed,
and act according to the condition specified by the `match_condition` parameter.
:keyword ~azure.core.MatchConditions match_condition:
The match condition to use upon the etag.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:rtype: None
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START bsc_delete_container]
:end-before: [END bsc_delete_container]
:language: python
:dedent: 16
:caption: Deleting a container in the blob service.
"""
container = self.get_container_client(container) # type: ignore
kwargs.setdefault('merge_span', True)
timeout = kwargs.pop('timeout', None)
await container.delete_container( # type: ignore
lease=lease,
timeout=timeout,
**kwargs)
@distributed_trace_async
async def _undelete_container(self, deleted_container_name, deleted_container_version, new_name=None, **kwargs):
# type: (str, str, str, **Any) -> ContainerClient
"""Restores soft-deleted container.
Operation will only be successful if used within the specified number of days
set in the delete retention policy.
.. versionadded:: 12.4.0
This operation was introduced in API version '2019-12-12'.
:param str deleted_container_name:
Specifies the name of the deleted container to restore.
:param str deleted_container_version:
Specifies the version of the deleted container to restore.
:param str new_name:
The new name for the deleted container to be restored to.
:keyword int timeout:
The timeout parameter is expressed in seconds.
:rtype: ~azure.storage.blob.aio.ContainerClient
"""
container = self.get_container_client(new_name or deleted_container_name)
try:
await container._client.container.restore(deleted_container_name=deleted_container_name, # pylint: disable = protected-access
deleted_container_version=deleted_container_version,
timeout=kwargs.pop('timeout', None), **kwargs)
return container
except StorageErrorException as error:
process_storage_error(error)
def get_container_client(self, container):
# type: (Union[ContainerProperties, str]) -> ContainerClient
"""Get a client to interact with the specified container.
The container need not already exist.
:param container:
The container. This can either be the name of the container,
or an instance of ContainerProperties.
:type container: str or ~azure.storage.blob.ContainerProperties
:returns: A ContainerClient.
:rtype: ~azure.storage.blob.aio.ContainerClient
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START bsc_get_container_client]
:end-before: [END bsc_get_container_client]
:language: python
:dedent: 12
:caption: Getting the container client to interact with a specific container.
"""
try:
container_name = container.name
except AttributeError:
container_name = container
_pipeline = AsyncPipeline(
transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access
policies=self._pipeline._impl_policies # pylint: disable = protected-access
)
return ContainerClient(
self.url, container_name=container_name,
credential=self.credential, api_version=self.api_version, _configuration=self._config,
_pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key,
key_resolver_function=self.key_resolver_function, loop=self._loop)
def get_blob_client(
self, container, # type: Union[ContainerProperties, str]
blob, # type: Union[BlobProperties, str]
snapshot=None # type: Optional[Union[Dict[str, Any], str]]
):
# type: (...) -> BlobClient
"""Get a client to interact with the specified blob.
The blob need not already exist.
:param container:
The container that the blob is in. This can either be the name of the container,
or an instance of ContainerProperties.
:type container: str or ~azure.storage.blob.ContainerProperties
:param blob:
The blob with which to interact. This can either be the name of the blob,
or an instance of BlobProperties.
:type blob: str or ~azure.storage.blob.BlobProperties
:param snapshot:
The optional blob snapshot on which to operate. This can either be the ID of the snapshot,
or a dictionary output returned by
:func:`~azure.storage.blob.aio.BlobClient.create_snapshot()`.
:type snapshot: str or dict(str, Any)
:returns: A BlobClient.
:rtype: ~azure.storage.blob.aio.BlobClient
.. admonition:: Example:
.. literalinclude:: ../samples/blob_samples_service_async.py
:start-after: [START bsc_get_blob_client]
:end-before: [END bsc_get_blob_client]
:language: python
:dedent: 16
:caption: Getting the blob client to interact with a specific blob.
"""
try:
container_name = container.name
except AttributeError:
container_name = container
try:
blob_name = blob.name
except AttributeError:
blob_name = blob
_pipeline = AsyncPipeline(
transport=AsyncTransportWrapper(self._pipeline._transport), # pylint: disable = protected-access
policies=self._pipeline._impl_policies # pylint: disable = protected-access
)
return BlobClient( # type: ignore
self.url, container_name=container_name, blob_name=blob_name, snapshot=snapshot,
credential=self.credential, api_version=self.api_version, _configuration=self._config,
_pipeline=_pipeline, _location_mode=self._location_mode, _hosts=self._hosts,
require_encryption=self.require_encryption, key_encryption_key=self.key_encryption_key,
key_resolver_function=self.key_resolver_function, loop=self._loop)
| [
"[email protected]"
]
| |
3f6e4ee13b6391fbdcae1d4ce6f0e96b1d7d1ae6 | 70fa6468c768d4ec9b4b14fc94fa785da557f1b5 | /lib/googlecloudsdk/command_lib/meta/regen.py | 19ec509f43fa92f3587e9fbdab809d6f1a30549a | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | kylewuolle/google-cloud-sdk | d43286ef646aec053ecd7eb58566ab2075e04e76 | 75f09ebe779e99fdc3fd13b48621fe12bfaa11aa | refs/heads/master | 2020-04-20T22:10:41.774132 | 2019-01-26T09:29:26 | 2019-01-26T09:29:26 | 169,131,028 | 0 | 0 | NOASSERTION | 2019-02-04T19:04:40 | 2019-02-04T18:58:36 | Python | UTF-8 | Python | false | false | 1,013 | py | # -*- coding: utf-8 -*- #
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for the gcloud meta apis surface."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.core import exceptions
class Error(exceptions.Error):
pass
class UnknownApi(Error):
"""Raised when api is not found."""
class ConfigFileError(Error):
pass
class DiscoveryDocError(Error):
pass
| [
"[email protected]"
]
| |
644f6c6ccb4805245ea968549b49decd927c466f | fc1b227f04158c0929b1e66106e0acd842844a88 | /pipeline/processor/Mosher.py | 7222950af17cc29b9752bfe9e9e1be70b3c550af | []
| no_license | csboling/pi-rt-video | 72603a8c5bac6e2edf7e3e0b55d0b87bbd95bd30 | 274b0fb3837b04958f229e6a96934561f5da9f05 | refs/heads/master | 2021-05-09T00:09:39.798926 | 2018-04-18T04:37:16 | 2018-04-18T04:37:16 | 119,735,150 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 482 | py | from abc import abstractmethod
from itertools import islice
import numpy as np
from pipeline.processor.pure import PureFunction
class Mosher(PureFunction):
def __call__(self, frame):
raw = frame.tobytes()
count = len(raw)
moshed = b''.join(self.mosh(raw))[:count]
return np.frombuffer(
moshed, dtype=frame.dtype
).reshape(
frame.shape
)
@abstractmethod
def mosh(self, in_bytes):
pass
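# Illustrative sketch (an assumed example, not part of the original module):
# a minimal concrete Mosher that reverses the byte stream. mosh() must yield
# byte chunks whose concatenation is at least as long as the input, since
# __call__ truncates back to the original frame size.
class ReverseMosher(Mosher):
    def mosh(self, in_bytes):
        yield in_bytes[::-1]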
| [
"[email protected]"
]
| |
62e01aea21bf490cea497e6ff15c24b5f8fc9c89 | 65f9576021285bc1f9e52cc21e2d49547ba77376 | /adsp_proc/platform/build/qaic.py | 81c8d353dccce591f0fa71a9587f5df3ecbdda4d | [
"BSD-3-Clause",
"BSD-1-Clause",
"LicenseRef-scancode-unknown-license-reference",
"BSD-2-Clause-Views",
"LicenseRef-scancode-public-domain"
]
| permissive | AVCHD/qcs605_root_qcom | 183d7a16e2f9fddc9df94df9532cbce661fbf6eb | 44af08aa9a60c6ca724c8d7abf04af54d4136ccb | refs/heads/main | 2023-03-18T21:54:11.234776 | 2021-02-26T11:03:59 | 2021-02-26T11:03:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,742 | py | #===============================================================================
# Copyrigha (c) 2012 by Qualcomm Technologies, Incorporated.
# All Rights Reserved.
# QUALCOMM Proprietary/GTDR
#===============================================================================
import sys
import os
import subprocess
import string
import SCons.Action
from SCons.Script import *
excluded_images = []
excluded_images_for_header_gen = []
if os.environ.get('CHIPSET') in ['sdm670', 'sdm710', 'sm6150', 'qcs405']:
excluded_images = [
'DAL_DEVCFG_IMG',
'DEVCFG_IMG',
'devcfg_img',
# 'SENSOR_IMG',
# 'AVS_ADSP_USER',
'AVS_ADSP_ROOT',
'CORE_USER_LIBS',
]
excluded_images_for_header_gen = [
'CORE_ROOT_LIBS',
'CORE_QDSP6_SW',
'devcfg_img',
'DAL_DEVCFG_IMG',
'DEVCFG_IMG',
'PLATFORM_SLPI_ROOT',
'SSC_SLPI_USER',
'FASTRPC_SHELL_IMG',
'PLATFORM_LIBS',
'AVS_ADSP_USER',
'SENSOR_IMG',
]
else:
excluded_images = [
'DAL_DEVCFG_IMG',
'DEVCFG_IMG',
'devcfg_img',
'SENSOR_IMG',
'AVS_ADSP_ROOT',
'CORE_USER_LIBS',
]
excluded_images_for_header_gen = [
'CORE_ROOT_LIBS',
'CORE_QDSP6_SW',
'devcfg_img',
'DAL_DEVCFG_IMG',
'DEVCFG_IMG',
'PLATFORM_SLPI_ROOT',
'SSC_SLPI_USER',
'FASTRPC_SHELL_IMG',
'PLATFORM_LIBS',
'AVS_ADSP_USER',
]
excluded_images_for_skel_gen = [
'DAL_DEVCFG_IMG',
'DEVCFG_IMG',
'devcfg_img',
# 'SENSOR_IMG',
'AVS_ADSP_ROOT',
'CORE_USER_LIBS',
]
def qaic_header_gen(target, source, env, for_signature ):
qaic_exe = env.FindQaicExe()
flags = env.subst('$_CPPINCFLAGS')
target_dir = os.path.dirname(str(target[0]))
source_file = str(source[0])
env.Depends(target, qaic_exe)
return ( "%s %s -ho -o %s %s" % (qaic_exe, flags, target_dir, source_file))
def qaic_ro_gen(target, source, env, for_signature ):
qaic_exe = env.FindQaicExe()
flags = env.subst('$_CPPINCFLAGS')
target_dir = os.path.dirname(str(target[0]))
source_file = str(source[0])
env.Depends(target, qaic_exe)
return ("%s %s -ro -o %s %s" % (qaic_exe, flags, target_dir, source_file))
#find a file in cpppath array, errors out if there are multiple results
#TODO: cache the results?
def find_in_cpppath(env, fname):
paths = env['CPPPATH']
path = None
tried = []
for dname in paths:
dname = env.subst(dname)
tpath = os.path.join(dname , fname)
tried.append(tpath)
if(os.path.isfile(tpath)):
if(path != None):
env.PrintError("find_in_cpppath: found duplicate path for %s, in %s and %s, adjust your required apis" % (fname, path, tpath))
Exit(1)
else:
path = tpath
return (path,tried)
#Finds the build folder that contains the scons file
# responsible for building the input idl
def find_build_folder(fname):
path = os.path.dirname(fname)
while True:
newdir = os.path.normpath(os.path.join(path ,'build'))
if os.path.isdir(newdir):
return newdir
else:
path = os.path.join(path,os.pardir)
def exists(env):
return env.Detect('qaic')
def add_qaic_header_dep(env, deps, idlname):
idls = idlname
if type(idls) != list:
idls = [idls]
hfiles = []
for idl in idls:
idlfile,tried = env.FindInCPPPath(idl + ".idl")
if None == idlfile:
env.PrintError("qaic builder: couldn't find idl file %s.idl. tried: %s" % (idl, str(tried)))
Exit(1)
if env['IMAGE_NAME'] in excluded_images_for_header_gen:
build_folder = os.path.join(find_build_folder(idlfile), env['QC_BUILDPATH'])
else:
build_folder = os.path.join(find_build_folder(idlfile), env['SHORT_BUILDPATH'])
hfiles.append(os.path.join(build_folder , idl + '.h'))
# Add path
env.AppendUnique(CPPPATH = [os.path.normpath(build_folder)])
deplist = deps
if type(deps) != list:
deplist = [deps]
for depname in deplist:
for hfile in hfiles:
env.Depends(depname, hfile)
return deps
#adds a header builder dependency to sources
#returns the dependency(ies)
def qaic_header_from_idl(env, idlname):
if env['IMAGE_NAME'] in excluded_images:
return
idlfile,tried = env.FindInCPPPath(idlname + ".idl")
if None == idlfile:
env.PrintError("qaic builder: couldn't find idl file %s.idl. tried: %s" % (idlname, str(tried)))
Exit(1)
if env['IMAGE_NAME'] in excluded_images_for_header_gen:
hfile = os.path.join(find_build_folder(idlfile), env['QC_BUILDPATH'], idlname + ".h")
else:
hfile = os.path.join(find_build_folder(idlfile), env['SHORT_BUILDPATH'], idlname + ".h")
return env.QaicHeaderBuilder(target = hfile, source = idlfile)
def qaic_stub_from_idl(env, idlname):
if env['IMAGE_NAME'] in excluded_images:
return
idlfile,tried = env.FindInCPPPath(idlname + ".idl")
if None == idlfile:
env.PrintError("qaic builder: couldn't find idl file %s.idl. tried: %s" % (idlname, str(tried)))
Exit(1)
stub = os.path.join(find_build_folder(idlfile), env['QC_BUILDPATH'], idlname + "_stub.c")
env.QaicHeaderFromIdl(idlname)
env.QaicStubBuilder(target = stub, source = idlfile)
return env.AddQaicHeaderDep(stub, idlname)
def qaic_skel_from_idl(env, idlname):
if env['IMAGE_NAME'] in excluded_images_for_skel_gen:
return
idlfile,tried = env.FindInCPPPath(idlname + ".idl")
if None == idlfile:
env.PrintError("qaic builder: couldn't find idl file %s.idl. tried: %s" % (idlname, str(tried)))
Exit(1)
skel = os.path.join(find_build_folder(idlfile), env['QC_BUILDPATH'], idlname + "_skel.c")
env.QaicSkelBuilder(target = skel, source = idlfile)
env.QaicHeaderFromIdl(idlname)
return env.AddQaicHeaderDep(skel, idlname)
#is there a way to cache this for all the envs, or is it ok to do this per env?
def find_qaic_exe(env):
if not env.subst('$QAIC_EXE'):
env.RequirePublicApi('QAIC')
qaic_exes = ["Darwin/qaic", "Ubuntu16/qaic", "Ubuntu18/qaic", "Linux_DoNotShip/qaic", "WinNT/qaic.exe"]
qaic_exe = None
tried = []
for ename in qaic_exes:
tn,tns = env.FindInCPPPath(ename)
if tn != None:
try:
                #using execmds generates a bunch of error output, it would be nice if that could be suppressed
#data, err, rv = env.ExecCmds("%s -v" % tn)
tried.append(tn)
rv = subprocess.Popen([("%s" % tn), "-v"], stderr = subprocess.PIPE, stdout = subprocess.PIPE)
rv.wait()
if 0 == rv.returncode:
qaic_exe = tn
                        break
except:
pass
if None == qaic_exe:
env.PrintError("qaic builder: couldn't find qaic exe for your host. tried: %s" % str(tried))
Exit(1)
env.Append(QAIC_EXE = qaic_exe)
return env.subst('$QAIC_EXE')
def generate(env):
#public methods
# This method adds a dependency on the header that is generated from the IDL by qaic.
# Note: This method DOES NOT generate the header. The owner of the IDL has to call one
# of QaicHeaderFromIdl, QaicStubFromIdl or QaicSkelFromIdl to generate the header.
env.AddMethod(add_qaic_header_dep, "AddQaicHeaderDep")
#This method calls qaic to generate the header from the IDL.
env.AddMethod(qaic_header_from_idl, "QaicHeaderFromIdl")
# This method calls qaic to generate the stub AND the HEADER from the IDL.
# It also adds a dependency between the stub and the generated header.
env.AddMethod(qaic_stub_from_idl, "QaicStubFromIdl")
# This method calls qaic to generate the skel AND the HEADER from the IDL.
# It also adds a dependency between the skel and the generated header.
env.AddMethod(qaic_skel_from_idl, "QaicSkelFromIdl")
#private
#do these need a _ prefix?
env.AddMethod(find_in_cpppath, "FindInCPPPath")
env.AddMethod(find_qaic_exe, "FindQaicExe")
qaic_ho = Builder(name = "QaicHeaderBuilder", generator = qaic_header_gen)
qaic_stub = Builder(name = "QaicStubBuilder", generator = qaic_ro_gen)
qaic_skel = Builder(name = "QaicSkelBuilder", generator = qaic_ro_gen)
env['BUILDERS']['QaicHeaderBuilder'] = qaic_ho
env['BUILDERS']['QaicStubBuilder'] = qaic_stub
env['BUILDERS']['QaicSkelBuilder'] = qaic_skel
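# Typical usage from an image SConscript (illustrative only; 'example' is a
# hypothetical IDL name that must resolve to example.idl on CPPPATH):
#   env.QaicHeaderFromIdl('example')        # header only
#   stub = env.QaicStubFromIdl('example')   # stub + header
#   skel = env.QaicSkelFromIdl('example')   # skel + header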
| [
"[email protected]"
]
| |
582ace8306bee507b0cff36687afe334967ec8bd | 209a7a4023a9a79693ec1f6e8045646496d1ea71 | /COMP0016_2020_21_Team12-datasetsExperimentsAna/pwa/FADapp/pythonScripts/venv/Lib/site-packages/pandas/_config/config.py | d9ec1c9ac3fa05c9eb82b90b28c8ea934632b051 | [
"MIT"
]
| permissive | anzhao920/MicrosoftProject15_Invictus | 5e2347015411bbffbdf0ceb059df854661fb240c | 15f44eebb09561acbbe7b6730dfadf141e4c166d | refs/heads/main | 2023-04-16T13:24:39.332492 | 2021-04-27T00:47:13 | 2021-04-27T00:47:13 | 361,913,170 | 0 | 0 | MIT | 2021-04-26T22:41:56 | 2021-04-26T22:41:55 | null | UTF-8 | Python | false | false | 24,553 | py | """
The config module holds package-wide configurables and provides
a uniform API for working with them.
Overview
========
This module supports the following requirements:
- options are referenced using keys in dot.notation, e.g. "x.y.option - z".
- keys are case-insensitive.
- functions should accept partial/regex keys, when unambiguous.
- options can be registered by modules at import time.
- options can be registered at init-time (via core.config_init)
- options have a default value, and (optionally) a description and
validation function associated with them.
- options can be deprecated, in which case referencing them
should produce a warning.
- deprecated options can optionally be rerouted to a replacement
so that accessing a deprecated option reroutes to a differently
named option.
- options can be reset to their default value.
- all option can be reset to their default value at once.
- all options in a certain sub - namespace can be reset at once.
- the user can set / get / reset or ask for the description of an option.
- a developer can register and mark an option as deprecated.
- you can register a callback to be invoked when the option value
is set or reset. Changing the stored value is considered misuse, but
is not verboten.
Implementation
==============
- Data is stored using nested dictionaries, and should be accessed
through the provided API.
- "Registered options" and "Deprecated options" have metadata associated
with them, which are stored in auxiliary dictionaries keyed on the
fully-qualified key, e.g. "x.y.z.option".
- the config_init module is imported by the package's __init__.py file.
placing any register_option() calls there will ensure those options
are available as soon as pandas is loaded. If you use register_option
in a module, it will only be available after that module is imported,
which you should be aware of.
- `config_prefix` is a context_manager (for use with the `with` keyword)
which can save developers some typing, see the docstring.
"""
from collections import namedtuple
from contextlib import ContextDecorator, contextmanager
import re
from typing import Any, Callable, Dict, Iterable, List, Optional, Tuple, Type, cast
import warnings
from pandas._typing import F
DeprecatedOption = namedtuple("DeprecatedOption", "key msg rkey removal_ver")
RegisteredOption = namedtuple("RegisteredOption", "key defval doc validator cb")
# holds deprecated option metadata
_deprecated_options: Dict[str, DeprecatedOption] = {}
# holds registered option metadata
_registered_options: Dict[str, RegisteredOption] = {}
# holds the current values for registered options
_global_config: Dict[str, Any] = {}
# keys which have a special meaning
_reserved_keys: List[str] = ["all"]
class OptionError(AttributeError, KeyError):
"""
Exception for pandas.options, backwards compatible with KeyError
checks
"""
#
# User API
def _get_single_key(pat: str, silent: bool) -> str:
keys = _select_options(pat)
if len(keys) == 0:
if not silent:
_warn_if_deprecated(pat)
raise OptionError(f"No such keys(s): {repr(pat)}")
if len(keys) > 1:
raise OptionError("Pattern matched multiple keys")
key = keys[0]
if not silent:
_warn_if_deprecated(key)
key = _translate_key(key)
return key
def _get_option(pat: str, silent: bool = False):
key = _get_single_key(pat, silent)
# walk the nested dict
root, k = _get_root(key)
return root[k]
def _set_option(*args, **kwargs) -> None:
    # must have at least 1 arg; deal with constraints later
nargs = len(args)
if not nargs or nargs % 2 != 0:
raise ValueError("Must provide an even number of non-keyword arguments")
# default to false
silent = kwargs.pop("silent", False)
if kwargs:
kwarg = list(kwargs.keys())[0]
raise TypeError(f'_set_option() got an unexpected keyword argument "{kwarg}"')
for k, v in zip(args[::2], args[1::2]):
key = _get_single_key(k, silent)
o = _get_registered_option(key)
if o and o.validator:
o.validator(v)
# walk the nested dict
root, k = _get_root(key)
root[k] = v
if o.cb:
if silent:
with warnings.catch_warnings(record=True):
o.cb(key)
else:
o.cb(key)
def _describe_option(pat: str = "", _print_desc: bool = True):
keys = _select_options(pat)
if len(keys) == 0:
raise OptionError("No such keys(s)")
s = "\n".join([_build_option_description(k) for k in keys])
if _print_desc:
print(s)
else:
return s
def _reset_option(pat: str, silent: bool = False) -> None:
keys = _select_options(pat)
if len(keys) == 0:
raise OptionError("No such keys(s)")
if len(keys) > 1 and len(pat) < 4 and pat != "all":
raise ValueError(
"You must specify at least 4 characters when "
"resetting multiple keys, use the special keyword "
'"all" to reset all the options to their default value'
)
for k in keys:
_set_option(k, _registered_options[k].defval, silent=silent)
def get_default_val(pat: str):
key = _get_single_key(pat, silent=True)
return _get_registered_option(key).defval
class DictWrapper:
""" provide attribute-style access to a nested dict"""
def __init__(self, d: Dict[str, Any], prefix: str = ""):
object.__setattr__(self, "d", d)
object.__setattr__(self, "prefix", prefix)
def __setattr__(self, key: str, val: Any) -> None:
prefix = object.__getattribute__(self, "prefix")
if prefix:
prefix += "."
prefix += key
# you can't set new keys
        # and you can't overwrite subtrees
if key in self.d and not isinstance(self.d[key], dict):
_set_option(prefix, val)
else:
raise OptionError("You can only set the value of existing options")
def __getattr__(self, key: str):
prefix = object.__getattribute__(self, "prefix")
if prefix:
prefix += "."
prefix += key
try:
v = object.__getattribute__(self, "d")[key]
except KeyError as err:
raise OptionError("No such option") from err
if isinstance(v, dict):
return DictWrapper(v, prefix)
else:
return _get_option(prefix)
def __dir__(self) -> Iterable[str]:
return list(self.d.keys())
# For user convenience, we'd like to have the available options described
# in the docstring. For dev convenience we'd like to generate the docstrings
# dynamically instead of maintaining them by hand. To this, we use the
# class below which wraps functions inside a callable, and converts
# __doc__ into a property function. The docstrings below are templates
# using the py2.6+ advanced formatting syntax to plug in a concise list
# of options, and option descriptions.
class CallableDynamicDoc:
def __init__(self, func, doc_tmpl):
self.__doc_tmpl__ = doc_tmpl
self.__func__ = func
def __call__(self, *args, **kwds):
return self.__func__(*args, **kwds)
@property
def __doc__(self):
opts_desc = _describe_option("all", _print_desc=False)
opts_list = pp_options_list(list(_registered_options.keys()))
return self.__doc_tmpl__.format(opts_desc=opts_desc, opts_list=opts_list)
_get_option_tmpl = """
get_option(pat)
Retrieves the value of the specified option.
Available options:
{opts_list}
Parameters
----------
pat : str
Regexp which should match a single option.
Note: partial matches are supported for convenience, but unless you use the
full option name (e.g. x.y.z.option_name), your code may break in future
versions if new options with similar names are introduced.
Returns
-------
result : the value of the option
Raises
------
OptionError : if no such option exists
Notes
-----
The available options with its descriptions:
{opts_desc}
"""
_set_option_tmpl = """
set_option(pat, value)
Sets the value of the specified option.
Available options:
{opts_list}
Parameters
----------
pat : str
Regexp which should match a single option.
Note: partial matches are supported for convenience, but unless you use the
full option name (e.g. x.y.z.option_name), your code may break in future
versions if new options with similar names are introduced.
value : object
New value of option.
Returns
-------
None
Raises
------
OptionError if no such option exists
Notes
-----
The available options with its descriptions:
{opts_desc}
"""
_describe_option_tmpl = """
describe_option(pat, _print_desc=False)
Prints the description for one or more registered options.
Call with no arguments to get a listing for all registered options.
Available options:
{opts_list}
Parameters
----------
pat : str
Regexp pattern. All matching keys will have their description displayed.
_print_desc : bool, default True
If True (default) the description(s) will be printed to stdout.
Otherwise, the description(s) will be returned as a unicode string
(for testing).
Returns
-------
None by default, the description(s) as a unicode string if _print_desc
is False
Notes
-----
The available options with its descriptions:
{opts_desc}
"""
_reset_option_tmpl = """
reset_option(pat)
Reset one or more options to their default value.
Pass "all" as argument to reset all options.
Available options:
{opts_list}
Parameters
----------
pat : str/regex
If specified only options matching `prefix*` will be reset.
Note: partial matches are supported for convenience, but unless you
use the full option name (e.g. x.y.z.option_name), your code may break
in future versions if new options with similar names are introduced.
Returns
-------
None
Notes
-----
The available options with its descriptions:
{opts_desc}
"""
# bind the functions with their docstrings into a Callable
# and use that as the functions exposed in pd.api
get_option = CallableDynamicDoc(_get_option, _get_option_tmpl)
set_option = CallableDynamicDoc(_set_option, _set_option_tmpl)
reset_option = CallableDynamicDoc(_reset_option, _reset_option_tmpl)
describe_option = CallableDynamicDoc(_describe_option, _describe_option_tmpl)
options = DictWrapper(_global_config)
#
# Functions for use by pandas developers, in addition to User - api
class option_context(ContextDecorator):
"""
Context manager to temporarily set options in the `with` statement context.
You need to invoke as ``option_context(pat, val, [(pat, val), ...])``.
Examples
--------
>>> with option_context('display.max_rows', 10, 'display.max_columns', 5):
... ...
"""
def __init__(self, *args):
if len(args) % 2 != 0 or len(args) < 2:
raise ValueError(
"Need to invoke as option_context(pat, val, [(pat, val), ...])."
)
self.ops = list(zip(args[::2], args[1::2]))
def __enter__(self):
self.undo = [(pat, _get_option(pat, silent=True)) for pat, val in self.ops]
for pat, val in self.ops:
_set_option(pat, val, silent=True)
def __exit__(self, *args):
if self.undo:
for pat, val in self.undo:
_set_option(pat, val, silent=True)
def register_option(
key: str,
defval: object,
doc: str = "",
validator: Optional[Callable[[Any], Any]] = None,
cb: Optional[Callable[[str], Any]] = None,
) -> None:
"""
Register an option in the package-wide pandas config object
Parameters
----------
key : str
Fully-qualified key, e.g. "x.y.option - z".
defval : object
Default value of the option.
doc : str
Description of the option.
validator : Callable, optional
Function of a single argument, should raise `ValueError` if
called with a value which is not a legal value for the option.
cb
a function of a single argument "key", which is called
immediately after an option value is set/reset. key is
the full name of the option.
Raises
------
ValueError if `validator` is specified and `defval` is not a valid value.
"""
import keyword
import tokenize
key = key.lower()
if key in _registered_options:
raise OptionError(f"Option '{key}' has already been registered")
if key in _reserved_keys:
raise OptionError(f"Option '{key}' is a reserved key")
# the default value should be legal
if validator:
validator(defval)
# walk the nested dict, creating dicts as needed along the path
path = key.split(".")
for k in path:
if not re.match("^" + tokenize.Name + "$", k):
raise ValueError(f"{k} is not a valid identifier")
if keyword.iskeyword(k):
raise ValueError(f"{k} is a python keyword")
cursor = _global_config
msg = "Path prefix to option '{option}' is already an option"
for i, p in enumerate(path[:-1]):
if not isinstance(cursor, dict):
raise OptionError(msg.format(option=".".join(path[:i])))
if p not in cursor:
cursor[p] = {}
cursor = cursor[p]
if not isinstance(cursor, dict):
raise OptionError(msg.format(option=".".join(path[:-1])))
cursor[path[-1]] = defval # initialize
# save the option metadata
_registered_options[key] = RegisteredOption(
key=key, defval=defval, doc=doc, validator=validator, cb=cb
)
def deprecate_option(
key: str, msg: Optional[str] = None, rkey: Optional[str] = None, removal_ver=None
) -> None:
"""
Mark option `key` as deprecated, if code attempts to access this option,
a warning will be produced, using `msg` if given, or a default message
if not.
if `rkey` is given, any access to the key will be re-routed to `rkey`.
    Neither the existence of `key` nor that of `rkey` is checked. If they
    do not exist, any subsequent access will fail as usual, after the
deprecation warning is given.
Parameters
----------
key : str
Name of the option to be deprecated.
must be a fully-qualified option name (e.g "x.y.z.rkey").
msg : str, optional
Warning message to output when the key is referenced.
if no message is given a default message will be emitted.
rkey : str, optional
Name of an option to reroute access to.
If specified, any referenced `key` will be
re-routed to `rkey` including set/get/reset.
rkey must be a fully-qualified option name (e.g "x.y.z.rkey").
used by the default message if no `msg` is specified.
removal_ver : optional
Specifies the version in which this option will
be removed. used by the default message if no `msg` is specified.
Raises
------
OptionError
If the specified key has already been deprecated.
"""
key = key.lower()
if key in _deprecated_options:
raise OptionError(f"Option '{key}' has already been defined as deprecated.")
_deprecated_options[key] = DeprecatedOption(key, msg, rkey, removal_ver)
#
# functions internal to the module
def _select_options(pat: str) -> List[str]:
"""
returns a list of keys matching `pat`
if pat=="all", returns all registered options
"""
# short-circuit for exact key
if pat in _registered_options:
return [pat]
# else look through all of them
keys = sorted(_registered_options.keys())
if pat == "all": # reserved key
return keys
return [k for k in keys if re.search(pat, k, re.I)]
def _get_root(key: str) -> Tuple[Dict[str, Any], str]:
path = key.split(".")
cursor = _global_config
for p in path[:-1]:
cursor = cursor[p]
return cursor, path[-1]
def _is_deprecated(key: str) -> bool:
""" Returns True if the given option has been deprecated """
key = key.lower()
return key in _deprecated_options
def _get_deprecated_option(key: str):
"""
Retrieves the metadata for a deprecated option, if `key` is deprecated.
Returns
-------
DeprecatedOption (namedtuple) if key is deprecated, None otherwise
"""
try:
d = _deprecated_options[key]
except KeyError:
return None
else:
return d
def _get_registered_option(key: str):
"""
Retrieves the option metadata if `key` is a registered option.
Returns
-------
RegisteredOption (namedtuple) if key is deprecated, None otherwise
"""
return _registered_options.get(key)
def _translate_key(key: str) -> str:
"""
    If key is deprecated and a replacement key is defined, returns the
    replacement key; otherwise returns `key` as-is.
"""
d = _get_deprecated_option(key)
if d:
return d.rkey or key
else:
return key
def _warn_if_deprecated(key: str) -> bool:
"""
Checks if `key` is a deprecated option and if so, prints a warning.
Returns
-------
bool - True if `key` is deprecated, False otherwise.
"""
d = _get_deprecated_option(key)
if d:
if d.msg:
print(d.msg)
warnings.warn(d.msg, FutureWarning)
else:
msg = f"'{key}' is deprecated"
if d.removal_ver:
msg += f" and will be removed in {d.removal_ver}"
if d.rkey:
msg += f", please use '{d.rkey}' instead."
else:
msg += ", please refrain from using it."
warnings.warn(msg, FutureWarning)
return True
return False
def _build_option_description(k: str) -> str:
""" Builds a formatted description of a registered option and prints it """
o = _get_registered_option(k)
d = _get_deprecated_option(k)
s = f"{k} "
if o.doc:
s += "\n".join(o.doc.strip().split("\n"))
else:
s += "No description available."
if o:
s += f"\n [default: {o.defval}] [currently: {_get_option(k, True)}]"
if d:
rkey = d.rkey or ""
s += "\n (Deprecated"
s += f", use `{rkey}` instead."
s += ")"
return s
def pp_options_list(keys: Iterable[str], width=80, _print: bool = False):
""" Builds a concise listing of available options, grouped by prefix """
from itertools import groupby
from textwrap import wrap
def pp(name: str, ks: Iterable[str]) -> List[str]:
pfx = "- " + name + ".[" if name else ""
ls = wrap(
", ".join(ks),
width,
initial_indent=pfx,
subsequent_indent=" ",
break_long_words=False,
)
if ls and ls[-1] and name:
ls[-1] = ls[-1] + "]"
return ls
ls: List[str] = []
singles = [x for x in sorted(keys) if x.find(".") < 0]
if singles:
ls += pp("", singles)
keys = [x for x in keys if x.find(".") >= 0]
for k, g in groupby(sorted(keys), lambda x: x[: x.rfind(".")]):
ks = [x[len(k) + 1 :] for x in list(g)]
ls += pp(k, ks)
s = "\n".join(ls)
if _print:
print(s)
else:
return s
#
# helpers
@contextmanager
def config_prefix(prefix):
"""
contextmanager for multiple invocations of API with a common prefix
supported API functions: (register / get / set )__option
    Warning: This is not thread-safe, and won't work properly if you import
the API functions into your module using the "from x import y" construct.
Example
-------
import pandas._config.config as cf
with cf.config_prefix("display.font"):
cf.register_option("color", "red")
cf.register_option("size", " 5 pt")
cf.set_option(size, " 6 pt")
cf.get_option(size)
...
etc'
will register options "display.font.color", "display.font.size", set the
value of "display.font.size"... and so on.
"""
# Note: reset_option relies on set_option, and on key directly
# it does not fit in to this monkey-patching scheme
global register_option, get_option, set_option, reset_option
def wrap(func: F) -> F:
def inner(key: str, *args, **kwds):
pkey = f"{prefix}.{key}"
return func(pkey, *args, **kwds)
return cast(F, inner)
_register_option = register_option
_get_option = get_option
_set_option = set_option
set_option = wrap(set_option)
get_option = wrap(get_option)
register_option = wrap(register_option)
yield None
set_option = _set_option
get_option = _get_option
register_option = _register_option
# These factories and methods are handy for use as the validator
# arg in register_option
def is_type_factory(_type: Type[Any]) -> Callable[[Any], None]:
"""
Parameters
----------
`_type` - a type to be compared against (e.g. type(x) == `_type`)
Returns
-------
validator - a function of a single argument x , which raises
ValueError if type(x) is not equal to `_type`
"""
def inner(x) -> None:
if type(x) != _type:
raise ValueError(f"Value must have type '{_type}'")
return inner
def is_instance_factory(_type) -> Callable[[Any], None]:
"""
Parameters
----------
`_type` - the type to be checked against
Returns
-------
validator - a function of a single argument x , which raises
ValueError if x is not an instance of `_type`
"""
if isinstance(_type, (tuple, list)):
_type = tuple(_type)
type_repr = "|".join(map(str, _type))
else:
type_repr = f"'{_type}'"
def inner(x) -> None:
if not isinstance(x, _type):
raise ValueError(f"Value must be an instance of {type_repr}")
return inner
def is_one_of_factory(legal_values) -> Callable[[Any], None]:
callables = [c for c in legal_values if callable(c)]
legal_values = [c for c in legal_values if not callable(c)]
def inner(x) -> None:
if x not in legal_values:
if not any(c(x) for c in callables):
uvals = [str(lval) for lval in legal_values]
pp_values = "|".join(uvals)
msg = f"Value must be one of {pp_values}"
if len(callables):
msg += " or a callable"
raise ValueError(msg)
return inner
def is_nonnegative_int(value: Optional[int]) -> None:
"""
    Verify that value is None or a nonnegative int.
Parameters
----------
value : None or int
The `value` to be checked.
Raises
------
ValueError
When the value is not None or is a negative integer
"""
if value is None:
return
elif isinstance(value, int):
if value >= 0:
return
msg = "Value must be a nonnegative integer or None"
raise ValueError(msg)
# common type validators, for convenience
# usage: register_option(... , validator = is_int)
is_int = is_type_factory(int)
is_bool = is_type_factory(bool)
is_float = is_type_factory(float)
is_str = is_type_factory(str)
is_text = is_instance_factory((str, bytes))
def is_callable(obj) -> bool:
"""
Parameters
----------
`obj` - the object to be checked
Returns
-------
validator - returns True if object is callable
raises ValueError otherwise.
"""
if not callable(obj):
raise ValueError("Value must be a callable")
return True
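# Illustrative usage sketch (not part of pandas itself; the option name
# "demo.level" is hypothetical):
#
#   import pandas._config.config as cf
#   cf.register_option("demo.level", 1, "demo option", validator=cf.is_int)
#   cf.get_option("demo.level")      # -> 1
#   cf.set_option("demo.level", 3)
#   cf.reset_option("demo.level")    # back to the default, 1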
| [
"[email protected]"
]
| |
97dd05a028b4a33a6d447d46642a0deaecd17029 | 60acb606318869410d7437bf6c1a16fd6762b6b4 | /app/api/img/image_info.py | 65775a862d3bc5708bec535f36d2bce928fd5b49 | [
"Apache-2.0"
]
| permissive | heraclitusj/mgek_imgbed | 8fb0c69599fab3fce06684f659dfd5c0b4c5f866 | d8a77ba1401f42237adda1b3ea8611f6464a704e | refs/heads/master | 2022-07-28T01:48:51.314094 | 2020-05-20T05:35:52 | 2020-05-20T05:35:52 | 265,461,338 | 0 | 0 | null | 2020-05-20T05:31:37 | 2020-05-20T05:31:37 | null | UTF-8 | Python | false | false | 755 | py | # -*- coding: utf-8 -*-
# @Author: Landers
# @Github: Landers1037
# @File: image_info.py
# @Date: 2020-05-14
from app.api.img import img
from app.utils import format_response
from flask import request
from app.database import database
from app import global_config
@img.route('/api/image_info')
def image_info():
    # Fetch image info by its name (translated from the original Chinese comment)
    try:
        name = request.json["name"]
        # renamed local variable so it no longer shadows the imported blueprint `img`
        image = database().get(global_config.engine, 'image', name)
        if image:
            return format_response('ok', image)
        else:
            return format_response('error', 'failed to get image info')
    except Exception:
        return format_response('error', 'failed to get image info')
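# Added note (not in the original file): request.json requires a JSON request
# body, e.g. {"name": "example.png"} (the name is hypothetical), sent with
# Content-Type: application/json; the route only answers GET by default.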
| [
"[email protected]"
]
| |
a6f5e5981efd1440e11604556dda3322676ef081 | 4364fb1fec2ebda2cd240ddc19ef89243812c122 | /tensorflow_datasets/core/utils/version_test.py | 29f7c9e85799c708ef7350ffb36ccf426741908b | [
"Apache-2.0"
]
| permissive | undeadinu/datasets | 67ebbe6c20462ed6f58713ccd8dc1d67db89f4d9 | a6f1bce86404d534b7343fb90f0ebfd6d098c346 | refs/heads/master | 2020-04-16T03:31:37.564934 | 2019-01-11T10:12:42 | 2019-01-11T10:13:12 | 165,234,637 | 0 | 0 | Apache-2.0 | 2019-01-11T11:44:44 | 2019-01-11T11:41:26 | Python | UTF-8 | Python | false | false | 2,090 | py | # coding=utf-8
# Copyright 2018 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tensorflow_datasets.core.utils.version."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow_datasets.core.utils import version
class VersionTest(tf.test.TestCase):
def test_version(self):
"""Test the zip nested function."""
self.assertEqual(version.Version(), version.Version(0, 0, 0))
self.assertEqual(version.Version('1.3.534'), version.Version(1, 3, 534))
self.assertEqual(
version.Version(major=1, minor=3, patch=5), version.Version(1, 3, 5))
self.assertEqual(version.Version('latest'), version.Version.LATEST)
self.assertEqual(
version.Version(version.Version('1.3.5')), version.Version(1, 3, 5))
self.assertEqual(str(version.Version(10, 2, 3)), '10.2.3')
self.assertEqual(str(version.Version()), '0.0.0')
with self.assertRaisesWithPredicateMatch(ValueError, 'Format should be '):
version.Version('1.3.-534')
with self.assertRaisesWithPredicateMatch(ValueError, 'Format should be '):
version.Version('1.3')
with self.assertRaisesWithPredicateMatch(ValueError, 'Format should be '):
version.Version('1.3.')
with self.assertRaisesWithPredicateMatch(ValueError, 'Format should be '):
version.Version('1..5')
with self.assertRaisesWithPredicateMatch(ValueError, 'Format should be '):
version.Version('a.b.c')
if __name__ == '__main__':
tf.test.main()
| [
"[email protected]"
]
| |
11446499dcc2fcc6f3086f797eac1521364b3182 | 0a530e71f248f0f731c6a3f28f090d1bb26b55e3 | /apps/testdatas/migrations/0045_iframebodyinputtext.py | 64f33a379d95c61ef45c53a1b937676f5cd9d202 | []
| no_license | wawj901124/fuwuqi | cca9935f7b110dfdf38c93d59602a596ecac0d58 | e388fd10cf4fdb889d6566b7a30702b7c7351750 | refs/heads/master | 2022-12-13T13:12:10.854319 | 2020-01-18T00:42:52 | 2020-01-18T00:42:52 | 209,901,486 | 0 | 0 | null | 2022-11-22T02:40:33 | 2019-09-21T00:23:51 | Python | UTF-8 | Python | false | false | 3,286 | py | # Generated by Django 2.0.5 on 2019-11-12 15:40
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('testdatas', '0044_newaddandcheck_depend_new_add_and_check_case'),
]
operations = [
migrations.CreateModel(
name='IframeBodyInputText',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('iframe_ele_find', models.CharField(blank=True, default='xpath', help_text='Element locator style: id, name, class_name, tag_name, link_text, partial_link_text, css_selector, xpath', max_length=100, null=True, verbose_name='iframe locator style')),
                ('iframe_ele_find_value', models.CharField(blank=True, default='', max_length=1000, null=True, verbose_name='exact value for the iframe locator')),
                ('input_ele_find', models.CharField(blank=True, default='xpath', help_text='Element locator style: id, name, class_name, tag_name, link_text, partial_link_text, css_selector, xpath', max_length=100, null=True, verbose_name='input box locator style')),
                ('input_ele_find_value', models.CharField(blank=True, default='', max_length=1000, null=True, verbose_name='exact value for the input box locator')),
                ('is_auto_input', models.BooleanField(default=False, verbose_name='whether to auto-fill the input')),
                ('auto_input_type', models.CharField(blank=True, choices=[('1', 'digits'), ('2', 'letters (lowercase)'), ('3', 'letters (uppercase)'), ('4', 'special symbols'), ('5', 'digits and lowercase letters'), ('6', 'digits and uppercase letters'), ('7', 'letters (mixed case)'), ('8', 'digits and letters (mixed case)'), ('9', 'digits, letters and special symbols'), ('10', 'digits, letters, special symbols and whitespace'), ('11', 'Chinese characters'), ('12', 'mobile phone number'), ('13', 'ID card number')], default='11', max_length=10, null=True, verbose_name='type of auto-filled characters')),
                ('auto_input_long', models.CharField(blank=True, default='300', help_text='Number of characters; enter a number, e.g. 1, 2, 3', max_length=100, null=True, verbose_name='number of auto-filled characters')),
                ('input_text', models.CharField(blank=True, default='', max_length=300, null=True, verbose_name='content to type into the input box')),
                ('is_with_time', models.BooleanField(default=True, verbose_name='whether to append a timestamp string')),
                ('is_check', models.BooleanField(default=True, verbose_name='whether to verify')),
                ('add_time', models.DateTimeField(auto_now_add=True, null=True, verbose_name='created time')),
                ('update_time', models.DateTimeField(blank=True, default=datetime.datetime.now, null=True, verbose_name='updated time')),
                ('newaddandcheck', models.ForeignKey(blank=True, default='', null=True, on_delete=django.db.models.deletion.PROTECT, to='testdatas.NewAddAndCheck', verbose_name='dependent add-and-check scenario')),
            ],
            options={
                'verbose_name': 'rich-text input box content',
                'verbose_name_plural': 'rich-text input box content',
            },
),
]
| [
"wawj900805"
]
| wawj900805 |
380ab96a78fd2ad55b5dc0cb5388595bfce7478f | 58b05123d46ce78d35e5ef0cad88b80c5d16939c | /test/test_binary.py | 83aad9dbb98f13b696e2120dbb2d22c2984ac9ae | [
"MIT"
]
| permissive | vibhatha/CrypTen | eab2b7243f55140b440bcd0fc04276ab65e770ce | e550a35aaa0615b5744fdd735a994622c87b47ae | refs/heads/master | 2020-09-23T02:00:44.446982 | 2019-12-02T12:49:26 | 2019-12-02T12:49:26 | 225,373,781 | 0 | 0 | MIT | 2019-12-02T12:48:37 | 2019-12-02T12:48:36 | null | UTF-8 | Python | false | false | 16,452 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import itertools
import logging
import unittest
from test.multiprocess_test_case import MultiProcessTestCase, get_random_test_tensor
import crypten
import torch
from crypten.common.tensor_types import is_int_tensor
from crypten.mpc.primitives import BinarySharedTensor
class TestBinary(MultiProcessTestCase):
"""
This class tests all functions of BinarySharedTensor.
"""
benchmarks_enabled = False
def setUp(self):
super().setUp()
# We don't want the main process (rank -1) to initialize the communcator
if self.rank >= 0:
crypten.init()
def _check(self, encrypted_tensor, reference, msg, tolerance=None):
if tolerance is None:
tolerance = getattr(self, "default_tolerance", 0.05)
tensor = encrypted_tensor.get_plain_text()
# Check sizes match
self.assertTrue(tensor.size() == reference.size(), msg)
self.assertTrue(is_int_tensor(reference), "reference must be a long")
test_passed = (tensor == reference).all().item() == 1
if not test_passed:
logging.info(msg)
logging.info("Result = %s;\nreference = %s" % (tensor, reference))
self.assertTrue(test_passed, msg=msg)
def test_encrypt_decrypt(self):
"""
Tests tensor encryption and decryption for both positive
and negative values.
"""
sizes = [
(),
(1,),
(5,),
(1, 1),
(1, 5),
(5, 1),
(5, 5),
(1, 5, 5),
(5, 1, 5),
(5, 5, 1),
(5, 5, 5),
(1, 3, 32, 32),
(5, 3, 32, 32),
]
for size in sizes:
reference = get_random_test_tensor(size=size, is_float=False)
with self.benchmark(tensor_type="BinarySharedTensor") as bench:
for _ in bench.iters:
encrypted_tensor = BinarySharedTensor(reference)
self._check(encrypted_tensor, reference, "en/decryption failed")
def test_transpose(self):
sizes = [
(1,),
(5,),
(1, 1),
(1, 5),
(5, 1),
(5, 5),
(1, 5, 5),
(5, 1, 5),
(5, 5, 1),
(5, 5, 5),
(1, 3, 32, 32),
(5, 3, 32, 32),
]
for size in sizes:
tensor = get_random_test_tensor(size=size, is_float=False)
encrypted_tensor = BinarySharedTensor(tensor)
if len(size) == 2: # t() asserts dim == 2
reference = tensor.t()
with self.benchmark(niters=10) as bench:
for _ in bench.iters:
encrypted_out = encrypted_tensor.t()
self._check(encrypted_out, reference, "t() failed")
for dim0 in range(len(size)):
for dim1 in range(len(size)):
reference = tensor.transpose(dim0, dim1)
with self.benchmark(niters=10) as bench:
for _ in bench.iters:
encrypted_out = encrypted_tensor.transpose(dim0, dim1)
self._check(encrypted_out, reference, "transpose failed")
def test_XOR(self):
"""Test bitwise-XOR function on BinarySharedTensor"""
for tensor_type in [lambda x: x, BinarySharedTensor]:
tensor = get_random_test_tensor(is_float=False)
tensor2 = get_random_test_tensor(is_float=False)
reference = tensor ^ tensor2
encrypted_tensor = BinarySharedTensor(tensor)
encrypted_tensor2 = tensor_type(tensor2)
with self.benchmark(tensor_type=tensor_type.__name__) as bench:
for _ in bench.iters:
encrypted_out = encrypted_tensor ^ encrypted_tensor2
self._check(encrypted_out, reference, "%s XOR failed" % tensor_type)
def test_AND(self):
"""Test bitwise-AND function on BinarySharedTensor"""
for tensor_type in [lambda x: x, BinarySharedTensor]:
tensor = get_random_test_tensor(is_float=False)
tensor2 = get_random_test_tensor(is_float=False)
reference = tensor & tensor2
encrypted_tensor = BinarySharedTensor(tensor)
encrypted_tensor2 = tensor_type(tensor2)
with self.benchmark(tensor_type=tensor_type.__name__) as bench:
for _ in bench.iters:
encrypted_out = encrypted_tensor & encrypted_tensor2
self._check(encrypted_out, reference, "%s AND failed" % tensor_type)
def test_OR(self):
"""Test bitwise-OR function on BinarySharedTensor"""
for tensor_type in [lambda x: x, BinarySharedTensor]:
tensor = get_random_test_tensor(is_float=False)
tensor2 = get_random_test_tensor(is_float=False)
reference = tensor | tensor2
encrypted_tensor = BinarySharedTensor(tensor)
encrypted_tensor2 = tensor_type(tensor2)
with self.benchmark(tensor_type=tensor_type.__name__) as bench:
for _ in bench.iters:
encrypted_out = encrypted_tensor | encrypted_tensor2
self._check(encrypted_out, reference, "%s OR failed" % tensor_type)
def test_bitwise_broadcasting(self):
"""Tests bitwise function broadcasting"""
bitwise_ops = ["__and__", "__or__", "__xor__"]
sizes = [
(),
(1,),
(2,),
(1, 1),
(1, 2),
(2, 1),
(2, 2),
(1, 1, 1),
(1, 1, 2),
(1, 2, 1),
(2, 1, 1),
(2, 2, 2),
(1, 1, 1, 1),
(1, 1, 1, 2),
(1, 1, 2, 1),
(1, 2, 1, 1),
(2, 1, 1, 1),
(2, 2, 2, 2),
]
for tensor_type in [lambda x: x, BinarySharedTensor]:
for op in bitwise_ops:
for size1, size2 in itertools.combinations(sizes, 2):
tensor1 = get_random_test_tensor(size=size1, is_float=False)
tensor2 = get_random_test_tensor(size=size2, is_float=False)
encrypted_tensor1 = BinarySharedTensor(tensor1)
tensor2_transformed = tensor_type(tensor2)
if isinstance(tensor2_transformed, BinarySharedTensor):
tensor2_transformed_type = "private"
else:
tensor2_transformed_type = "public"
self._check(
getattr(encrypted_tensor1, op)(tensor2_transformed),
getattr(tensor1, op)(tensor2),
f"{tensor2_transformed_type} {op} broadcasting "
f"failed with sizes {size1}, {size2}",
)
def test_invert(self):
"""Test bitwise-invert function on BinarySharedTensor"""
tensor = get_random_test_tensor(is_float=False)
encrypted_tensor = BinarySharedTensor(tensor)
reference = ~tensor
with self.benchmark() as bench:
for _ in bench.iters:
encrypted_out = ~encrypted_tensor
self._check(encrypted_out, reference, "invert failed")
def test_add(self):
"""Tests add using binary shares"""
for tensor_type in [lambda x: x, BinarySharedTensor]:
tensor = get_random_test_tensor(is_float=False)
tensor2 = get_random_test_tensor(is_float=False)
reference = tensor + tensor2
encrypted_tensor = BinarySharedTensor(tensor)
encrypted_tensor2 = tensor_type(tensor2)
with self.benchmark(tensor_type=tensor_type.__name__) as bench:
for _ in bench.iters:
encrypted_out = encrypted_tensor + encrypted_tensor2
self._check(encrypted_out, reference, "%s AND failed" % tensor_type)
def test_sum(self):
"""Tests sum using binary shares"""
tensor = get_random_test_tensor(size=(5, 5, 5), is_float=False)
encrypted = BinarySharedTensor(tensor)
self._check(encrypted.sum(), tensor.sum(), "sum failed")
for dim in [0, 1, 2]:
reference = tensor.sum(dim)
with self.benchmark(type="sum", dim=dim) as bench:
for _ in bench.iters:
encrypted_out = encrypted.sum(dim)
self._check(encrypted_out, reference, "sum failed")
def test_get_set(self):
for tensor_type in [lambda x: x, BinarySharedTensor]:
for size in range(1, 5):
# Test __getitem__
tensor = get_random_test_tensor(size=(size, size), is_float=False)
reference = tensor[:, 0]
encrypted_tensor = BinarySharedTensor(tensor)
encrypted_out = encrypted_tensor[:, 0]
self._check(encrypted_out, reference, "getitem failed")
reference = tensor[0, :]
encrypted_out = encrypted_tensor[0, :]
self._check(encrypted_out, reference, "getitem failed")
# Test __setitem__
tensor2 = get_random_test_tensor(size=(size,), is_float=False)
reference = tensor.clone()
reference[:, 0] = tensor2
encrypted_out = BinarySharedTensor(tensor)
encrypted2 = tensor_type(tensor2)
encrypted_out[:, 0] = encrypted2
self._check(
encrypted_out, reference, "%s setitem failed" % type(encrypted2)
)
reference = tensor.clone()
reference[0, :] = tensor2
encrypted_out = BinarySharedTensor(tensor)
encrypted2 = tensor_type(tensor2)
encrypted_out[0, :] = encrypted2
self._check(
encrypted_out, reference, "%s setitem failed" % type(encrypted2)
)
def test_share_attr(self):
"""Tests share attribute getter and setter"""
for is_float in (True, False):
reference = get_random_test_tensor(is_float=is_float)
encrypted_tensor = BinarySharedTensor(reference)
self.assertTrue(
torch.equal(encrypted_tensor.share, encrypted_tensor.share),
"share getter failed",
)
new_share = get_random_test_tensor(is_float=False)
encrypted_tensor.share = new_share
self.assertTrue(
torch.equal(encrypted_tensor.share, new_share), "share setter failed"
)
def test_inplace(self):
"""Test inplace vs. out-of-place functions"""
for op in ["__xor__", "__and__", "__or__"]:
for tensor_type in [lambda x: x, BinarySharedTensor]:
tensor1 = get_random_test_tensor(is_float=False)
tensor2 = get_random_test_tensor(is_float=False)
reference = getattr(tensor1, op)(tensor2)
encrypted1 = BinarySharedTensor(tensor1)
encrypted2 = tensor_type(tensor2)
input_plain_id = id(encrypted1.share)
input_encrypted_id = id(encrypted1)
# Test that out-of-place functions do not modify the input
private = isinstance(encrypted2, BinarySharedTensor)
encrypted_out = getattr(encrypted1, op)(encrypted2)
self._check(
encrypted1,
tensor1,
"%s out-of-place %s modifies input"
% ("private" if private else "public", op),
)
self._check(
encrypted_out,
reference,
"%s out-of-place %s produces incorrect output"
% ("private" if private else "public", op),
)
self.assertFalse(id(encrypted_out.share) == input_plain_id)
self.assertFalse(id(encrypted_out) == input_encrypted_id)
# Test that in-place functions modify the input
inplace_op = op[:2] + "i" + op[2:]
encrypted_out = getattr(encrypted1, inplace_op)(encrypted2)
self._check(
encrypted1,
reference,
"%s in-place %s does not modify input"
% ("private" if private else "public", inplace_op),
)
self._check(
encrypted_out,
reference,
"%s in-place %s produces incorrect output"
% ("private" if private else "public", inplace_op),
)
self.assertTrue(id(encrypted_out.share) == input_plain_id)
self.assertTrue(id(encrypted_out) == input_encrypted_id)
def test_control_flow_failure(self):
"""Tests that control flow fails as expected"""
tensor = get_random_test_tensor(is_float=False)
encrypted_tensor = BinarySharedTensor(tensor)
with self.assertRaises(RuntimeError):
if encrypted_tensor:
pass
with self.assertRaises(RuntimeError):
tensor = 5 if encrypted_tensor else 0
with self.assertRaises(RuntimeError):
if False:
pass
elif encrypted_tensor:
pass
def test_src_failure(self):
"""Tests that out-of-bounds src fails as expected"""
tensor = get_random_test_tensor(is_float=True)
for src in [None, "abc", -2, self.world_size]:
with self.assertRaises(AssertionError):
BinarySharedTensor(tensor, src=src)
def test_src_match_input_data(self):
"""Tests incorrect src in BinarySharedTensor fails as expected"""
tensor = get_random_test_tensor(is_float=True)
tensor.src = 0
for testing_src in [None, "abc", -2, self.world_size]:
with self.assertRaises(AssertionError):
BinarySharedTensor(tensor, src=testing_src)
def test_where(self):
"""Tests where() conditional element selection"""
sizes = [(10,), (5, 10), (1, 5, 10)]
y_types = [lambda x: x, BinarySharedTensor]
for size, y_type in itertools.product(sizes, y_types):
tensor1 = get_random_test_tensor(size=size, is_float=False)
encrypted_tensor1 = BinarySharedTensor(tensor1)
tensor2 = get_random_test_tensor(size=size, is_float=False)
encrypted_tensor2 = y_type(tensor2)
condition_tensor = (
get_random_test_tensor(max_value=1, size=[1], is_float=False) + 1
)
condition_encrypted = BinarySharedTensor(condition_tensor)
condition_bool = condition_tensor.bool()
reference_out = tensor1 * condition_tensor + tensor2 * (
1 - condition_tensor
)
encrypted_out = encrypted_tensor1.where(condition_bool, encrypted_tensor2)
y_is_private = y_type == BinarySharedTensor
self._check(
encrypted_out,
reference_out,
f"{'private' if y_is_private else 'public'} y "
"where failed with public condition",
)
encrypted_out = encrypted_tensor1.where(
condition_encrypted, encrypted_tensor2
)
self._check(
encrypted_out,
reference_out,
f"{'private' if y_is_private else 'public'} y "
"where failed with private condition",
)
# TODO: Write the following unit tests
@unittest.skip("Test not implemented")
def test_gather_scatter(self):
pass
@unittest.skip("Test not implemented")
def test_split(self):
pass
# This code only runs when executing the file outside the test harness (e.g.
# via the buck target test_mpc_benchmark)
if __name__ == "__main__":
TestBinary.benchmarks_enabled = True
unittest.main()
| [
"[email protected]"
]
| |
9a14bed785bfa237ff5c6773094106b0f1003dfd | 0c0168a4676bce7453836a7509e7133044aa8975 | /byceps/services/board/dbmodels/last_topic_view.py | bf001ad5b4d7e1f3890dc09d3263e58d53b1050d | [
"BSD-3-Clause"
]
| permissive | byceps/byceps | 0aad3c4d974f76c6f8c3674d5539a80c9107b97a | eaee2b7fdc08c76c16ddf7f436110e0b5f1812e5 | refs/heads/main | 2023-09-01T04:03:13.365687 | 2023-09-01T03:28:18 | 2023-09-01T03:28:18 | 40,150,239 | 44 | 23 | BSD-3-Clause | 2023-05-16T18:41:32 | 2015-08-03T22:05:23 | Python | UTF-8 | Python | false | false | 1,280 | py | """
byceps.services.board.dbmodels.last_topic_view
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2014-2023 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
from datetime import datetime
from byceps.database import db
from byceps.services.board.models import TopicID
from byceps.typing import UserID
from byceps.util.instances import ReprBuilder
from .topic import DbTopic
class DbLastTopicView(db.Model):
"""The last time a user looked into specific topic."""
__tablename__ = 'board_topics_lastviews'
user_id = db.Column(db.Uuid, db.ForeignKey('users.id'), primary_key=True)
topic_id = db.Column(
db.Uuid, db.ForeignKey('board_topics.id'), primary_key=True
)
topic = db.relationship(DbTopic)
occurred_at = db.Column(db.DateTime, nullable=False)
def __init__(
self, user_id: UserID, topic_id: TopicID, occurred_at: datetime
) -> None:
self.user_id = user_id
self.topic_id = topic_id
self.occurred_at = occurred_at
def __repr__(self) -> str:
return (
ReprBuilder(self)
.add_with_lookup('user_id')
.add('topic', self.topic.title)
.add_with_lookup('occurred_at')
.build()
)
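# Illustrative usage sketch (not part of the original module; identifiers
# are hypothetical):
#
#   view = DbLastTopicView(user_id, topic_id, datetime.utcnow())
#   db.session.add(view)
#   db.session.commit()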
| [
"[email protected]"
]
| |
d6a6a6f115d922ccb5f1b7753846a7efdb2e2a9f | 86bfb43636b24eef7ea580544797a5f84bf555b5 | /DjangoBlog/tests.py | 47fda838a1c4eca60f51314e7e57e8f125b6da98 | [
"MIT"
]
| permissive | kongnyc/DjangoBlog | 3e83af3b5e96706055964c3554cdff1b0b80f06e | 38173bf3b47bafa82fadc9e7a95fad0c959c3dd5 | refs/heads/master | 2023-08-22T10:01:36.074884 | 2021-10-18T08:29:29 | 2021-10-18T08:29:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | #!/usr/bin/env python
# encoding: utf-8
"""
@version: ??
@author: liangliangyy
@license: MIT Licence
@contact: [email protected]
@site: https://www.lylinux.net/
@software: PyCharm
@file: tests.py
@time: 2017/10/25 10:16 PM
"""
from django.test import TestCase
from DjangoBlog.utils import *
class DjangoBlogTest(TestCase):
def setUp(self):
pass
def test_utils(self):
md5 = get_sha256('test')
self.assertIsNotNone(md5)
c = CommonMarkdown.get_markdown('''
# Title1
```python
import os
```
[url](https://www.lylinux.net/)
[ddd](http://www.baidu.com)
''')
self.assertIsNotNone(c)
d = {
'd': 'key1',
'd2': 'key2'
}
data = parse_dict_to_url(d)
self.assertIsNotNone(data)
| [
"[email protected]"
]
| |
cae778a22518efc67a593ae11d6e3eae36f3c314 | 9693f521acf87faa6406f071db432a248fbaa731 | /API_CRUD/asgi.py | e8ba240642e4df756cdd613ee48fb1a816350b0c | []
| no_license | musfiqraihan/Django_API_CRUD | a8678c4ed04ada73139cbc20740e5e1471f11ef0 | 1405fc867a797580ae0d3753fc6f09914d008d3d | refs/heads/master | 2023-02-11T16:56:24.290716 | 2020-12-30T11:13:40 | 2020-12-30T11:13:40 | 325,232,539 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | """
ASGI config for API_CRUD project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'API_CRUD.settings')
application = get_asgi_application()
| [
"[email protected]"
]
| |
b7f5069628cdc9ce0511d09d0286eda67f188636 | b1bc2e54f8cd35c9abb6fc4adb35b386c12fe6b4 | /toontown/src/coghq/DistributedMintBattle.py | 01664f0a9ce6d947852b03f28be3e5495ec65230 | []
| no_license | satire6/Anesidora | da3a44e2a49b85252b87b612b435fb4970469583 | 0e7bfc1fe29fd595df0b982e40f94c30befb1ec7 | refs/heads/master | 2022-12-16T20:05:13.167119 | 2020-09-11T16:58:04 | 2020-09-11T17:02:06 | 294,751,966 | 89 | 32 | null | null | null | null | UTF-8 | Python | false | false | 2,711 | py | from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from toontown.battle.BattleBase import *
from toontown.coghq import DistributedLevelBattle
from direct.directnotify import DirectNotifyGlobal
from toontown.toon import TTEmote
from otp.avatar import Emote
from toontown.battle import SuitBattleGlobals
import random
from toontown.suit import SuitDNA
from direct.fsm import State
from direct.fsm import ClassicFSM, State
from toontown.toonbase import ToontownGlobals
class DistributedMintBattle(DistributedLevelBattle.DistributedLevelBattle):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedMintBattle')
def __init__(self, cr):
"""
cr is a ClientRepository.
"""
DistributedLevelBattle.DistributedLevelBattle.__init__(self,cr)
# Add a new reward state to the battle ClassicFSM
self.fsm.addState(State.State('MintReward',
self.enterMintReward,
self.exitMintReward,
['Resume']))
offState = self.fsm.getStateNamed('Off')
offState.addTransition('MintReward')
playMovieState = self.fsm.getStateNamed('PlayMovie')
playMovieState.addTransition('MintReward')
##### MintReward state #####
def enterMintReward(self, ts):
self.notify.debug('enterMintReward()')
self.disableCollision()
self.delayDeleteMembers()
if (self.hasLocalToon()):
NametagGlobals.setMasterArrowsOn(0)
if self.bossBattle:
messenger.send('localToonConfrontedMintBoss')
self.movie.playReward(ts, self.uniqueName('building-reward'),
self.__handleMintRewardDone)
def __handleMintRewardDone(self):
self.notify.debug('mint reward done')
if (self.hasLocalToon()):
self.d_rewardDone(base.localAvatar.doId)
self.movie.resetReward()
# Now request our local battle object enter the Resume state,
# which frees us from the battle. The distributed object may
# not enter the Resume state yet (it has to wait until all the
# toons involved have reported back up), but there's no reason
# we have to wait around for that.
self.fsm.request('Resume')
def exitMintReward(self):
self.notify.debug('exitMintReward()')
# In case we're observing and the server cuts us off
# this guarantees all final animations get started and things
# get cleaned up
self.movie.resetReward(finish=1)
self._removeMembersKeep()
NametagGlobals.setMasterArrowsOn(1)
| [
"[email protected]"
]
| |
15fa2494c7c4baa6af4b59cd81d061f6ba182f0b | 4fee81e2fa833911ea2f65ee975cf7cc1724b6a2 | /venv/Lib/site-packages/pyrogram/raw/functions/help/set_bot_updates_status.py | 5092c28d617adc3e734574de9c35af707d2a2493 | []
| no_license | devID767/Farm_Duel_Bot | 453628d2bcaba27dab01338d9368e6727781901e | aaab3c16ec6626f5196f24f741bfeecc63492116 | refs/heads/master | 2023-08-14T22:06:06.705300 | 2021-10-08T13:50:15 | 2021-10-08T13:50:15 | 360,924,958 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,468 | py | # Pyrogram - Telegram MTProto API Client Library for Python
# Copyright (C) 2017-2021 Dan <https://github.com/delivrance>
#
# This file is part of Pyrogram.
#
# Pyrogram is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pyrogram is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Pyrogram. If not, see <http://www.gnu.org/licenses/>.
from io import BytesIO
from pyrogram.raw.core.primitives import Int, Long, Int128, Int256, Bool, Bytes, String, Double, Vector
from pyrogram.raw.core import TLObject
from pyrogram import raw
from typing import List, Union, Any
# # # # # # # # # # # # # # # # # # # # # # # #
# !!! WARNING !!! #
# This is a generated file! #
# All changes made in this file will be lost! #
# # # # # # # # # # # # # # # # # # # # # # # #
class SetBotUpdatesStatus(TLObject): # type: ignore
"""Telegram API method.
Details:
- Layer: ``126``
- ID: ``0xec22cfcd``
Parameters:
pending_updates_count: ``int`` ``32-bit``
message: ``str``
Returns:
``bool``
"""
__slots__: List[str] = ["pending_updates_count", "message"]
ID = 0xec22cfcd
QUALNAME = "functions.help.SetBotUpdatesStatus"
def __init__(self, *, pending_updates_count: int, message: str) -> None:
self.pending_updates_count = pending_updates_count # int
self.message = message # string
@staticmethod
def read(data: BytesIO, *args: Any) -> "SetBotUpdatesStatus":
# No flags
pending_updates_count = Int.read(data)
message = String.read(data)
return SetBotUpdatesStatus(pending_updates_count=pending_updates_count, message=message)
def write(self) -> bytes:
data = BytesIO()
data.write(Int(self.ID, False))
# No flags
data.write(Int(self.pending_updates_count))
data.write(String(self.message))
return data.getvalue()
| [
"[email protected]"
]
| |
cfec3e3f9e181f7560fa29eacbdd7626ae414e3b | 16ac02b8f427bd622af1564f1236e4913ed63521 | /Codes/Version 1.9.1/force_sorter.py | c3a4f9e16eb12403d6897fbe06a5af0f0c70f7b5 | [
"MIT"
]
| permissive | gharib85/Brownian-dynamics-in-a-time-varying-force-field | 20660665747310e1201e8ca7d404acc15ec7a3bd | 1dce268fcc4f27e066be0ec0b511178cbc1437c5 | refs/heads/main | 2023-08-16T03:47:51.957137 | 2021-10-23T19:09:50 | 2021-10-23T19:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,583 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on March 6, 2021
@author: Mohammad Asif Zaman
- May 28, 2021
- Functionalized fluid velocity
"""
import numpy as np
import pylab as py
import matplotlib as plt
import time
from scipy import interpolate
from parameters import *
from geometry_def_sorter import *
# Module global Parameters:
# =============================================================================
# Setting fluid flow velocity
# vx_flow = 120e-6
# vy_flow = 0e-6
# vz_flow = 0e-6
def vel_lm(xin, yin, zin): # linear model of fluid velocity
Np = xin.size
v_out = np.zeros((3,Np))
v_out[0,:] = 150e-6
v_out[1,:] = 0
v_out[2,:] = 0
v_out[1,np.where( (xin > 170e-6) & (yin >= 30e-6 ) )] = 30e-6
v_out[1,np.where( (xin > 170e-6) & (yin <= -30e-6 ) )] = -30e-6
return v_out
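# vel_lm encodes the sorter geometry (values inferred from the constants above):
# a uniform 150 um/s flow along +x, plus a +/-30 um/s y-component past
# x > 170 um that steers particles toward the upper or lower outlet branch.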
# def fluid_vel(r_in, t):
# Np = r_in[0,:].size
# xin = r_in[0,:]
# yin = r_in[1,:]
# zin = r_in[2,:]
# v_fluid = np.zeros((3,Np))
# v_fluid[0,:] = 120e-6
# v_fluid[1,:] = 0
# v_fluid[2,:] = 0
# v_fluid[1,np.where( (xin > 170e-6) & (yin >= 30e-6 ) )] = 30e-6
# v_fluid[1,np.where( (xin > 170e-6) & (yin <= -30e-6 ) )] = -30e-6
# return v_fluid
def fluid_vel(r_in, t):
Np = r_in[0,:].size
xi = r_in[0,:]
yi = r_in[1,:]
zi = r_in[2,:]
d = 10e-6
# temporary variables
v1 = np.zeros((3,Np))
v2 = np.zeros((3,Np))
# Moving average smoothing of fluid velocity
# number of points per axis over which to average the velocity predicted by the linear velocity model
N_avg_points = 15 # must be an odd number
    for m in range(int((N_avg_points - 1)/2 + 1)):
        if m == 0:
            # center sample, counted once for each axis
            v1 = v1 + vel_lm(xi, yi, zi)
            v2 = v2 + vel_lm(xi, yi, zi)
        else:
            # symmetric offsets along x (for v1) and y (for v2)
            v1 = v1 + vel_lm(xi + d*m, yi, zi) + vel_lm(xi - d*m, yi, zi)
            v2 = v2 + vel_lm(xi, yi + d*m, zi) + vel_lm(xi, yi - d*m, zi)
v_fluid = (v1 + v2)/(2*N_avg_points)
return v_fluid
# Read force data from data file
Mdata = np.genfromtxt('Fy_XY_grid2.csv',delimiter=',',skip_header=9)
xdata = Mdata[:,0]*1e-6
ydata = Mdata[:,1]*1e-6
points = np.array( (xdata, ydata)).T
Fydata = Mdata[:,2]*1e-12
# This is function that is called from the main program
# Simplified spring force model
def force_profile(r_in, t):
# Np = r_in[0,:].size
Np, ro, tfinal, x_lim, y_lim, z_lim, xi_lim, yi_lim, zi_lim = time_pos_ax_limits()
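    # Alternating odd/even per-particle factors; note this pattern assumes Np is even.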
temp = [.7,-0.25]
od_ev = np.array(int(Np/2)*[temp]).flatten()
xin = r_in[0,:]
yin = r_in[1,:]
fy = interpolate.griddata(points,Fydata,(xin,abs(yin)),method='linear',fill_value = 0)*np.sign(yin)
fz = -od_ev*.3e-12
fm = np.zeros((3,Np))
fm[1,:] = fy*od_ev
fm[2,:] = fz
return fm
# force_plot()
# Np = 1
# # # xin = [1, 4, 2, 3]
# # # xin = np.array(xin)
# # # v_temp = np.zeros(Np)
# # # v_temp[np.where(xin > 2)] = 7
# r = np.random.rand(3,Np,2)
# rin = r[:,:,0]
# t = 0
# rin[0] = 176e-6
# rin[1] = 50e-6
# vf = fluid_vel(rin,t)
# # print(rin)
# print(vf)
# # vf = fluid_vel([[170e-6,40e-6,2e-6]],t)
# # print(vf)
# # interpolation speed test
# start_time = time.time()
# print('\n\n===========================================\n')
# Np = 24
# r = np.random.rand(3,Np,2)
# rin = r[:,:,0]
# t = 0
# tt= force_profile(rin,t)
# print("Execution time = %1.2f seconds \n" % (time.time() - start_time))
# print('\n===========================================\n')
| [
"[email protected]"
]
| |
ae355ce430c65d2fdcab3685713278f7881f618e | 32dda10669e459cf37c31f426fa709001d2c75b0 | /leetcode_cn/solved/pg_926.py | f858309b00cdcbf0e1b7e35377b4344e80aca5d1 | []
| no_license | fastso/learning-python | 3300f50d06871245d0bfcbe9d201224580f70852 | d21dbd1b9f31017cdb1ed9b9ffd1e53ffe326572 | refs/heads/master | 2023-02-10T14:43:53.726247 | 2023-01-26T10:14:59 | 2023-01-26T10:14:59 | 193,454,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 215 | py | class Solution:
def minFlipsMonoIncr(self, s: str) -> int:
p = [0]
for c in s:
p.append(p[-1] + int(c))
return min(p[i] + len(s) - i - (p[-1] - p[i]) for i in range(len(p)))
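
# Quick self-check (test strings taken from the classic LeetCode examples):
if __name__ == "__main__":
    s = Solution()
    assert s.minFlipsMonoIncr("00110") == 1   # flip the trailing '0'
    assert s.minFlipsMonoIncr("010110") == 2
    print("ok")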
| [
"[email protected]"
]
| |
01ad1462bd75d8419ef238415f086483904f0de2 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /110_concurrency_parallelism/_examples/Learn Parallel Computing in Python/004_condition_variables/stingy_spendy_cond_variable.py | 790010e66a70ddcb69293dc906896900f0e63fe9 | []
| no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 791 | py | import time
from threading import Thread, Condition
class StingySpendy:
money = 100
cv = Condition()
def stingy(self):
for i in range(1000000):
self.cv.acquire()
self.money += 10
self.cv.notify()
self.cv.release()
print("Stingy Done")
def spendy(self):
for i in range(500000):
self.cv.acquire()
while self.money < 20:
self.cv.wait()
self.money -= 20
if self.money < 0:
print("Money in bank", self.money)
self.cv.release()
print("Spendy Done")
ss = StingySpendy()
Thread(target=ss.stingy, args=()).start()
Thread(target=ss.spendy, args=()).start()
time.sleep(5)
print("Money in the end", ss.money)
| [
"[email protected]"
]
| |
0b4478a24e878754d3ce2b47225c1fcdeda4363b | 53edf6b0f4262ee76bb4e3b943394cfeafe54865 | /linear_theory/Non_linear_stuff/geopack_test.py | 8885c66e0b3c6e38a6be562edce45df4d3d234f6 | []
| no_license | Yoshi2112/hybrid | f86265a2d35cb0a402ba6ab5f718717d8eeb740c | 85f3051be9368bced41af7d73b4ede9c3e15ff16 | refs/heads/master | 2023-07-07T21:47:59.791167 | 2023-06-27T23:09:23 | 2023-06-27T23:09:23 | 82,878,960 | 0 | 1 | null | 2020-04-16T18:03:59 | 2017-02-23T03:14:49 | Python | UTF-8 | Python | false | false | 3,948 | py | # -*- coding: utf-8 -*-
"""
Created on Sun Jan 31 11:17:17 2021
@author: Yoshi
"""
import pdb
import matplotlib.pyplot as plt
import numpy as np
import geopack as gp
# Want to use geopack to give me a trace of the magnetic field alone a field
# line, and somehow convert (x, y, z) in GSM (?) to an s-value
# Would it be easier to trace out the field line first and output (x,yz) at
# standard intervals along the line, then retrieve the fields at those points
# using whatever? That seems like the easy part (if each point has MLAT, r)
def read_QIN_file(filepath):
'''
Hard-coded because I'm too lazy to learn JSON
'''
# Could potentially be hardcoded with n_lines = 1440 but this is safer
print('Reading', filepath)
n_lines = 0
with open(filepath) as f:
for line in f:
if line[0] != '#':
n_lines += 1
# Initialize arrays
year, month, day, hour, minute, second, ByIMF, BzIMF, Vsw, den_P, Pdyn, \
ByIMF_status, BzIMF_status, Vsw_status, den_P_status, Pdyn_status, \
Kp, akp3, Dst, = [np.zeros(n_lines) for _ in range(19)]
epoch = np.zeros((n_lines), dtype=str)
G = np.zeros((n_lines, 3))
G_status = np.zeros((n_lines, 3))
Bz = np.zeros((n_lines, 6))
W = np.zeros((n_lines, 6))
W_status = np.zeros((n_lines, 6))
# Pack in dict? Do later
with open(filepath) as f:
ii = 0
for line in f:
if line[0] != '#':
A = line.split()
epoch[ii] = A[0]
year[ii] = int(A[1])
month[ii] = int(A[2])
day[ii] = int(A[3])
hour[ii] = int(A[4])
minute[ii] = int(A[5])
second[ii] = int(A[6])
ByIMF[ii] = float(A[7])
BzIMF[ii] = float(A[8])
Vsw[ii] = float(A[9])
den_P[ii] = float(A[10])
Pdyn[ii] = float(A[11])
G[ii, 0] = float(A[12])
G[ii, 1] = float(A[13])
G[ii, 2] = float(A[14])
ByIMF_status[ii] = float(A[15])
BzIMF_status[ii] = float(A[16])
Vsw_status[ii] = float(A[17])
den_P_status[ii] = float(A[18])
Pdyn_status[ii] = float(A[19])
G_status[ii, 0] = float(A[20])
G_status[ii, 1] = float(A[21])
G_status[ii, 2] = float(A[22])
Kp[ii] = float(A[23])
akp3[ii] = float(A[24])
Dst[ii] = float(A[25])
Bz[ii, 0] = float(A[26]); Bz[ii, 1] = float(A[27]); Bz[ii, 2] = float(A[28])
Bz[ii, 3] = float(A[29]); Bz[ii, 4] = float(A[30]); Bz[ii, 5] = float(A[31])
W[ii, 0] = float(A[32]); W[ii, 1] = float(A[33]); W[ii, 2] = float(A[34])
W[ii, 3] = float(A[35]); W[ii, 4] = float(A[36]); W[ii, 5] = float(A[37])
W_status[ii, 0] = float(A[38]); W_status[ii, 1] = float(A[39])
W_status[ii, 2] = float(A[40]); W_status[ii, 3] = float(A[41])
W_status[ii, 4] = float(A[42]); W_status[ii, 5] = float(A[43])
ii += 1
return
if __name__ == '__main__':
FPATH = 'G://DATA//QIN_DENTON//2020//QinDenton_20200101_1min.txt'
read_QIN_file(FPATH)
L_value = 6 # GSM: (L, 0, 0) would be the equatorial point
xf, yf, zf, xn, yn, zn=gp.geopack.trace(L_value, 0.0, 0.0, -1)
xf, yf, zf, xs, ys, zs=gp.geopack.trace(L_value, 0.0, 0.0, 1)
# Check radius:
r = np.sqrt(xf ** 2 + yf ** 2 + zf ** 2)
earth = plt.Circle((0, 0), 1.0, color='k', fill=False)
# Plot field
fig, ax = plt.subplots()
ax.scatter(xn, zn)
ax.scatter(xs, zs)
ax.add_patch(earth)
ax.axis('equal')
| [
"[email protected]"
]
| |
4c9df45b6b637fcd01d083c271d5cbf9cfbc3413 | c7e028d71b5dd72eb18b72c6733e7e98a969ade6 | /src/demos/datastructures/expressioneval.py | 30185336f42de290841e528b0e3f0eb704a41207 | [
"MIT"
]
| permissive | antoniosarosi/algoritmia | da075a7ac29cc09cbb31e46b82ae0b0ea8ee992f | 22b7d61e34f54a3dee03bf9e3de7bb4dd7daa31b | refs/heads/master | 2023-01-24T06:09:37.616107 | 2020-11-19T16:34:09 | 2020-11-19T16:34:09 | 314,302,653 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,609 | py | #coding: latin1
from algoritmia.problems.traversals.treetraversals import PostorderTreeTraverser #[]dos
from algoritmia.datastructures.queues import Lifo #[pre
from algoritmia.datastructures.trees import ListOfListsTree
class ExpressionEvaluator:
def __init__(self, createLifo=lambda: Lifo()):
self.createLifo = createLifo
def tokenize(self, expression: "str") -> "Iterable<str>":
i = 0
while i < len(expression):
lexeme = []
if '0' <= expression[i] <= '9':
while i < len(expression) and '0' <= expression[i] <= '9':
lexeme.append(expression[i])
i += 1
yield int(''.join(lexeme))
elif expression[i] in '+*-/()':
yield expression[i]
i += 1
else:
i += 1
def parse(self, expression: "str") -> "ITree<str>":
S = self.createLifo()
tree = []
op = {'+': 0, '-': 0, '*': 1, '/': 1}
for token in self.tokenize(expression):
if type(token) == int:
tree.append([token])
elif token in op:
while len(S) > 0 and S.top() in op and op[token] <= op[S.top()]:
tree[-2:] = [[S.pop(), tree[-2], tree[-1]]]
S.push(token)
elif token == '(':
S.push('(')
elif token == ')':
while S.top() != '(':
tree[-2:] = [[S.pop(), tree[-2], tree[-1]]]
S.pop()
while len(S) > 0:
tree[-2:] = [[S.pop(), tree[-2], tree[-1]]]
return ListOfListsTree(tree[0]) #]pre
def evaluate(self, exp: "str") -> "int": #[dos
tree = self.parse(exp)
stack = self.createLifo()
visitor = lambda t: self.process_root(t, stack=stack)
for dummy in PostorderTreeTraverser().traverse(tree, visitor): pass
return stack.pop()
def process_root(self, tree: "ITree<str>", stack: "Lifo"):
if isinstance(tree.root, str) and tree.root in "+-*/":
a, b = stack.pop(), stack.pop()
if tree.root == '+': stack.push(b + a)
elif tree.root == '-': stack.push(b - a)
elif tree.root == '*': stack.push(b * a)
else: stack.push(b // a)
else:
stack.push(tree.root) #]dos
if __name__ == "__main__": #[tres
ee = ExpressionEvaluator()
exp = "2 - 5 + 3 * 6"
print('{} -> {}'.format(ee.parse(exp), ee.evaluate(exp))) #]tres
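    # Expected output: the parse tree followed by 15, since (2 - 5) + (3 * 6) = 15.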
| [
"amarzal@localhost"
]
| amarzal@localhost |
529e7a1ad3229cb026b3acc0d8c25e1c1d835045 | 2125593138c50b1fba5e46cd4d88d6c04d0b417a | /์๊ณ ๋ฆฌ์ฆ-ํ์ด์ฐธ/๋ฐฑ์ค/code_Test/LZW ์์ถ.py | 19315db3a2344d4ff8407c1596119911f4df610c | []
| no_license | minkishome/TIL-master | 5f0e6ef61b34a2983961ccf44f7523603ccb5907 | d8edc0ff8abff3b2239a2d751eee263b722013a6 | refs/heads/master | 2023-01-21T00:43:30.165535 | 2020-08-25T14:56:18 | 2020-08-25T14:56:18 | 203,070,283 | 0 | 1 | null | 2023-01-05T01:08:10 | 2019-08-19T00:18:31 | Python | UTF-8 | Python | false | false | 895 | py | from string import ascii_uppercase
def solution(msg):
answer = []
ls_abc = [0] + list(ascii_uppercase)
idx = 0
flag = len(msg)
# print(ls_abc.index('AA'))
while idx < flag:
word = ''
for i in range(idx, flag):
word += msg[i]
idx += 1
if word in ls_abc:
continue
            else:  # word not yet in the dictionary
ls_abc.append(word)
idx -= 1
break
# ls_abc.append(word)
if idx == flag:
answer.append(ls_abc.index(word))
else:
answer.append(ls_abc.index(word[:-1]))
# print(word)
# print(ls_abc)
# print(answer)
# print(idx)
# answer.append(ls_abc.index(word[:-1]))
# print(answer)
# print(len(msg))
return answer
solution('KAKAO')
solution('TOBEORNOTTOBEORTOBEORNOT') | [
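# Expected results (from the original KAKAO blind-recruitment example; quoted
# from memory, so verify against the problem statement):
#   solution('KAKAO') -> [11, 1, 27, 15]
#   solution('TOBEORNOTTOBEORTOBEORNOT')
#     -> [20, 15, 2, 5, 15, 18, 14, 15, 20, 27, 29, 31, 36, 30, 32, 34]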
"[email protected]"
]
| |
41028d5c4ae422f93009422780b1acaf80ec0334 | 96d77068d6762bad6847a2c74dfd6c220a3fdd75 | /features/generate_wordvector_distances.py | d3514741c4fc634bd2fd297014510fcb474f002a | [
"MIT"
]
| permissive | SunnyMarkLiu/Kaggle_Quora_Question_Pairs_Intent | 402cc2c4eaf40d25794dd6ff0b583411865f884b | 1b6914cd08d1bb3e815aacdf0f220458f5d75f7c | refs/heads/master | 2021-05-05T12:08:51.823673 | 2017-10-05T10:42:52 | 2017-10-05T10:42:52 | 104,720,649 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,489 | py | #!/usr/local/miniconda2/bin/python
# _*_ coding: utf-8 _*_
"""
@author: MarkLiu
@time : 17-10-5 ไธๅ11:28
"""
from __future__ import absolute_import, division, print_function
import os
import sys
module_path = os.path.abspath(os.path.join('..'))
sys.path.append(module_path)
import cPickle
import numpy as np
from utils import data_utils, jobs
from sklearn.metrics.pairwise import cosine_distances, euclidean_distances
from optparse import OptionParser
from conf.configure import Configure
def generate_word_vector_map():
"""
build index mapping words in the embeddings set
to their embedding vector
"""
embeddings_index = {}
embeddings_index_path = '/d_2t/lq/kaggle/Kaggle_Quora_Question_Pairs_Intent/embeddings_index.pkl'
if os.path.exists(embeddings_index_path):
with open(embeddings_index_path, "rb") as f:
embeddings_index = cPickle.load(f)
return embeddings_index
f = open(Configure.pretrained_wordvectors)
for line in f:
values = line.split()
word = values[0]
coefs = np.asarray(values[1:], dtype='float32')
embeddings_index[word] = coefs
f.close()
with open(embeddings_index_path, "wb") as f:
cPickle.dump(embeddings_index, f, -1)
return embeddings_index
def get_wordvector(word):
embedding_vector = embeddings_index.get(word)
embedding_vector = embedding_vector if embedding_vector is not None else [0] * 300
return embedding_vector
def generate_wordvectors_features(df):
df['cleaned_question1_vcs'] = df['cleaned_question1'].map(lambda x: [get_wordvector(word) for word in str(x).split()])
df['cq1_sentence_vector'] = df['cleaned_question1_vcs'].map(lambda x: np.mean(x, axis=0) if len(x) > 0 else [0] * 300)
del df['cleaned_question1_vcs']
df['cleaned_question2_vcs'] = df['cleaned_question2'].map(lambda x: [get_wordvector(word) for word in str(x).split()])
df['cq2_sentence_vector'] = df['cleaned_question2_vcs'].map(lambda x: np.mean(x, axis=0) if len(x) > 0 else [0] * 300)
del df['cleaned_question2_vcs']
return df
def generate_wordvector_distance(df):
df['cp1_mean_vector'] = df['cq1_sentence_vector'].map(lambda x: np.mean(x, axis=0))
df['cp2_mean_vector'] = df['cq2_sentence_vector'].map(lambda x: np.mean(x, axis=0))
df['cp_wordvector_cosine_distances'] = df.apply(lambda row: cosine_distances(np.array(row['cq1_sentence_vector']).reshape(1, -1),
np.array(row['cq2_sentence_vector']).reshape(1, -1))[0][0],
axis=1)
df['cp_wordvector_euclidean_distances'] = df.apply(lambda row: euclidean_distances(np.array(row['cq1_sentence_vector']).reshape(1, -1),
np.array(row['cq2_sentence_vector']).reshape(1, -1))[0][0],
axis=1)
del df['cq1_sentence_vector']
del df['cq2_sentence_vector']
return df
parser = OptionParser()
parser.add_option(
"-d", "--base_data_dir",
dest="base_data_dir",
default="perform_stem_words",
help="""base dataset dir:
perform_stem_words,
perform_no_stem_words,
full_data_perform_stem_words,
full_data_perform_no_stem_words"""
)
options, _ = parser.parse_args()
print("========== generate word vector features ==========")
base_data_dir = options.base_data_dir
op_scope = 5
if os.path.exists(Configure.processed_train_path.format(base_data_dir, op_scope + 1)):
exit()
print("---> load datasets from scope {}".format(op_scope))
train, test = data_utils.load_dataset(base_data_dir, op_scope)
print("train: {}, test: {}".format(train.shape, test.shape))
print('---> generate word vector mapping')
embeddings_index = generate_word_vector_map()
print('---> generate wordvectors features')
train = jobs.parallelize_dataframe(train, generate_wordvectors_features)
test = jobs.parallelize_dataframe(test, generate_wordvectors_features)
print('---> generate wordvector distance features')
train = jobs.parallelize_dataframe(train, generate_wordvector_distance)
test = jobs.parallelize_dataframe(test, generate_wordvector_distance)
print("train: {}, test: {}".format(train.shape, test.shape))
print("---> save datasets")
data_utils.save_dataset(base_data_dir, train, test, op_scope + 1)
| [
"[email protected]"
]
| |
0a0065801350f7189f26c180481a975d51f3d661 | abef98cfa3fb2c4626eb8c0a77c1080992d9b11b | /c/python-interface/spam/test-spam.py | c714b78481ecb371933cc40cde3dd794cddbcf59 | []
| no_license | mikebentley15/sandbox | ff88ed9dc4b9ac37668142a319d0a8162e88e9e3 | 4f5869544de18be21f415a9d6f9b71c362307f27 | refs/heads/main | 2023-04-14T00:22:34.623441 | 2023-03-24T21:43:56 | 2023-03-24T21:43:56 | 116,987,549 | 6 | 3 | null | 2022-10-26T03:02:06 | 2018-01-10T17:14:54 | C++ | UTF-8 | Python | false | false | 947 | py | #!/usr/bin/env python3
import spam
print()
status = spam.system("ls -l | wc")
print("status: ", status)
print()
print('Expect spam.SpamError')
try:
status = spam.check_system("false")
print("status: ", status)
except spam.SpamError as ex:
print(' ', ex)
print(' ignored')
print()
s = spam.Spam("Real brand of SPAM")
s.print()
print(s)
print()
n1 = spam.Noddy1()
print(n1)
print()
n2 = spam.Noddy2(first="Mike", last="Bentley")
print(n2)
print("Name: ", n2.name())
print()
n2 = spam.Noddy2(first=2)
print(n2)
print("Name: ", n2.name())
print()
n3 = spam.Noddy3(first="Mike", last="Bentley")
print(n3)
print("Name: ", n3.name())
print()
print('Expect TypeError')
try:
spam.Noddy3(first=3)
except TypeError as ex:
print(' ', ex)
print(' ignored')
print()
n4 = spam.Noddy4(first="Mike", last="Bentley")
print(n4)
print("Name: ", n4.name())
print()
n4 = spam.Noddy4(first=2)
print(n4)
print("Name: ", n4.name())
| [
"[email protected]"
]
| |
f2795230818272b8a213a4a39e164aa72cffeb1a | 0629ada6a00ceff0dc567b965ec021278ca27a7f | /pyhon_zh_web/pageobject/Home_page.py | b0428c5fb1c75ac098c54857b75b2147cf16d303 | []
| no_license | zhaozongzhao/sqbj_web | 48310e41dc914ae82bc3fa919b63a9ffea04f22e | 7d33acbaca18b6764f80e2247ff631e46d5b0b89 | refs/heads/master | 2020-04-09T18:22:49.280922 | 2019-09-26T10:01:53 | 2019-09-26T10:01:53 | 160,510,366 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,277 | py | from selenium import webdriver
import time
from selenium.webdriver.support.ui import WebDriverWait
import traceback
from Util.ParsePageObjectRepository import *
from Util.var import *
from Util.ObjectMap import *
from selenium.webdriver.support.ui import Select
class HomePage(object):
def __init__(self,driver):
self.driver = driver
        self.parse_page_object = ParsePageObjectRepository('PageObjectRepository')
        self.login_items = self.parse_page_object.getItemSection('zh_page_home')
        self.wait = WebDriverWait(self.driver, 10, 0.2)

    # Locate the property-fee (charge) entry
    def open_charge(self):
        locateType, locateExpression = self.login_items['page.charge'].split('>')
charge = getElement(self.driver,locateType,locateExpression)
return charge
    # Locate the basic-services entry
    def open_basic(self):
        locateType, locateExpression = self.login_items['page.basic_services'].split('>')
charge = getElement(self.driver,locateType,locateExpression)
return charge
def get_login_name(self):
        locateType, locateExpression = self.login_items['page.personal'].split('>')
time.sleep(1)
name = getElement(self.driver,locateType,locateExpression).text
print(name)
return name
| [
"[email protected]"
]
| |
1851b1294e96a418b521d90051a6aa7ba5f128fc | efd8628adc042ae2d58fa89cc31a5c1c80aa94f6 | /data/stochastic_dataset_script/load_save_data.py | faf224b7b20e02dfae644277e27d62b61bfaff04 | []
| no_license | Xharlie/stochast_dynamic_for_video_infilling | d7e0bfaf8b71cf3f39170793e5a1a50b289aee40 | a825de4c5178f7084925817f0631ac331073866f | refs/heads/master | 2021-03-27T09:11:15.478067 | 2019-10-23T17:59:25 | 2019-10-23T17:59:25 | 110,137,739 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,896 | py | import tensorflow as tf
import numpy as np
import imageio
import cv2
def vids_2_frames(tfiles, data_path, image_size_h, image_size_w, channel):
vids=[]
actions=[]
for i in xrange(len(tfiles)):
f_name = tfiles[i]
tokens = f_name.split()
vid_path = data_path + tokens[0] + ("_uncomp.avi" if channel ==1 else "")
print "vid_path:", vid_path
try:
vid = imageio.get_reader(vid_path, "ffmpeg")
if len(tokens) < 2:
low = 1
high = vid.get_length()
else:
low = int(tokens[1])
high = np.min([int(tokens[2]), vid.get_length()])
seq = np.zeros((image_size_h, image_size_w, high - low + 1, channel), dtype=np.uint8)
for t in xrange(high - low + 1):
if channel == 1:
# w,h not h,w here!
img = cv2.cvtColor(cv2.resize(vid.get_data(t),
(image_size_w, image_size_h)), cv2.COLOR_RGB2GRAY)
else:
img = cv2.resize(vid.get_data(t),(image_size_w, image_size_h))
# print img.shape, seq.shape
if len(img.shape) == 2: seq[:, :, t, :] = img[:, :, None]
else: seq[:, :, t, :] = img[:, :, :]
print tokens[0]
if (len(tokens[0].split("_"))) > 0:
actions.append(tokens[0].split("_")[1])
vids.append(seq)
        except KeyError as err:
            # report the caught exception rather than the class-level attribute
            print err
            continue
return vids, actions
def save_data2record(tfiles, data_path, image_size_h, image_size_w, tf_record_dir, channel):
vids, actions = vids_2_frames(tfiles, data_path, image_size_h, image_size_w, channel)
print actions
tfrecords_filename = 'tfrecords'
writer = tf.python_io.TFRecordWriter(tf_record_dir + tfrecords_filename)
for i in xrange(len(vids)):
vids_record = tf.train.Example(features=tf.train.Features(
feature={
'height': _int64_feature(image_size_h),
'width': _int64_feature(image_size_w),
'depth': _int64_feature(vids[i].shape[2]),
'channels': _int64_feature(channel),
'action': _bytes_feature(actions[i]),
'vid': _bytes_feature(vids[i].tostring())
}
))
writer.write(vids_record.SerializeToString())
print "finish writing video{} to {}".format(i, tf_record_dir + tfrecords_filename)
writer.close()
return vids
def save_data2records(tfiles, data_path, image_size_h, image_size_w, tf_record_dir, channel):
tf_size = 800
start = 0
end = 0
files=[]
while start <= len(tfiles):
end = min(start + tf_size, len(tfiles) + 1)
if end + tf_size / 4 > len(tfiles): end = len(tfiles) + 1
print "file start and end:",start,end
vids, actions = vids_2_frames(tfiles[start:end], data_path, image_size_h, image_size_w, channel)
tfrecords_filename = 'tfrecords' + str(start / tf_size)
writer = tf.python_io.TFRecordWriter(tf_record_dir + tfrecords_filename)
for i in xrange(len(vids)):
vids_record = tf.train.Example(features=tf.train.Features(
feature={
'height': _int64_feature(image_size_h),
'width': _int64_feature(image_size_w),
'depth': _int64_feature(vids[i].shape[2]),
'channels': _int64_feature(channel),
'action': _bytes_feature(actions[i]),
'vid': _bytes_feature(vids[i].tostring())
}
))
writer.write(vids_record.SerializeToString())
print "finish writing video{} to {}".format(i, tf_record_dir + tfrecords_filename)
files.append(tf_record_dir + tfrecords_filename)
writer.close()
start = end
return files
def load_records(tf_record_files, length=None):
vids = []
for i in xrange(len(tf_record_files)):
print "loading {}".format(tf_record_files[i])
record_iterator = tf.python_io.tf_record_iterator(path=tf_record_files[i])
for string_record in record_iterator:
example = tf.train.Example()
example.ParseFromString(string_record)
height = int(example.features.feature['height']
.int64_list
.value[0])
width = int(example.features.feature['width']
.int64_list
.value[0])
depth = int(example.features.feature['depth']
.int64_list
.value[0])
vid_string = (example.features.feature['vid']
.bytes_list
.value[0])
vid_raw = np.fromstring(vid_string, dtype=np.uint8)
vid = vid_raw.reshape((height, width, depth, -1))
if length is not None and vid.shape[-2] < length:
print length, vid.shape[-2]
continue
vids.append(vid)
print "finish {}".format(tf_record_files[i])
print len(vids), " videos in total"
return vids
def load_record_inbatch(file_queue):
reader = tf.TFRecordReader()
_, serialized_example = reader.read(file_queue)
features = tf.parse_single_example(serialized_example,
features={
'depth': tf.FixedLenFeature([1], tf.int64),
'vid': tf.FixedLenFeature([],tf.string),
}
)
depth = tf.cast(features['depth'], tf.int32)
return features["vid"], depth
def _int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
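
# Minimal usage sketch (paths, frame size and channel count are illustrative):
#   record_files = save_data2records(tfiles, data_path, 64, 64, './records/', 1)
#   vids = load_records(record_files, length=20)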
| [
"[email protected]"
]
| |
ad5dc2ef13c059d6e792cddc072b9046d03f772e | e6172246d02ab175f7490e2791ef691d670c72b1 | /.history/data-comparaison_20210202103348.py | 2e42eeed44f3b2e04b3eea2508fc1273d7527ee5 | []
| no_license | PaulSouille/rendu-data-tp1 | 2711ddd7f96eb7c1cf5ca350c0b0c40f767e5d43 | 8dcb85e4f5db38420be6f12bd18efb945d570aa2 | refs/heads/master | 2023-02-23T12:57:28.089782 | 2021-02-02T12:09:29 | 2021-02-02T12:09:29 | 327,024,559 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,881 | py | import matplotlib
matplotlib.use("TkAgg")
import matplotlib.pyplot as plt
import numpy as np
import glob
from tkinter import *
from matplotlib.backends.backend_tkagg import (
FigureCanvasTkAgg, NavigationToolbar2Tk)
from matplotlib.backend_bases import key_press_handler
import pandas as pd
from pandas import ExcelWriter
from pandas import ExcelFile
array_csv = glob.glob("data/*.xlsx")
array_df = []
for file in array_csv:
df = pd.read_excel(file, skiprows = 2, nrows= 32, usecols = 'C:O',sheet_name=0)
df = df.drop(df.index[0])
df.drop(df.columns[0], axis=1, inplace=True)
array_df.append(df)
def clear_data(array):
for index,data in enumerate(array, start=1):
# Get current value of our index
current = data
# Retrieve the value of the next day
if(index < len(array) and is_float_try(current)):
if(not is_float_try(array[index+1]) or abs(array[index+1]-float(current)) > 15):
next_ = float(array[index+2])
else:
next_ = float(array[index+1])
# Check if the current index isn't a String and if so convert it to the average value of the day before/after
if(not is_float_try(current)):
average = float((previous + next_)/2)
array[index]=average
            # If the absolute difference between next day and previous day is > 15°C then it's considered an error.
            # We check values and the average difference is 15°C except for records.
            if(index > 1):
                if(not is_float_try(array[index-1]) or abs(array[index-1]-float(current)) > 15):
previous = float(array[index-2])
else:
previous = float(array[index-1])
if(not is_float_try(current) or abs(previous-float(current)) > 15 or abs(next_-float(current)) > 15):
average = (previous + next_)/2
print(str(index) + " : " + str(current))
print(str(previous) + " : " + str(next_))
array[index]=average
def is_float_try(str):
try:
float(str)
return True
except ValueError:
return False
def config_plot():
fig, ax = plt.subplots()
return (fig, ax)
class matplotlibSwitchGraphs:
def __init__(self, master):
self.master = master
self.frame = Frame(self.master)
self.fig, self.ax = config_plot()
self.graphIndex = 0
self.canvas = FigureCanvasTkAgg(self.fig, self.master)
self.config_window()
self.draw_graph('janvier')
self.frame.pack(expand=YES, fill=BOTH)
def config_window(self):
self.canvas.mpl_connect("key_press_event", self.on_key_press)
toolbar = NavigationToolbar2Tk(self.canvas, self.master)
toolbar.update()
self.canvas.get_tk_widget().pack(side=TOP, fill=BOTH, expand=1)
self.button = Button(self.master, text="Quit", command=self._quit)
self.button.pack(side=BOTTOM)
        self.button_back = Button(self.master, text="Graphique précédent", command=self.back_graph)
self.button_back.pack(side=BOTTOM)
self.button_next = Button(self.master, text="Graphique suivant", command=self.next_graph)
self.button_next.pack(side=BOTTOM)
    def draw_graph(self, month):
        if month == 'année':
            # Build one year-long series per dataset by concatenating the monthly columns.
            self.ax.clear()
            for index, df in enumerate(array_df):
                df_temp = pd.DataFrame(columns=['Température'])
                for column in df:
                    for value in df[column]:
                        df_temp = df_temp.append({'Température': value}, ignore_index=True)
                df_temp = df_temp.dropna()
                self.ax.plot(df_temp['Température'], label=array_csv[index])
            self.ax.legend()
            self.ax.set(title='Année')
            self.canvas.draw()
        else:
            # Clean each dataset in place, then overlay them for comparison.
            self.ax.clear()
            for index, df in enumerate(array_df):
                clear_data(df[month])
                self.ax.plot(df[month], label=array_csv[index])
            self.ax.legend()
            self.ax.set(title=month)
            self.canvas.draw()
    def on_key_press(self, event):
        # the toolbar created in config_window is not in scope here; the default is fine
        key_press_handler(event, self.canvas)
def _quit(self):
self.master.quit()
    # Month keys match the Excel sheet columns; 'année' is the full-year view.
    months = ['janvier', 'février', 'mars', 'avril', 'mai', 'juin', 'juillet',
              'août', 'septembre', 'octobre', 'novembre', 'décembre', 'année']

    def next_graph(self):
        # Cycle forward through the months, ending on the full-year view.
        self.graphIndex = (self.graphIndex + 1) % len(self.months)
        self.draw_graph(self.months[self.graphIndex])

    def back_graph(self):
        # Cycle backward, wrapping from janvier round to the full-year view.
        self.graphIndex = (self.graphIndex - 1) % len(self.months)
        self.draw_graph(self.months[self.graphIndex])
def main():
root = Tk()
matplotlibSwitchGraphs(root)
root.mainloop()
def show_graph():
main()
fenetre = Tk()
fenetre.title("Data tp 1")
fenetre.geometry('800x800')
canvas=Canvas(fenetre,bg='#FFFFFF',width=800,height=800,scrollregion=(0,0,1500,1500))
hbar=Scrollbar(fenetre,orient=HORIZONTAL)
hbar.pack(side=BOTTOM,fill=X)
hbar.config(command=canvas.xview)
vbar=Scrollbar(fenetre,orient=VERTICAL)
vbar.pack(side=RIGHT,fill=Y)
vbar.config(command=canvas.yview)
canvas.config(width=800,height=800)
canvas.config(xscrollcommand=hbar.set, yscrollcommand=vbar.set)
canvas.pack(side=LEFT,expand=True,fill=BOTH)
canvas.create_text(100,20,fill="black",font="Times 15 bold",
text="Jeux de donnรฉes base")
for index,df in enumerate(array_df):
canvas.create_text(400+(index*300),20,fill="black",font="Times 15 bold",
text=array_csv[index])
for index_, column in enumerate(df):
canvas.create_text(400,50+index_*40,fill="black",font="Times 15 bold",
text="Mois : "+str(column))
b = Button(fenetre, text="Afficher les graphiques", width=10, command=show_graph,background="white",foreground="black",activebackground="white",activeforeground="black")
b.place(x=600, y=20, anchor="nw", width=150, height=30)
fenetre.mainloop() | [
"[email protected]"
]
| |
28d5878f4c2a1cf00f7f31d34b8e4562a84581ec | 347f0db8fa1d2d589fd20bffd52f1a1b2030aa0b | /docs/conf.py | d9d82c156cbb01277f482730530c9b5b822443f0 | [
"MIT"
]
| permissive | szymanskirafal/tryread | 45879f8f4867851dc5f8697e1dc98bf68c1a58e4 | f7e395c318e33de84992c79eaa5844028c378a2d | refs/heads/master | 2021-07-21T11:50:51.702422 | 2018-10-05T13:37:34 | 2018-10-05T13:37:34 | 134,755,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,904 | py | # Tryread documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
# sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ["_templates"]
# The suffix of source filenames.
source_suffix = ".rst"
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = "index"
# General information about the project.
project = "Tryread"
copyright = """2018, Rafal Szymanski"""
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = "0.1"
# The full version, including alpha/beta/rc tags.
release = "0.1"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
# language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
# today = ''
# Else, today_fmt is used as the format for a strftime call.
# today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ["_build"]
# The reST default role (used for this markup: `text`) to use for all documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = "sphinx"
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = "default"
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = "tryreaddoc"
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
(
"index",
"tryread.tex",
"Tryread Documentation",
"""Rafal Szymanski""",
"manual",
)
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(
"index",
"tryread",
"Tryread Documentation",
["""Rafal Szymanski"""],
1,
)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(
"index",
"tryread",
"Tryread Documentation",
"""Rafal Szymanski""",
"Tryread",
"""Paas for readers and writers.""",
"Miscellaneous",
)
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
| [
"[email protected]"
]
| |
263512d0be279d0f6bd225b8a3da8f787fa2f742 | 57ac8a6a7b61d2d740f877d29cab5f208ba74bfc | /thinc/neural/ids2vecs.py | d617e567bac12e3aeb506b92dcd53f8e081b1007 | [
"MIT"
]
| permissive | kootenpv/thinc | 0a75509733aa5afb1b59115a13a78aa6d3a88c24 | 6de95eff2079a4fb880e9de6d756be9aa59db480 | refs/heads/master | 2021-01-11T20:52:03.566727 | 2017-01-16T17:29:15 | 2017-01-16T17:29:15 | 79,200,645 | 0 | 0 | null | 2017-01-17T07:25:58 | 2017-01-17T07:25:58 | null | UTF-8 | Python | false | false | 55 | py | from ._classes.window_encode import MaxoutWindowEncode
| [
"[email protected]"
]
| |
4d33d5ef634b5714a020bc5477a6c0a495e1ad47 | fa3d19403750d300a4228fa0fc414c88c49a6d35 | /bin/pfurl | 37f78b8d6bb0f06d9b056bd75cde10fb8222b4ab | [
"MIT"
]
| permissive | priyakapadia/pfurl | 2c700217d0f2c623e5fddde62c96f10502eebb49 | 3392347b5874fc4af7f1f3798e692bd0e3733351 | refs/heads/master | 2021-01-20T02:24:47.745713 | 2017-04-21T19:36:07 | 2017-04-21T19:36:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,075 | #!/usr/bin/env python3.5
#
# (c) 2017 Fetal-Neonatal Neuroimaging & Developmental Science Center
# Boston Children's Hospital
#
# http://childrenshospital.org/FNNDSC/
# [email protected]
#
import sys, os
sys.path.insert(1, os.path.join(os.path.dirname(__file__), '..'))
import socket
import json
import sys
import pfurl
from argparse import RawTextHelpFormatter
from argparse import ArgumentParser
from pfurl._colors import Colors
# Best-effort guess of a non-loopback local IP: prefer addresses resolved from
# the hostname, falling back to the source address of an outbound UDP socket.
str_defIP = [l for l in (
    [ip for ip in socket.gethostbyname_ex(socket.gethostname())[2]
     if not ip.startswith("127.")][:1],
    [[(s.connect(('8.8.8.8', 53)), s.getsockname()[0], s.close())
      for s in [socket.socket(socket.AF_INET, socket.SOCK_DGRAM)]][0][1]]
) if l][0][0]
str_defPort = '5055'
str_version = "1.1.1"
str_desc = Colors.CYAN + """
__ _
/ _| | |
_ __ | |_ _ _ _ __| |
| '_ \| _| | | | '__| |
| |_) | | | |_| | | | |
| .__/|_| \__,_|_| |_|
| |
|_|
Process-File-over-URL
A simple URL-based communication and control script.
-- version """ + \
Colors.YELLOW + str_version + Colors.CYAN + """ --
'pfurl' sends REST conforming commands and data to remote services, similar
in some ways to the well-known CLI tool, 'curl' or the Python tool, 'httpie'
'pfurl' not only sends curl type payloads, but can also zip and unzip entire
directories of files for transmission and reception.
'pfurl' is designed to be part of the ChRIS/CHIPS framework.
""" + \
Colors.BLINK_RED + """
+---------------------------------------------------------+
| NOTE THAT 'pfurl' COMMS ARE NOT NATIVELY ENCRYPTED! |
| USE AN SSH TUNNEL IF YOU NEED SECURE DATA TRANSMISSION. |
+---------------------------------------------------------+
""" + Colors.NO_COLOUR
parser = ArgumentParser(description = str_desc, formatter_class = RawTextHelpFormatter)
parser.add_argument(
'--msg',
action = 'store',
dest = 'msg',
default = '',
help = 'Message to send to pman or similar listener.'
)
parser.add_argument(
'--verb',
action = 'store',
dest = 'verb',
default = 'POST',
help = 'REST verb.'
)
parser.add_argument(
'--http',
action = 'store',
dest = 'http',
default = '%s:%s' % (str_defIP, str_defPort),
help = 'HTTP string: <IP>[:<port>]</some/path/>'
)
parser.add_argument(
'--auth',
action = 'store',
dest = 'auth',
default = '',
help = 'user:passwd authorization'
)
parser.add_argument(
'--jsonwrapper',
action = 'store',
dest = 'jsonwrapper',
default = '',
help = 'wrap msg in optional field'
)
parser.add_argument(
'--quiet',
help = 'if specified, only echo final JSON output returned from server',
dest = 'b_quiet',
action = 'store_true',
default = False
)
parser.add_argument(
'--raw',
help = 'if specified, do not wrap return data from remote call in json field',
dest = 'b_raw',
action = 'store_true',
default = False
)
parser.add_argument(
'--oneShot',
help = 'if specified, transmit a shutdown ctl to the remote service after event',
dest = 'b_oneShot',
action = 'store_true',
default = False
)
parser.add_argument(
'--man',
help = 'request help: --man commands',
dest = 'man',
action = 'store',
default = ''
)
parser.add_argument(
'--content-type',
help = 'content type',
dest = 'contentType',
action = 'store',
default = ''
)
parser.add_argument(
'--jsonpprintindent',
help = 'pretty print json-formatted payloads',
dest = 'jsonpprintindent',
action = 'store',
default = 0
)
parser.add_argument(
'--httpResponse',
help = 'if specified, return HTTP responses',
dest = 'b_httpResponse',
action = 'store_true',
default = False
)
parser.add_argument(
'--version',
help = 'if specified, print version number',
dest = 'b_version',
action = 'store_true',
default = False
)
args = parser.parse_args()
if args.b_version:
print("Version: %s" % str_version)
sys.exit(1)
pfurl = pfurl.Pfurl(
msg = args.msg,
http = args.http,
verb = args.verb,
contentType = args.contentType,
auth = args.auth,
b_raw = args.b_raw,
b_quiet = args.b_quiet,
b_oneShot = args.b_oneShot,
b_httpResponse = args.b_httpResponse,
jsonwrapper = args.jsonwrapper,
man = args.man,
startFromCLI = True,
desc = str_desc
)
if not args.jsonpprintindent:
print(pfurl())
else:
print(json.dumps(json.loads(pfurl()), indent=int(args.jsonpprintindent)))
sys.exit(0)
| [
"[email protected]"
]
| ||
7638f5b06086fd4352f99f4e28e29719f59c1a59 | 3b786d3854e830a4b46ee55851ca186becbfa650 | /SystemTesting/pylib/nsx/vsm/edge/edge_firewall_schema/edge_firewall_rule_application_schema.py | e6b475f52d5dcc3b1981baeeef177e822df242aa | []
| no_license | Cloudxtreme/MyProject | d81f8d38684333c22084b88141b712c78b140777 | 5b55817c050b637e2747084290f6206d2e622938 | refs/heads/master | 2021-05-31T10:26:42.951835 | 2015-12-10T09:57:04 | 2015-12-10T09:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | import base_schema
from edge_firewall_rule_application_service_schema import FirewallRuleApplicationServiceSchema
class FirewallRuleApplicationSchema(base_schema.BaseSchema):
_schema_name = "application"
def __init__(self, py_dict=None):
""" Constructor to create FirewallRuleApplicationSchema object
@param py_dict : python dictionary to construct this object
"""
super(FirewallRuleApplicationSchema, self).__init__()
self.set_data_type('xml')
self.applicationId = None
self.service = FirewallRuleApplicationServiceSchema()
if py_dict is not None:
self.get_object_from_py_dict(py_dict) | [
"[email protected]"
]
| |
02519f538cf828be80054c0bca5864505200a4ce | 9e0c8ee707cba341ccfd5007cc94ab122687ac4c | /isho/apps.py | 9983d2dc520fb2b9938c45e5508ee6897e50a61b | []
| no_license | rakib06/POS | a9eb467a4528c6d6956acf42c73314626b858418 | e1390c3353280fb9cbabf4ed957d92a4f0852b3f | refs/heads/master | 2020-12-21T11:10:33.888058 | 2020-01-28T13:11:12 | 2020-01-28T13:11:12 | 236,414,785 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | from django.apps import AppConfig
class IshoConfig(AppConfig):
name = 'isho'
| [
"[email protected]"
]
| |
7108aea2c66162999af1e28d09bad1a12b17d561 | 6a403251befa23a48229831baf58af247db7b855 | /src/utils.py | bce4a71b16ecbc75c38d215f0361be0c32f2568f | []
| no_license | huasmc/RemindMeBot | 0003fde7f4be24beae76659082619e0a38f2d48b | ad3d5749935a71973be88985059ad7f94f73ad6e | refs/heads/master | 2023-03-01T19:46:42.901881 | 2021-02-09T03:49:29 | 2021-02-09T03:49:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,214 | py | import re
import discord_logging
import dateparser
from dateutil.relativedelta import relativedelta
from dateparser.search import search_dates
import parsedatetime
import pytz
from datetime import datetime
from datetime import timedelta
import urllib.parse
import urllib3
import prawcore
import requests
import counters
import static
log = discord_logging.get_logger()
debug_time = None
cal = parsedatetime.Calendar()
def process_error(message, exception, traceback):
is_transient = \
isinstance(exception, prawcore.exceptions.ServerError) or \
isinstance(exception, prawcore.exceptions.ResponseException) or \
isinstance(exception, prawcore.exceptions.RequestException) or \
isinstance(exception, requests.exceptions.Timeout) or \
isinstance(exception, requests.exceptions.ReadTimeout) or \
isinstance(exception, requests.exceptions.RequestException) or \
isinstance(exception, urllib3.exceptions.ReadTimeoutError)
log.warning(f"{message}: {type(exception).__name__} : {exception}")
if is_transient:
log.info(traceback)
counters.errors.labels(type='api').inc()
else:
log.warning(traceback)
counters.errors.labels(type='other').inc()
return is_transient
def find_reminder_message(body, trigger):
line_match = re.search(
r'(?:{trigger}.+)(?:(?:\[)([^\]]+?)(?:\])|(?:\")([^\"]+?)(?:\")|(?:โ)([^โ]*?)(?:โ))(?:[^(]|\n|$)'.format(
trigger=trigger),
body,
flags=re.IGNORECASE)
if line_match:
return line_match.group(1) or line_match.group(2) or line_match.group(3)
match = re.search(
r'(?:(?:\[)([^\]]+?)(?:\])|(?:\")([^\"]+?)(?:\")|(?:โ)([^โ]*?)(?:โ))(?:[^(]|\n|$)',
body,
flags=re.IGNORECASE)
if match:
return match.group(1) or match.group(2) or match.group(3)
else:
return None
def find_reminder_time(body, trigger):
regex_string = r'(?:{trigger}.? +)(.*?)(?:\[|\n|\"|โ|$)'.format(trigger=trigger)
times = re.findall(regex_string, body, flags=re.IGNORECASE)
if len(times) > 0 and times[0] != "":
return times[0][:80]
regex_string = r'(?:{trigger}.? *)(.*?)(?:\[|\n|\"|โ|$)'.format(trigger=trigger)
times = re.findall(regex_string, body, flags=re.IGNORECASE)
if len(times) > 0 and times[0] != "":
return times[0][:80]
else:
return None
def parse_time(time_string, base_time, timezone_string):
base_time = datetime_as_timezone(base_time, timezone_string)
try:
date_time = dateparser.parse(
time_string,
languages=['en'],
settings={"PREFER_DATES_FROM": 'future', "RELATIVE_BASE": base_time.replace(tzinfo=None)})
except Exception:
date_time = None
if date_time is None:
try:
results = search_dates(
time_string,
languages=['en'],
settings={"PREFER_DATES_FROM": 'future', "RELATIVE_BASE": base_time.replace(tzinfo=None)})
if results is not None:
temp_time = results[0][1]
if temp_time.tzinfo is None:
temp_time = datetime_force_utc(temp_time)
if temp_time > base_time:
date_time = results[0][1]
else:
date_time = None
except Exception:
date_time = None
if date_time is None:
try:
date_time, result_code = cal.parseDT(time_string, base_time)
if result_code == 0:
date_time = None
except Exception:
date_time = None
if date_time is None:
return None
if date_time.tzinfo is None:
if timezone_string is not None:
date_time = pytz.timezone(timezone_string).localize(date_time)
else:
date_time = datetime_force_utc(date_time)
date_time = datetime_as_utc(date_time)
return date_time
def render_time(date_time, user=None, format_string=None):
timezone = user.timezone if user is not None else None
time_format = user.time_format if user is not None else None
if format_string is None:
if time_format == "12":
format_string = "%Y-%m-%d %I:%M:%S %p %Z"
else:
format_string = "%Y-%m-%d %H:%M:%S %Z"
bldr = str_bldr()
bldr.append("[**")
bldr.append(datetime_as_timezone(date_time, timezone).strftime(format_string))
bldr.append("**](http://www.wolframalpha.com/input/?i=")
bldr.append(date_time.strftime('%Y-%m-%d %H:%M:%S %Z').replace(" ", "%20"))
bldr.append(" To Local Time".replace(" ", "%20"))
bldr.append(")")
return ''.join(bldr)
def render_time_diff(start_date, end_date):
seconds = int((end_date - start_date).total_seconds())
if seconds > 59:
try:
adjusted_end_date = start_date + relativedelta(seconds=int(min(seconds * 1.02, seconds + 60 * 60 * 24)))
except OverflowError:
adjusted_end_date = datetime_force_utc(datetime(year=9999, month=12, day=31))
delta = relativedelta(adjusted_end_date, start_date)
else:
delta = relativedelta(end_date, start_date)
if delta.years > 0:
return f"{delta.years} year{('s' if delta.years > 1 else '')}"
elif delta.months > 0:
return f"{delta.months} month{('s' if delta.months > 1 else '')}"
elif delta.days > 0:
return f"{delta.days} day{('s' if delta.days > 1 else '')}"
elif delta.hours > 0:
return f"{delta.hours} hour{('s' if delta.hours > 1 else '')}"
elif delta.minutes > 0:
return f"{delta.minutes} minute{('s' if delta.minutes > 1 else '')}"
elif delta.seconds > 0:
return f"{delta.seconds} second{('s' if delta.seconds > 1 else '')}"
else:
return ""
def message_link(message_id, np=False):
return f"https://{('np' if np else 'www')}.reddit.com/message/messages/{message_id}"
def reddit_link(slug, np=False):
return f"https://{('np' if np else 'www')}.reddit.com{slug}"
def id_from_fullname(fullname):
return re.sub(r't\d_', "", fullname)
def datetime_as_timezone(date_time, timezone_string):
if timezone_string is None:
return date_time
else:
return date_time.astimezone(pytz.timezone(timezone_string))
def datetime_as_utc(date_time):
return date_time.astimezone(pytz.utc)
def datetime_force_utc(date_time):
return pytz.utc.localize(date_time)
def time_offset(date_time, hours=0, minutes=0, seconds=0):
if date_time is None:
return True
return date_time < datetime_now() - timedelta(hours=hours, minutes=minutes, seconds=seconds)
def add_years(date_time, years):
try:
return date_time.replace(year=date_time.year + years)
except ValueError:
return date_time + (datetime(date_time.year + years, 3, 1) - datetime(date_time.year, 3, 1))
def get_next_anniversary(account_created_utc):
if account_created_utc is None:
log.info("Account creation date is none")
return datetime_now()
account_created = datetime_from_timestamp(account_created_utc)
next_anniversary = add_years(account_created, datetime_now().year - account_created.year)
if next_anniversary < datetime_now():
next_anniversary = add_years(next_anniversary, 1)
log.debug(
f"Account created {get_datetime_string(account_created)}, anniversary {get_datetime_string(next_anniversary)}")
return next_anniversary
def datetime_now():
if debug_time is None:
return datetime_force_utc(datetime.utcnow().replace(microsecond=0))
else:
return debug_time
def datetime_from_timestamp(timestamp):
return datetime_force_utc(datetime.utcfromtimestamp(timestamp))
def get_datetime_string(date_time, convert_utc=True, format_string="%Y-%m-%d %H:%M:%S"):
if date_time is None:
return ""
if convert_utc:
date_time = datetime_as_utc(date_time)
return date_time.strftime(format_string)
def parse_datetime_string(date_time_string, force_utc=True, format_string="%Y-%m-%d %H:%M:%S"):
if date_time_string is None or date_time_string == "None" or date_time_string == "":
return None
date_time = datetime.strptime(date_time_string, format_string)
if force_utc:
date_time = datetime_force_utc(date_time)
return date_time
def html_encode(message):
return urllib.parse.quote(message, safe='')
def build_message_link(recipient, subject, content=None):
base = "https://np.reddit.com/message/compose/?"
bldr = str_bldr()
bldr.append(f"to={recipient}")
bldr.append(f"subject={html_encode(subject)}")
if content is not None:
bldr.append(f"message={html_encode(content)}")
return base + '&'.join(bldr)
def replace_np(link):
return re.sub(r"(www|old|new)\.reddit\.com", "np.reddit.com", link)
def get_footer(bldr=None):
if bldr is None:
bldr = str_bldr()
bldr.append("\n\n")
bldr.append("*****")
bldr.append("\n\n")
bldr.append("|[^(Info)](")
bldr.append(replace_np(static.INFO_POST))
bldr.append(")|[^(Custom)](")
bldr.append(build_message_link(
static.ACCOUNT_NAME,
"Reminder",
f"[Link or message inside square brackets]\n\n{static.TRIGGER}! Time period here"
))
bldr.append(")")
bldr.append("|[^(Your Reminders)](")
bldr.append(build_message_link(
static.ACCOUNT_NAME,
"List Of Reminders",
"MyReminders!"
))
bldr.append(")")
bldr.append("|[^(Feedback)](")
bldr.append(build_message_link(
static.OWNER,
"RemindMeBot Feedback"
))
bldr.append(")")
bldr.append("|\n|-|-|-|-|")
return bldr
def str_bldr():
return []
def bldr_length(bldr):
length = 0
for item in bldr:
length += len(item)
return length
def requests_available(requests_pending):
if requests_pending == 0:
return 0
elif requests_pending < 200:
return 30
else:
return min(1000, int(requests_pending / 5))
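# Batching heuristic above, sketched with assumed inputs:
#   requests_available(0)     # 0    - nothing pending
#   requests_available(150)   # 30   - small backlog gets a fixed batch
#   requests_available(2000)  # 400  - larger backlogs process a fifth at a time
#   requests_available(9000)  # 1000 - capped at 1000 per pass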
def check_append_context_to_link(link):
if re.search(r"reddit\.com/r/\w+/comments/(\w+/){3}", link):
return link + "?context=3"
else:
return link
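# Minimal smoke test for the datetime helpers, assuming the module's earlier
# imports (datetime, timedelta, pytz) and debug_time = None. Illustrative only,
# not part of the original bot.
if __name__ == "__main__":
    now = datetime_now()
    text = get_datetime_string(now)
    assert parse_datetime_string(text) == now
    assert time_offset(now - timedelta(minutes=5), minutes=1)
    print(f"datetime helpers round-trip OK at {text}")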
| [
"[email protected]"
]
| |
6653fd7aae6111569cd0976c391af665848be25d | 3ae62276c9aad8b9612d3073679b5cf3cb695e38 | /easyleetcode/leetcodes/Leetcode_677_Map_Sum_Pairs.py | 11d79eb447c4cfb0764b3abcb7c2c672576f0286 | [
"Apache-2.0"
]
| permissive | gongtian1234/easy_leetcode | bc0b33c3c4f61d58a6111d76707903efe0510cb4 | d2b8eb5d2cafc71ee1ca633ce489c1a52bcc39ce | refs/heads/master | 2022-11-16T17:48:33.596752 | 2020-07-13T02:55:03 | 2020-07-13T02:55:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35 | py | print('Leetcode_677_Map_Sum_Pairs') | [
"[email protected]"
]
| |
80abf2358c23c7ec917b6220f31a5fdcbd57bea4 | 29fc11636bf5053feb113ad837ec42ffe3f09bfd | /Terra_2/Bug_10/latex.py | eda6df4ac9865f2fa2bb86cbe2736f49d77c29c6 | []
| no_license | deyh2020/Bugs4Q | 8446db4a8efcf3541ba740b7d658b6812d56fe3e | b6e306e12bd1c5fdec126655ad008c386340ab57 | refs/heads/master | 2023-05-31T03:41:28.576877 | 2021-06-17T15:08:03 | 2021-06-17T15:08:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 37,775 | py | # -*- coding: utf-8 -*-
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2018.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
# pylint: disable=invalid-name
"""latex circuit visualization backends."""
import collections
import io
import json
import math
import re
try:
from pylatexenc.latexencode import utf8tolatex
HAS_PYLATEX = True
except ImportError:
HAS_PYLATEX = False
import numpy as np
from qiskit.visualization import qcstyle as _qcstyle
from qiskit.visualization import exceptions
from .tools.pi_check import pi_check
class QCircuitImage:
"""This class contains methods to create \\LaTeX circuit images.
The class targets the \\LaTeX package Q-circuit
(https://arxiv.org/pdf/quant-ph/0406003).
Thanks to Eric Sabo for the initial implementation for Qiskit.
"""
def __init__(self, qubits, clbits, ops, scale, style=None,
plot_barriers=True, reverse_bits=False, layout=None):
"""QCircuitImage initializer.
Args:
qubits (list[Qubit]): list of qubits
clbits (list[Clbit]): list of clbits
ops (list[list[DAGNode]]): list of circuit instructions, grouped by layer
scale (float): image scaling
style (dict or str): dictionary of style or file name of style file
reverse_bits (bool): When set to True reverse the bit order inside
registers for the output visualization.
plot_barriers (bool): Enable/disable drawing barriers in the output
circuit. Defaults to True.
layout (Layout or None): If present, the layout information will be
included.
Raises:
ImportError: If pylatexenc is not installed
"""
if not HAS_PYLATEX:
raise ImportError('The latex and latex_source drawers need '
'pylatexenc installed. Run "pip install '
'pylatexenc" before using the latex or '
'latex_source drawers.')
# style sheet
self._style = _qcstyle.BWStyle()
if style:
if isinstance(style, dict):
self._style.set_style(style)
elif isinstance(style, str):
with open(style, 'r') as infile:
dic = json.load(infile)
self._style.set_style(dic)
# list of lists corresponding to layers of the circuit
self.ops = ops
# image scaling
self.scale = scale
# Map of qregs to sizes
self.qregs = {}
# Map of cregs to sizes
self.cregs = {}
# List of qregs and cregs in order of appearance in code and image
self.ordered_regs = []
# Map from registers to the list they appear in the image
self.img_regs = {}
# Array to hold the \\LaTeX commands to generate a circuit image.
self._latex = []
# Variable to hold image depth (width)
self.img_depth = 0
# Variable to hold image width (height)
self.img_width = 0
# Variable to hold total circuit depth
self.sum_column_widths = 0
# Variable to hold total circuit width
self.sum_row_heights = 0
# em points of separation between circuit columns
self.column_separation = 1
# em points of separation between circuit row
self.row_separation = 0
# presence of "box" or "target" determines row spacing
self.has_box = False
self.has_target = False
self.reverse_bits = reverse_bits
self.layout = layout
self.plot_barriers = plot_barriers
#################################
self.qregs = _get_register_specs(qubits)
self.qubit_list = qubits
self.ordered_regs = qubits + clbits
self.cregs = _get_register_specs(clbits)
self.clbit_list = clbits
self.img_regs = {bit: ind for ind, bit in
enumerate(self.ordered_regs)}
self.img_width = len(self.img_regs)
self.wire_type = {}
for bit in self.ordered_regs:
self.wire_type[bit] = bit.register in self.cregs.keys()
def latex(self, aliases=None):
"""Return LaTeX string representation of circuit.
        This method uses the LaTeX Qcircuit package to create a graphical
representation of the circuit.
Returns:
string: for writing to a LaTeX file.
"""
self._initialize_latex_array(aliases)
self._build_latex_array(aliases)
header_1 = r"""% \documentclass[preview]{standalone}
% If the image is too large to fit on this documentclass use
\documentclass[draft]{beamer}
"""
beamer_line = "\\usepackage[size=custom,height=%d,width=%d,scale=%.1f]{beamerposter}\n"
header_2 = r"""% instead and customize the height and width (in cm) to fit.
% Large images may run out of memory quickly.
% To fix this use the LuaLaTeX compiler, which dynamically
% allocates memory.
\usepackage[braket, qm]{qcircuit}
\usepackage{amsmath}
\pdfmapfile{+sansmathaccent.map}
% \usepackage[landscape]{geometry}
% Comment out the above line if using the beamer documentclass.
\begin{document}
\begin{equation*}"""
qcircuit_line = r"""
\Qcircuit @C=%.1fem @R=%.1fem @!R {
"""
output = io.StringIO()
output.write(header_1)
output.write('%% img_width = %d, img_depth = %d\n' % (self.img_width, self.img_depth))
output.write(beamer_line % self._get_beamer_page())
output.write(header_2)
output.write(qcircuit_line %
(self.column_separation, self.row_separation))
for i in range(self.img_width):
output.write("\t \t")
for j in range(self.img_depth + 1):
cell_str = self._latex[i][j]
# Don't truncate offset float if drawing a barrier
if 'barrier' in cell_str:
output.write(cell_str)
else:
                        # floats can cause a "Dimension too large" latex error
                        # in xymatrix; truncate them to avoid the issue.
cell_str = re.sub(r'[-+]?\d*\.\d{2,}|\d{2,}',
_truncate_float,
cell_str)
output.write(cell_str)
if j != self.img_depth:
output.write(" & ")
else:
output.write(r'\\' + '\n')
output.write('\t }\n')
output.write('\\end{equation*}\n\n')
output.write('\\end{document}')
contents = output.getvalue()
output.close()
return contents
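    # Hypothetical call pattern (names assumed; this class is normally driven
    # by qiskit's circuit_drawer rather than instantiated directly):
    #   qcimg = QCircuitImage(qubits, clbits, ops, scale=0.7)
    #   with open("circuit.tex", "w") as f:
    #       f.write(qcimg.latex())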
def _initialize_latex_array(self, aliases=None):
del aliases # unused
self.img_depth, self.sum_column_widths = self._get_image_depth()
self.sum_row_heights = self.img_width
# choose the most compact row spacing, while not squashing them
if self.has_box:
self.row_separation = 0.0
elif self.has_target:
self.row_separation = 0.2
else:
self.row_separation = 1.0
self._latex = [
["\\cw" if self.wire_type[self.ordered_regs[j]]
else "\\qw" for _ in range(self.img_depth + 1)]
for j in range(self.img_width)]
self._latex.append([" "] * (self.img_depth + 1))
for i in range(self.img_width):
if self.wire_type[self.ordered_regs[i]]:
self._latex[i][0] = "\\lstick{" + self.ordered_regs[i].register.name + \
"_{" + str(self.ordered_regs[i].index) + "}" + \
": 0}"
else:
if self.layout is None:
self._latex[i][0] = "\\lstick{{ {}_{{{}}} : \\ket{{0}} }}".format(
self.ordered_regs[i].register.name, self.ordered_regs[i].index)
else:
self._latex[i][0] = "\\lstick{{({}_{{{}}})~q_{{{}}} : \\ket{{0}} }}".format(
self.layout[self.ordered_regs[i].index].register.name,
self.layout[self.ordered_regs[i].index].index,
self.ordered_regs[i].index)
def _get_image_depth(self):
"""Get depth information for the circuit.
Returns:
int: number of columns in the circuit
int: total size of columns in the circuit
"""
max_column_widths = []
# Determine row spacing before image depth
for layer in self.ops:
for op in layer:
# useful information for determining row spacing
boxed_gates = ['u0', 'u1', 'u2', 'u3', 'x', 'y', 'z', 'h', 's',
'sdg', 't', 'tdg', 'rx', 'ry', 'rz', 'ch', 'cy',
'crz', 'cu3', 'id']
target_gates = ['cx', 'ccx']
if op.name in boxed_gates:
self.has_box = True
if op.name in target_gates:
self.has_target = True
for layer in self.ops:
# store the max width for the layer
current_max = 0
for op in layer:
# update current op width
arg_str_len = 0
# the wide gates
for arg in op.op.params:
arg_str = re.sub(r'[-+]?\d*\.\d{2,}|\d{2,}',
_truncate_float, str(arg))
arg_str_len += len(arg_str)
# the width of the column is the max of all the gates in the column
current_max = max(arg_str_len, current_max)
max_column_widths.append(current_max)
# wires in the beginning and end
columns = 2
        # all gates take up 1 column except for those with labels (cu1), which take 2
columns += sum([2 if nd.name == 'cu1' else 1 for layer in self.ops for nd in layer])
# every 3 characters is roughly one extra 'unit' of width in the cell
# the gate name is 1 extra 'unit'
# the qubit/cbit labels plus initial states is 2 more
# the wires poking out at the ends is 2 more
sum_column_widths = sum(1 + v / 3 for v in max_column_widths)
max_reg_name = 3
for reg in self.ordered_regs:
max_reg_name = max(max_reg_name,
len(reg.register.name))
sum_column_widths += 5 + max_reg_name / 3
# could be a fraction so ceil
return columns, math.ceil(sum_column_widths)
def _get_beamer_page(self):
"""Get height, width & scale attributes for the beamer page.
Returns:
tuple: (height, width, scale) desirable page attributes
"""
# PIL python package limits image size to around a quarter gigabyte
# this means the beamer image should be limited to < 50000
# if you want to avoid a "warning" too, set it to < 25000
PIL_limit = 40000
# the beamer latex template limits each dimension to < 19 feet
# (i.e. 575cm)
beamer_limit = 550
# columns are roughly twice as big as rows
aspect_ratio = self.sum_row_heights / self.sum_column_widths
# choose a page margin so circuit is not cropped
margin_factor = 1.5
height = min(self.sum_row_heights * margin_factor, beamer_limit)
width = min(self.sum_column_widths * margin_factor, beamer_limit)
# if too large, make it fit
if height * width > PIL_limit:
height = min(np.sqrt(PIL_limit * aspect_ratio), beamer_limit)
width = min(np.sqrt(PIL_limit / aspect_ratio), beamer_limit)
# if too small, give it a minimum size
height = max(height, 10)
width = max(width, 10)
return (height, width, self.scale)
def _get_mask(self, creg_name):
mask = 0
for index, cbit in enumerate(self.clbit_list):
if creg_name == cbit.register:
mask |= (1 << index)
return mask
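    # Sketch of _get_mask (illustrative): creg_name actually receives a register
    # object; with clbit_list = [Clbit(c0, 0), Clbit(c0, 1), Clbit(c1, 0)] and
    # the c0 register passed in, the mask is 0b011 -- one bit per matching slot.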
def _build_latex_array(self, aliases=None):
"""Returns an array of strings containing \\LaTeX for this circuit.
If aliases is not None, aliases contains a dict mapping
the current qubits in the circuit to new qubit names.
We will deduce the register names and sizes from aliases.
"""
# Rename qregs if necessary
if aliases:
qregdata = {}
for q in aliases.values():
if q[0] not in qregdata:
qregdata[q[0]] = q[1] + 1
elif qregdata[q[0]] < q[1] + 1:
qregdata[q[0]] = q[1] + 1
else:
qregdata = self.qregs
column = 1
for layer in self.ops:
num_cols_used = 1
for op in layer:
if op.condition:
mask = self._get_mask(op.condition[0])
cl_reg = self.clbit_list[self._ffs(mask)]
if_reg = cl_reg.register
pos_2 = self.img_regs[cl_reg]
if_value = format(op.condition[1],
'b').zfill(self.cregs[if_reg])[::-1]
if op.name not in ['measure', 'barrier', 'snapshot', 'load',
'save', 'noise']:
nm = utf8tolatex(op.name).replace(" ", "\\,")
qarglist = op.qargs
                    if aliases is not None:
                        # map() is lazy in Python 3; materialize the result so
                        # len() and repeated indexing below keep working
                        qarglist = [aliases[x] for x in qarglist]
if len(qarglist) == 1:
pos_1 = self.img_regs[qarglist[0]]
if op.condition:
mask = self._get_mask(op.condition[0])
cl_reg = self.clbit_list[self._ffs(mask)]
if_reg = cl_reg.register
pos_2 = self.img_regs[cl_reg]
if nm == "x":
self._latex[pos_1][column] = "\\gate{X}"
elif nm == "y":
self._latex[pos_1][column] = "\\gate{Y}"
elif nm == "z":
self._latex[pos_1][column] = "\\gate{Z}"
elif nm == "h":
self._latex[pos_1][column] = "\\gate{H}"
elif nm == "s":
self._latex[pos_1][column] = "\\gate{S}"
elif nm == "sdg":
self._latex[pos_1][column] = "\\gate{S^\\dag}"
elif nm == "t":
self._latex[pos_1][column] = "\\gate{T}"
elif nm == "tdg":
self._latex[pos_1][column] = "\\gate{T^\\dag}"
elif nm == "u0":
self._latex[pos_1][column] = "\\gate{U_0(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "u1":
self._latex[pos_1][column] = "\\gate{U_1(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "u2":
self._latex[pos_1][column] = \
"\\gate{U_2\\left(%s,%s\\right)}" % (
pi_check(op.op.params[0], output='latex'),
pi_check(op.op.params[1], output='latex'))
elif nm == "u3":
self._latex[pos_1][column] = ("\\gate{U_3(%s,%s,%s)}" % (
pi_check(op.op.params[0], output='latex'),
pi_check(op.op.params[1], output='latex'),
pi_check(op.op.params[2], output='latex')))
elif nm == "rx":
self._latex[pos_1][column] = "\\gate{R_x(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "ry":
self._latex[pos_1][column] = "\\gate{R_y(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "rz":
self._latex[pos_1][column] = "\\gate{R_z(%s)}" % (
pi_check(op.op.params[0], output='latex'))
else:
self._latex[pos_1][column] = ("\\gate{%s}" % nm)
gap = pos_2 - pos_1
for i in range(self.cregs[if_reg]):
if if_value[i] == '1':
self._latex[pos_2 + i][column] = \
"\\control \\cw \\cwx[-" + str(gap) + "]"
gap = 1
else:
self._latex[pos_2 + i][column] = \
"\\controlo \\cw \\cwx[-" + str(gap) + "]"
gap = 1
else:
if nm == "x":
self._latex[pos_1][column] = "\\gate{X}"
elif nm == "y":
self._latex[pos_1][column] = "\\gate{Y}"
elif nm == "z":
self._latex[pos_1][column] = "\\gate{Z}"
elif nm == "h":
self._latex[pos_1][column] = "\\gate{H}"
elif nm == "s":
self._latex[pos_1][column] = "\\gate{S}"
elif nm == "sdg":
self._latex[pos_1][column] = "\\gate{S^\\dag}"
elif nm == "t":
self._latex[pos_1][column] = "\\gate{T}"
elif nm == "tdg":
self._latex[pos_1][column] = "\\gate{T^\\dag}"
elif nm == "u0":
self._latex[pos_1][column] = "\\gate{U_0(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "u1":
self._latex[pos_1][column] = "\\gate{U_1(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "u2":
self._latex[pos_1][column] = \
"\\gate{U_2\\left(%s,%s\\right)}" % (
pi_check(op.op.params[0], output='latex'),
pi_check(op.op.params[1], output='latex'))
elif nm == "u3":
self._latex[pos_1][column] = ("\\gate{U_3(%s,%s,%s)}" % (
pi_check(op.op.params[0], output='latex'),
pi_check(op.op.params[1], output='latex'),
pi_check(op.op.params[2], output='latex')))
elif nm == "rx":
self._latex[pos_1][column] = "\\gate{R_x(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "ry":
self._latex[pos_1][column] = "\\gate{R_y(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "rz":
self._latex[pos_1][column] = "\\gate{R_z(%s)}" % (
pi_check(op.op.params[0], output='latex'))
elif nm == "reset":
self._latex[pos_1][column] = (
"\\push{\\rule{.6em}{0em}\\ket{0}\\"
"rule{.2em}{0em}} \\qw")
else:
self._latex[pos_1][column] = ("\\gate{%s}" % nm)
elif len(qarglist) == 2:
pos_1 = self.img_regs[qarglist[0]]
pos_2 = self.img_regs[qarglist[1]]
if op.condition:
                        pos_3 = self.img_regs[if_reg[0]]  # img_regs is keyed by bits, not (reg, idx) tuples
temp = [pos_1, pos_2, pos_3]
temp.sort(key=int)
bottom = temp[1]
gap = pos_3 - bottom
for i in range(self.cregs[if_reg]):
if if_value[i] == '1':
self._latex[pos_3 + i][column] = \
"\\control \\cw \\cwx[-" + str(gap) + "]"
gap = 1
else:
self._latex[pos_3 + i][column] = \
"\\controlo \\cw \\cwx[-" + str(gap) + "]"
gap = 1
if nm == "cx":
self._latex[pos_1][column] = \
"\\ctrl{" + str(pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\targ"
elif nm == "cz":
self._latex[pos_1][column] = \
"\\ctrl{" + str(pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\control\\qw"
elif nm == "cy":
self._latex[pos_1][column] = \
"\\ctrl{" + str(pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\gate{Y}"
elif nm == "ch":
self._latex[pos_1][column] = \
"\\ctrl{" + str(pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\gate{H}"
elif nm == "swap":
self._latex[pos_1][column] = "\\qswap"
self._latex[pos_2][column] = \
"\\qswap \\qwx[" + str(pos_1 - pos_2) + "]"
elif nm == "crz":
self._latex[pos_1][column] = \
"\\ctrl{" + str(pos_2 - pos_1) + "}"
self._latex[pos_2][column] = \
"\\gate{R_z(%s)}" % (pi_check(op.op.params[0], output='latex'))
elif nm == "cu1":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\control \\qw"
self._latex[min(pos_1, pos_2)][column + 1] = \
"\\dstick{%s}\\qw" % (pi_check(op.op.params[0], output='latex'))
self._latex[max(pos_1, pos_2)][column + 1] = "\\qw"
# this is because this gate takes up 2 columns,
# and we have just written to the next column
num_cols_used = 2
elif nm == "cu3":
self._latex[pos_1][column] = \
"\\ctrl{" + str(pos_2 - pos_1) + "}"
self._latex[pos_2][column] = \
"\\gate{U_3(%s,%s,%s)}" % \
(pi_check(op.op.params[0], output='latex'),
pi_check(op.op.params[1], output='latex'),
pi_check(op.op.params[2], output='latex'))
else:
temp = [pos_1, pos_2]
temp.sort(key=int)
if nm == "cx":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\targ"
elif nm == "cz":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\control\\qw"
elif nm == "cy":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\gate{Y}"
elif nm == "ch":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\gate{H}"
elif nm == "swap":
self._latex[pos_1][column] = "\\qswap"
self._latex[pos_2][column] = \
"\\qswap \\qwx[" + str(pos_1 - pos_2) + "]"
elif nm == "crz":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = \
"\\gate{R_z(%s)}" % (pi_check(op.op.params[0], output='latex'))
elif nm == "cu1":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\control \\qw"
self._latex[min(pos_1, pos_2)][column + 1] = \
"\\dstick{%s}\\qw" % (pi_check(op.op.params[0], output='latex'))
self._latex[max(pos_1, pos_2)][column + 1] = "\\qw"
num_cols_used = 2
elif nm == "cu3":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = \
("\\gate{U_3(%s,%s,%s)}" %
(pi_check(op.op.params[0], output='latex'),
pi_check(op.op.params[1], output='latex'),
pi_check(op.op.params[2], output='latex')))
else:
start_pos = min([pos_1, pos_2])
stop_pos = max([pos_1, pos_2])
if stop_pos - start_pos >= 2:
delta = stop_pos - start_pos
self._latex[start_pos][column] = ("\\multigate{%s}{%s}"
% (delta, nm))
for i_pos in range(start_pos + 1, stop_pos + 1):
self._latex[i_pos][column] = ("\\ghost{%s}"
% nm)
else:
self._latex[start_pos][column] = ("\\multigate{1}{%s}"
% nm)
self._latex[stop_pos][column] = ("\\ghost{%s}" %
nm)
elif len(qarglist) == 3:
pos_1 = self.img_regs[qarglist[0]]
pos_2 = self.img_regs[qarglist[1]]
pos_3 = self.img_regs[qarglist[2]]
if op.condition:
                        pos_4 = self.img_regs[if_reg[0]]  # keyed by the register's first bit
temp = [pos_1, pos_2, pos_3, pos_4]
temp.sort(key=int)
bottom = temp[2]
gap = pos_4 - bottom
for i in range(self.cregs[if_reg]):
if if_value[i] == '1':
self._latex[pos_4 + i][column] = \
"\\control \\cw \\cwx[-" + str(gap) + "]"
gap = 1
else:
self._latex[pos_4 + i][column] = \
"\\controlo \\cw \\cwx[-" + str(gap) + "]"
gap = 1
if nm == "ccx":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\ctrl{" + str(
pos_3 - pos_2) + "}"
self._latex[pos_3][column] = "\\targ"
if nm == "cswap":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\qswap"
self._latex[pos_3][column] = \
"\\qswap \\qwx[" + str(pos_2 - pos_3) + "]"
else:
if nm == "ccx":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\ctrl{" + str(
pos_3 - pos_2) + "}"
self._latex[pos_3][column] = "\\targ"
elif nm == "cswap":
self._latex[pos_1][column] = "\\ctrl{" + str(
pos_2 - pos_1) + "}"
self._latex[pos_2][column] = "\\qswap"
self._latex[pos_3][column] = \
"\\qswap \\qwx[" + str(pos_2 - pos_3) + "]"
else:
start_pos = min([pos_1, pos_2, pos_3])
stop_pos = max([pos_1, pos_2, pos_3])
if stop_pos - start_pos >= 3:
delta = stop_pos - start_pos
self._latex[start_pos][column] = ("\\multigate{%s}{%s}" %
(delta, nm))
for i_pos in range(start_pos + 1, stop_pos + 1):
self._latex[i_pos][column] = ("\\ghost{%s}" %
nm)
else:
self._latex[pos_1][column] = ("\\multigate{2}{%s}" %
nm)
self._latex[pos_2][column] = ("\\ghost{%s}" %
nm)
self._latex[pos_3][column] = ("\\ghost{%s}" %
nm)
elif len(qarglist) > 3:
nbits = len(qarglist)
pos_array = [self.img_regs[qarglist[0]]]
for i in range(1, nbits):
pos_array.append(self.img_regs[qarglist[i]])
pos_start = min(pos_array)
pos_stop = max(pos_array)
self._latex[pos_start][column] = ("\\multigate{%s}{%s}" %
(nbits - 1, nm))
for pos in range(pos_start + 1, pos_stop + 1):
self._latex[pos][column] = ("\\ghost{%s}" % nm)
elif op.name == "measure":
if (len(op.cargs) != 1
or len(op.qargs) != 1
or op.op.params):
raise exceptions.VisualizationError("bad operation record")
if op.condition:
raise exceptions.VisualizationError(
"If controlled measures currently not supported.")
                    if aliases:
                        # aliases is keyed by qubit, as in the gate branch above
                        newq = aliases[op.qargs[0]]
                        qname = newq[0]
                        qindex = newq[1]
pos_1 = self.img_regs[op.qargs[0]]
pos_2 = self.img_regs[op.cargs[0]]
try:
self._latex[pos_1][column] = "\\meter"
self._latex[pos_2][column] = \
"\\cw \\cwx[-" + str(pos_2 - pos_1) + "]"
except Exception as e:
raise exceptions.VisualizationError(
'Error during Latex building: %s' % str(e))
elif op.name in ['barrier', 'snapshot', 'load', 'save',
'noise']:
if self.plot_barriers:
qarglist = op.qargs
indexes = [self._get_qubit_index(x) for x in qarglist]
indexes.sort()
                        if aliases is not None:
                            qarglist = [aliases[x] for x in qarglist]  # materialize; map() is lazy
first = last = indexes[0]
for index in indexes[1:]:
if index - 1 == last:
last = index
else:
pos = self.img_regs[self.qubit_list[first]]
self._latex[pos][column - 1] += " \\barrier[0em]{" + str(
last - first) + "}"
self._latex[pos][column] = "\\qw"
first = last = index
pos = self.img_regs[self.qubit_list[first]]
self._latex[pos][column - 1] += " \\barrier[0em]{" + str(
last - first) + "}"
self._latex[pos][column] = "\\qw"
else:
raise exceptions.VisualizationError("bad node data")
# increase the number of columns by the number of columns this layer used
column += num_cols_used
def _get_qubit_index(self, qubit):
"""Get the index number for a quantum bit.
Args:
            qubit (Qubit): the quantum bit to locate in the circuit's
                qubit list
Returns:
int: The index in the bit list
Raises:
VisualizationError: If the bit isn't found
"""
for i, bit in enumerate(self.qubit_list):
if qubit == bit:
qindex = i
break
else:
raise exceptions.VisualizationError("unable to find bit for operation")
return qindex
def _ffs(self, mask):
"""Find index of first set bit.
Args:
mask (int): integer to search
Returns:
int: index of the first set bit.
"""
origin = (mask & (-mask)).bit_length()
return origin - 1
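    # Example (illustrative): for mask 0b0100, mask & -mask isolates the lowest
    # set bit (0b100), whose bit_length() is 3, so _ffs returns index 2.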
def _get_register_specs(bits):
"""Get the number and size of unique registers from bits list.
Args:
bits (list[Bit]): this list is of the form::
[Qubit(v0, 0), Qubit(v0, 1), Qubit(v0, 2), Qubit(v0, 3), Qubit(v1, 0)]
which indicates a size-4 register and a size-1 register
Returns:
OrderedDict: ordered map of Registers to their sizes
"""
regs = collections.OrderedDict([(bit.register, bit.register.size) for bit in bits])
return regs
def _truncate_float(matchobj, ndigits=3):
"""Truncate long floats
Args:
matchobj (re.Match): contains original float
ndigits (int): Number of digits to print
Returns:
str: returns truncated float
"""
if matchobj.group(0):
return '%.{}g'.format(ndigits) % float(matchobj.group(0))
return ''
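# Illustrative use of _truncate_float with the same regex the drawer applies:
#   re.sub(r'[-+]?\d*\.\d{2,}|\d{2,}', _truncate_float, "0.123456")  # -> '0.123'
# '%.3g' keeps three significant digits, which avoids xymatrix's
# "Dimension too large" error on long floats.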
| [
"[email protected]"
]
| |
58668ff0ee439f72a7abb030a5a1180a8f974d1e | 7882860350c714e6c08368288dab721288b8d9db | /1일차/func(1번문제).py | 30e5d8dec0d211d9db0a6a342ba840820f468502 | []
| no_license | park-seonju/Algorithm | 682fca984813a54b92a3f2ab174e4f05a95921a8 | 30e5bcb756e9388693624e8880e57bc92bfda969 | refs/heads/master | 2023-08-11T18:23:49.644259 | 2021-09-27T10:07:49 | 2021-09-27T10:07:49 | 388,741,922 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py | # def abc(t):
# for i in range(len(t)):
# if t[i] != t[len(t)-1-i]:
# return False
# return True
# word = input()
# print(word)
# if abc(word):
# print("์
๋ ฅํ์ ๋จ์ด๋ ํ๋ฌธ(Palindrome)์
๋๋ค.")
# else :
# print("์
๋ ฅํ์ ๋จ์ด๋ ํ๋ฌธ(Palindrome)์ด ์๋๋๋ค.")
a = input()
b = ''.join(reversed(a))  # str(reversed(a)) would be the iterator's repr, not the reversed text
if a == b:
    print('O')
"[email protected]"
]
| |
f8e1c79af7f8ff238e4aa312ef2ee68f1e70f4a8 | 03c00aa07607c1f206c0fb3cf00fc5c510d7a4bf | /Infoplus/apis/carrier_service_api.py | 3666fa7df7e4744314451fd9fceb448261b997ef | []
| no_license | infopluscommerce/infoplus-python-client | 748cc9af739615036c52adce70aa7f4303601b97 | bde657057fedb5396ecf6c42e8ba668456bd1c43 | refs/heads/master | 2023-08-23T11:32:01.160320 | 2017-03-17T14:43:15 | 2017-03-17T14:43:15 | 58,404,638 | 2 | 2 | null | null | null | null | UTF-8 | Python | false | false | 7,611 | py | # coding: utf-8
"""
CarrierServiceApi.py
Copyright 2016 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
import sys
import os
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class CarrierServiceApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
config = Configuration()
if api_client:
self.api_client = api_client
else:
if not config.api_client:
config.api_client = ApiClient()
self.api_client = config.api_client
def get_carrier_service_by_id(self, carrier_service_id, **kwargs):
"""
Get a carrierService by id
Returns the carrierService identified by the specified id.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_carrier_service_by_id(carrier_service_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str carrier_service_id: Id of carrierService to be returned. (required)
:return: CarrierService
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['carrier_service_id']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_carrier_service_by_id" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'carrier_service_id' is set
if ('carrier_service_id' not in params) or (params['carrier_service_id'] is None):
raise ValueError("Missing the required parameter `carrier_service_id` when calling `get_carrier_service_by_id`")
resource_path = '/beta/carrierService/{carrierServiceId}'.replace('{format}', 'json')
path_params = {}
if 'carrier_service_id' in params:
path_params['carrierServiceId'] = params['carrier_service_id']
query_params = {}
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['api_key']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='CarrierService',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
def get_carrier_service_by_search_text(self, **kwargs):
"""
Search carrierServices
Returns the list of carrierServices that match the given searchText.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_carrier_service_by_search_text(callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str search_text: Search text, used to filter results.
:param int page: Result page number. Defaults to 1.
:param int limit: Maximum results per page. Defaults to 20. Max allowed value is 250.
:return: list[CarrierService]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['search_text', 'page', 'limit']
all_params.append('callback')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_carrier_service_by_search_text" % key
)
params[key] = val
del params['kwargs']
resource_path = '/beta/carrierService/search'.replace('{format}', 'json')
path_params = {}
query_params = {}
if 'search_text' in params:
query_params['searchText'] = params['search_text']
if 'page' in params:
query_params['page'] = params['page']
if 'limit' in params:
query_params['limit'] = params['limit']
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
if not header_params['Accept']:
del header_params['Accept']
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type([])
# Authentication setting
auth_settings = ['api_key']
response = self.api_client.call_api(resource_path, 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[CarrierService]',
auth_settings=auth_settings,
callback=params.get('callback'))
return response
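# Hypothetical usage sketch (setup names assumed, not taken from this file;
# swagger-codegen Python clients of this era are typically configured this way):
#   from Infoplus.configuration import Configuration
#   from Infoplus.apis.carrier_service_api import CarrierServiceApi
#   Configuration().api_key['API-Key'] = 'YOUR-KEY'   # assumed header name
#   api = CarrierServiceApi()
#   for svc in api.get_carrier_service_by_search_text(search_text='ups', limit=5):
#       print(svc)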
| [
"[email protected]"
]
| |
5964837f4eff397b07abd2f9cf033fbca7a302c2 | 6540f5a1ba587fb06067a09e67d0603e22305631 | /apps/users/admin.py | 7a124deede616de4741134a5ccf003d45c473533 | []
| no_license | bluehawkarthur/ebil | 3ff8171672baaebeacfd95f8fb68c9df3e0d54ad | 9d3d5291f2aa3f0822047cd88d398add4b987c54 | refs/heads/master | 2021-01-17T11:18:24.173696 | 2016-10-06T22:20:53 | 2016-10-06T22:20:53 | 46,352,506 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | from django.contrib import admin
from .models import User, Personajuridica
admin.site.register(User)
admin.site.register(Personajuridica)
| [
"[email protected]"
]
|