body_hash (string, 64 chars) | body (string, 23–109k chars) | docstring (string, 1–57k chars) | path (string, 4–198 chars) | name (string, 1–115 chars) | repository_name (string, 7–111 chars) | repository_stars (float64, 0–191k) | lang (string, 1 class: python) | body_without_docstring (string, 14–108k chars) | unified (string, 45–133k chars)
---|---|---|---|---|---|---|---|---|---
b529b45680297492d77c471f823e863c8ad253f494f92d294818b9a5b7852735
|
def load_data(training_file_path: str, dev_file_path: str, batch_size: int, max_length: int, device, is_bert_model=True):
'\n loads training data from csv files\n :param batch_size:\n :param max_length:\n :param device:\n :param is_bert_model:\n :param training_file_path:\n :param dev_file_path:\n :return:\n '
bert_tokenizer = BertTokenizer.from_pretrained('bert-base-german-cased', do_lower_case=False)
def tokenize(sent):
return bert_tokenizer.tokenize(sent)
def tokenize_sentence(sentence: str):
if is_bert_model:
sentence = (('[CLS] ' + sentence) + ' [SEP]')
tokens = tokenize(sentence)
return list(tokens)
TEXT_FIELD = Field(tokenize=tokenize_sentence, sequential=True, lower=False, fix_length=max_length, pad_token='[PAD]', batch_first=(not is_bert_model), use_vocab=(not is_bert_model))
def numericalize(seq_lists, device=device):
ids = []
for seq in seq_lists:
ids.append(bert_tokenizer.convert_tokens_to_ids(seq))
return torch.LongTensor(ids).to(device)
TEXT_FIELD.numericalize = numericalize
CLASS_FIELD = Field(sequential=False)
POSITION_FIELD = Field(sequential=False, use_vocab=False)
data_fields = ([('sample_class', CLASS_FIELD), ('sample', TEXT_FIELD), ('position', POSITION_FIELD), ('previous_classes', CLASS_FIELD), ('previous_sample', TEXT_FIELD), ('sample_pos', TEXT_FIELD)] + [(('sample_neg_' + str(i)), TEXT_FIELD) for i in range(NEGATIVE_SAMPLE_SIZE)])
train_data = TabularDataset(path=training_file_path, format='tsv', fields=data_fields, skip_header=True)
dev_data = TabularDataset(path=dev_file_path, format='tsv', fields=data_fields, skip_header=True)
train_iter = BucketIterator(train_data, batch_size=batch_size, device=device, sort=True, sort_key=(lambda x: x.sample), sort_within_batch=True, shuffle=True)
dev_iter = BucketIterator(dev_data, batch_size=batch_size, device=device, sort=True, sort_key=(lambda x: x.sample), sort_within_batch=True, shuffle=True)
CLASS_FIELD.build_vocab(train_data)
if (not is_bert_model):
TEXT_FIELD.build_vocab(train_data)
return (train_iter, dev_iter, CLASS_FIELD, TEXT_FIELD, data_fields)
|
loads training data from csv files
:param batch_size:
:param max_length:
:param device:
:param is_bert_model:
:param training_file_path:
:param dev_file_path:
:return:
|
utility/training.py
|
load_data
|
RobinRojowiec/intent-recognition-in-doctor-patient-interviews
| 0 |
python
|
def load_data(training_file_path: str, dev_file_path: str, batch_size: int, max_length: int, device, is_bert_model=True):
'\n loads training data from csv files\n :param batch_size:\n :param max_length:\n :param device:\n :param is_bert_model:\n :param training_file_path:\n :param dev_file_path:\n :return:\n '
bert_tokenizer = BertTokenizer.from_pretrained('bert-base-german-cased', do_lower_case=False)
def tokenize(sent):
return bert_tokenizer.tokenize(sent)
def tokenize_sentence(sentence: str):
if is_bert_model:
sentence = (('[CLS] ' + sentence) + ' [SEP]')
tokens = tokenize(sentence)
return list(tokens)
TEXT_FIELD = Field(tokenize=tokenize_sentence, sequential=True, lower=False, fix_length=max_length, pad_token='[PAD]', batch_first=(not is_bert_model), use_vocab=(not is_bert_model))
def numericalize(seq_lists, device=device):
ids = []
for seq in seq_lists:
ids.append(bert_tokenizer.convert_tokens_to_ids(seq))
return torch.LongTensor(ids).to(device)
TEXT_FIELD.numericalize = numericalize
CLASS_FIELD = Field(sequential=False)
POSITION_FIELD = Field(sequential=False, use_vocab=False)
data_fields = ([('sample_class', CLASS_FIELD), ('sample', TEXT_FIELD), ('position', POSITION_FIELD), ('previous_classes', CLASS_FIELD), ('previous_sample', TEXT_FIELD), ('sample_pos', TEXT_FIELD)] + [(('sample_neg_' + str(i)), TEXT_FIELD) for i in range(NEGATIVE_SAMPLE_SIZE)])
train_data = TabularDataset(path=training_file_path, format='tsv', fields=data_fields, skip_header=True)
dev_data = TabularDataset(path=dev_file_path, format='tsv', fields=data_fields, skip_header=True)
train_iter = BucketIterator(train_data, batch_size=batch_size, device=device, sort=True, sort_key=(lambda x: x.sample), sort_within_batch=True, shuffle=True)
dev_iter = BucketIterator(dev_data, batch_size=batch_size, device=device, sort=True, sort_key=(lambda x: x.sample), sort_within_batch=True, shuffle=True)
CLASS_FIELD.build_vocab(train_data)
if (not is_bert_model):
TEXT_FIELD.build_vocab(train_data)
return (train_iter, dev_iter, CLASS_FIELD, TEXT_FIELD, data_fields)
|
def load_data(training_file_path: str, dev_file_path: str, batch_size: int, max_length: int, device, is_bert_model=True):
'\n loads training data from csv files\n :param batch_size:\n :param max_length:\n :param device:\n :param is_bert_model:\n :param training_file_path:\n :param dev_file_path:\n :return:\n '
bert_tokenizer = BertTokenizer.from_pretrained('bert-base-german-cased', do_lower_case=False)
def tokenize(sent):
return bert_tokenizer.tokenize(sent)
def tokenize_sentence(sentence: str):
if is_bert_model:
sentence = (('[CLS] ' + sentence) + ' [SEP]')
tokens = tokenize(sentence)
return list(tokens)
TEXT_FIELD = Field(tokenize=tokenize_sentence, sequential=True, lower=False, fix_length=max_length, pad_token='[PAD]', batch_first=(not is_bert_model), use_vocab=(not is_bert_model))
def numericalize(seq_lists, device=device):
ids = []
for seq in seq_lists:
ids.append(bert_tokenizer.convert_tokens_to_ids(seq))
return torch.LongTensor(ids).to(device)
TEXT_FIELD.numericalize = numericalize
CLASS_FIELD = Field(sequential=False)
POSITION_FIELD = Field(sequential=False, use_vocab=False)
data_fields = ([('sample_class', CLASS_FIELD), ('sample', TEXT_FIELD), ('position', POSITION_FIELD), ('previous_classes', CLASS_FIELD), ('previous_sample', TEXT_FIELD), ('sample_pos', TEXT_FIELD)] + [(('sample_neg_' + str(i)), TEXT_FIELD) for i in range(NEGATIVE_SAMPLE_SIZE)])
train_data = TabularDataset(path=training_file_path, format='tsv', fields=data_fields, skip_header=True)
dev_data = TabularDataset(path=dev_file_path, format='tsv', fields=data_fields, skip_header=True)
train_iter = BucketIterator(train_data, batch_size=batch_size, device=device, sort=True, sort_key=(lambda x: x.sample), sort_within_batch=True, shuffle=True)
dev_iter = BucketIterator(dev_data, batch_size=batch_size, device=device, sort=True, sort_key=(lambda x: x.sample), sort_within_batch=True, shuffle=True)
CLASS_FIELD.build_vocab(train_data)
if (not is_bert_model):
TEXT_FIELD.build_vocab(train_data)
return (train_iter, dev_iter, CLASS_FIELD, TEXT_FIELD, data_fields)<|docstring|>loads training data from csv files
:param batch_size:
:param max_length:
:param device:
:param is_bert_model:
:param training_file_path:
:param dev_file_path:
:return:<|endoftext|>
|
701802d1dca43a05f55665110689940b150f0b62b38f322d2f4aeb5a753ed9db
|
def train_model(model_class, criterion, device, config: Configuration, skip=False, patience=5, task='classification', bert_preprocessing=True, margin=0.2, log_train_loss=False):
'\n trains a model using the provide configuration\n :param skip:\n :param model:\n :param config:\n :return:\n '
TRAINING_DATA_FILE = config.get_string(ModelParameter.TRAINING_FILE)
DEV_DATA_FILE = config.get_string(ModelParameter.DEV_FILE)
BATCH_SIZE = config.get_int(ModelParameter.BATCH_SIZE)
MAX_LENGTH = config.get_int(ModelParameter.MAX_LENGTH)
(train_iterator, dev_iterator, class_field, text_field, data_fields) = load_data(TRAINING_DATA_FILE, DEV_DATA_FILE, BATCH_SIZE, MAX_LENGTH, device=device, is_bert_model=bert_preprocessing)
EPOCHS = config.get_int(ModelParameter.EPOCHS)
model = model_class(config, (len(class_field.vocab.freqs) + 1), class_field=class_field, device=device).to(device)
LR = config.get_float(ModelParameter.LEARNING_RATE)
WEIGHT_DECAY = config.get_float(ModelParameter.WEIGHT_DECAY)
OPTIMIZER_NAME = config.get_string(ModelParameter.OPTIMIZER)
OPTIMIZER = get_optimizer(model, OPTIMIZER_NAME, **{'lr': LR, 'weight_decay': WEIGHT_DECAY})
if (not skip):
protocol = TrainingProtocol('../protocols/', model_class.__name__, ['map', 'acc'])
print('Starting training...')
max_acc_score = 0.0
early_stopping = EarlyStopping(max_acc_score, patience=patience)
for epoch in range(1, (EPOCHS + 1)):
print('\n', ('Epoch %i of %i' % (epoch, EPOCHS)), '\n')
model.train()
epoch_loss = 0.0
for batch in tqdm(train_iterator):
(loss, _) = get_loss(model, batch, task, criterion, device, margin=margin)
epoch_loss += loss.item()
if (loss.item() > 0):
OPTIMIZER.zero_grad()
loss.backward()
OPTIMIZER.step()
del loss
print(('Epoch Loss: %f' % epoch_loss))
(acc, map, dev_loss) = evaluate_epoch(model, dev_iterator, criterion, batch_size=BATCH_SIZE, device=device, task=task)
if (acc > max_acc_score):
max_acc_score = acc
save_snapshot(model, OPTIMIZER, epoch, epoch_loss, max_acc_score, '../snapshots')
if log_train_loss:
(train_acc, _, epoch_loss) = evaluate_epoch(model, dev_iterator, criterion, batch_size=BATCH_SIZE, device=device, task=task)
protocol.log_epoch(epoch, epoch_loss, dev_loss, map, acc)
del epoch_loss
if early_stopping.update(max_acc_score):
print('Stopping training...')
break
load_snapshot(model, directory='../snapshots')
return (model, data_fields, criterion)
|
trains a model using the provide configuration
:param skip:
:param model:
:param config:
:return:
|
utility/training.py
|
train_model
|
RobinRojowiec/intent-recognition-in-doctor-patient-interviews
| 0 |
python
|
def train_model(model_class, criterion, device, config: Configuration, skip=False, patience=5, task='classification', bert_preprocessing=True, margin=0.2, log_train_loss=False):
'\n trains a model using the provide configuration\n :param skip:\n :param model:\n :param config:\n :return:\n '
TRAINING_DATA_FILE = config.get_string(ModelParameter.TRAINING_FILE)
DEV_DATA_FILE = config.get_string(ModelParameter.DEV_FILE)
BATCH_SIZE = config.get_int(ModelParameter.BATCH_SIZE)
MAX_LENGTH = config.get_int(ModelParameter.MAX_LENGTH)
(train_iterator, dev_iterator, class_field, text_field, data_fields) = load_data(TRAINING_DATA_FILE, DEV_DATA_FILE, BATCH_SIZE, MAX_LENGTH, device=device, is_bert_model=bert_preprocessing)
EPOCHS = config.get_int(ModelParameter.EPOCHS)
model = model_class(config, (len(class_field.vocab.freqs) + 1), class_field=class_field, device=device).to(device)
LR = config.get_float(ModelParameter.LEARNING_RATE)
WEIGHT_DECAY = config.get_float(ModelParameter.WEIGHT_DECAY)
OPTIMIZER_NAME = config.get_string(ModelParameter.OPTIMIZER)
OPTIMIZER = get_optimizer(model, OPTIMIZER_NAME, **{'lr': LR, 'weight_decay': WEIGHT_DECAY})
if (not skip):
protocol = TrainingProtocol('../protocols/', model_class.__name__, ['map', 'acc'])
print('Starting training...')
max_acc_score = 0.0
early_stopping = EarlyStopping(max_acc_score, patience=patience)
for epoch in range(1, (EPOCHS + 1)):
print('\n', ('Epoch %i of %i' % (epoch, EPOCHS)), '\n')
model.train()
epoch_loss = 0.0
for batch in tqdm(train_iterator):
(loss, _) = get_loss(model, batch, task, criterion, device, margin=margin)
epoch_loss += loss.item()
if (loss.item() > 0):
OPTIMIZER.zero_grad()
loss.backward()
OPTIMIZER.step()
del loss
print(('Epoch Loss: %f' % epoch_loss))
(acc, map, dev_loss) = evaluate_epoch(model, dev_iterator, criterion, batch_size=BATCH_SIZE, device=device, task=task)
if (acc > max_acc_score):
max_acc_score = acc
save_snapshot(model, OPTIMIZER, epoch, epoch_loss, max_acc_score, '../snapshots')
if log_train_loss:
(train_acc, _, epoch_loss) = evaluate_epoch(model, dev_iterator, criterion, batch_size=BATCH_SIZE, device=device, task=task)
protocol.log_epoch(epoch, epoch_loss, dev_loss, map, acc)
del epoch_loss
if early_stopping.update(max_acc_score):
print('Stopping training...')
break
load_snapshot(model, directory='../snapshots')
return (model, data_fields, criterion)
|
def train_model(model_class, criterion, device, config: Configuration, skip=False, patience=5, task='classification', bert_preprocessing=True, margin=0.2, log_train_loss=False):
'\n trains a model using the provide configuration\n :param skip:\n :param model:\n :param config:\n :return:\n '
TRAINING_DATA_FILE = config.get_string(ModelParameter.TRAINING_FILE)
DEV_DATA_FILE = config.get_string(ModelParameter.DEV_FILE)
BATCH_SIZE = config.get_int(ModelParameter.BATCH_SIZE)
MAX_LENGTH = config.get_int(ModelParameter.MAX_LENGTH)
(train_iterator, dev_iterator, class_field, text_field, data_fields) = load_data(TRAINING_DATA_FILE, DEV_DATA_FILE, BATCH_SIZE, MAX_LENGTH, device=device, is_bert_model=bert_preprocessing)
EPOCHS = config.get_int(ModelParameter.EPOCHS)
model = model_class(config, (len(class_field.vocab.freqs) + 1), class_field=class_field, device=device).to(device)
LR = config.get_float(ModelParameter.LEARNING_RATE)
WEIGHT_DECAY = config.get_float(ModelParameter.WEIGHT_DECAY)
OPTIMIZER_NAME = config.get_string(ModelParameter.OPTIMIZER)
OPTIMIZER = get_optimizer(model, OPTIMIZER_NAME, **{'lr': LR, 'weight_decay': WEIGHT_DECAY})
if (not skip):
protocol = TrainingProtocol('../protocols/', model_class.__name__, ['map', 'acc'])
print('Starting training...')
max_acc_score = 0.0
early_stopping = EarlyStopping(max_acc_score, patience=patience)
for epoch in range(1, (EPOCHS + 1)):
print('\n', ('Epoch %i of %i' % (epoch, EPOCHS)), '\n')
model.train()
epoch_loss = 0.0
for batch in tqdm(train_iterator):
(loss, _) = get_loss(model, batch, task, criterion, device, margin=margin)
epoch_loss += loss.item()
if (loss.item() > 0):
OPTIMIZER.zero_grad()
loss.backward()
OPTIMIZER.step()
del loss
print(('Epoch Loss: %f' % epoch_loss))
(acc, map, dev_loss) = evaluate_epoch(model, dev_iterator, criterion, batch_size=BATCH_SIZE, device=device, task=task)
if (acc > max_acc_score):
max_acc_score = acc
save_snapshot(model, OPTIMIZER, epoch, epoch_loss, max_acc_score, '../snapshots')
if log_train_loss:
(train_acc, _, epoch_loss) = evaluate_epoch(model, dev_iterator, criterion, batch_size=BATCH_SIZE, device=device, task=task)
protocol.log_epoch(epoch, epoch_loss, dev_loss, map, acc)
del epoch_loss
if early_stopping.update(max_acc_score):
print('Stopping training...')
break
load_snapshot(model, directory='../snapshots')
return (model, data_fields, criterion)<|docstring|>trains a model using the provide configuration
:param skip:
:param model:
:param config:
:return:<|endoftext|>
|
d3f5e244e5d9153cbde1ae84f07842a459a449b43581fe9b82bb32ef1109dd52
|
def __init__(self, initial_scorce, patience):
'\n Stops the training once there is no significant improvement over the last epochs\n (patience = number of epochs to wait for improvement\n :param initial_scorce:\n :param patience:\n '
self.score = initial_scorce
self.patience = patience
self.epoch_counter = 0
|
Stops the training once there is no significant improvement over the last epochs
(patience = number of epochs to wait for improvement
:param initial_scorce:
:param patience:
|
utility/training.py
|
__init__
|
RobinRojowiec/intent-recognition-in-doctor-patient-interviews
| 0 |
python
|
def __init__(self, initial_scorce, patience):
'\n Stops the training once there is no significant improvement over the last epochs\n (patience = number of epochs to wait for improvement\n :param initial_scorce:\n :param patience:\n '
self.score = initial_scorce
self.patience = patience
self.epoch_counter = 0
|
def __init__(self, initial_scorce, patience):
'\n Stops the training once there is no significant improvement over the last epochs\n (patience = number of epochs to wait for improvement\n :param initial_scorce:\n :param patience:\n '
self.score = initial_scorce
self.patience = patience
self.epoch_counter = 0<|docstring|>Stops the training once there is no significant improvement over the last epochs
(patience = number of epochs to wait for improvement
:param initial_scorce:
:param patience:<|endoftext|>
|
5480844beebe63e84a1194998a876f9fdb01ec2c731ce11e35d78cb4cbb248b2
|
@functools.wraps(click.echo)
def echo(*args, **kwargs) -> None:
'\n Most of our outputs relate to significant changes to the state of a file\n system and/or a database, so we should print to `stderr` by default.\n '
kwargs.setdefault('file', sys.stderr)
return click.echo(*args, **kwargs)
|
Most of our outputs relate to significant changes to the state of a file
system and/or a database, so we should print to `stderr` by default.
|
flask_postgres/utils.py
|
echo
|
dwreeves/Flask-Postgres
| 23 |
python
|
@functools.wraps(click.echo)
def echo(*args, **kwargs) -> None:
'\n Most of our outputs relate to significant changes to the state of a file\n system and/or a database, so we should print to `stderr` by default.\n '
kwargs.setdefault('file', sys.stderr)
return click.echo(*args, **kwargs)
|
@functools.wraps(click.echo)
def echo(*args, **kwargs) -> None:
'\n Most of our outputs relate to significant changes to the state of a file\n system and/or a database, so we should print to `stderr` by default.\n '
kwargs.setdefault('file', sys.stderr)
return click.echo(*args, **kwargs)<|docstring|>Most of our outputs relate to significant changes to the state of a file
system and/or a database, so we should print to `stderr` by default.<|endoftext|>
|
0737ee0d449ed896ca6674153e4d957d11a2e29a56795f7dbd31591dbc957f4e
|
def raise_err_if_disallowed():
'\n You can protect your app against accidental or (some, but not much)\n malicious use in sensitive environments with the\n `FLASK_POSTGRES_CLI_DISALLOWED_ENVS` config variable.\n\n This is only enabled for the CLI.\n '
li = config.get('FLASK_POSTGRES_CLI_DISALLOWED_ENVS')
if isinstance(li, str):
li = li.split(';')
if (current_app.env in li):
raise EnvironmentNotAllowed
|
You can protect your app against accidental or (some, but not much)
malicious use in sensitive environments with the
`FLASK_POSTGRES_CLI_DISALLOWED_ENVS` config variable.
This is only enabled for the CLI.
|
flask_postgres/utils.py
|
raise_err_if_disallowed
|
dwreeves/Flask-Postgres
| 23 |
python
|
def raise_err_if_disallowed():
'\n You can protect your app against accidental or (some, but not much)\n malicious use in sensitive environments with the\n `FLASK_POSTGRES_CLI_DISALLOWED_ENVS` config variable.\n\n This is only enabled for the CLI.\n '
li = config.get('FLASK_POSTGRES_CLI_DISALLOWED_ENVS')
if isinstance(li, str):
li = li.split(';')
if (current_app.env in li):
raise EnvironmentNotAllowed
|
def raise_err_if_disallowed():
'\n You can protect your app against accidental or (some, but not much)\n malicious use in sensitive environments with the\n `FLASK_POSTGRES_CLI_DISALLOWED_ENVS` config variable.\n\n This is only enabled for the CLI.\n '
li = config.get('FLASK_POSTGRES_CLI_DISALLOWED_ENVS')
if isinstance(li, str):
li = li.split(';')
if (current_app.env in li):
raise EnvironmentNotAllowed<|docstring|>You can protect your app against accidental or (some, but not much)
malicious use in sensitive environments with the
`FLASK_POSTGRES_CLI_DISALLOWED_ENVS` config variable.
This is only enabled for the CLI.<|endoftext|>
|
1b41b5e54a05b636d97416c5e28e75ea839513fd53abca85cedc2ffd2f04157a
|
def validate(config):
'\n Validate the beacon configuration\n '
if (not isinstance(config, list)):
return (False, 'Configuration for diskusage beacon must be a list.')
return (True, 'Valid beacon configuration')
|
Validate the beacon configuration
|
salt/beacons/diskusage.py
|
validate
|
oldmantaiter/salt
| 12 |
python
|
def validate(config):
'\n \n '
if (not isinstance(config, list)):
return (False, 'Configuration for diskusage beacon must be a list.')
return (True, 'Valid beacon configuration')
|
def validate(config):
'\n \n '
if (not isinstance(config, list)):
return (False, 'Configuration for diskusage beacon must be a list.')
return (True, 'Valid beacon configuration')<|docstring|>Validate the beacon configuration<|endoftext|>
|
ff6e6e78a670082f1d2712d4fe937750fd1a049023a133176d3c78cc9b4883cf
|
def beacon(config):
'\n Monitor the disk usage of the minion\n\n Specify thresholds for each disk and only emit a beacon if any of them are\n exceeded.\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - /: 63%\n - /mnt/nfs: 50%\n\n Windows drives must be quoted to avoid yaml syntax errors\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - interval: 120\n - \'c:\\\': 90%\n - \'d:\\\': 50%\n\n Regular expressions can be used as mount points.\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - \'^\\/(?!home).*$\': 90%\n - \'^[a-zA-Z]:\\$\': 50%\n\n The first one will match all mounted disks beginning with "/", except /home\n The second one will match disks from A:\\ to Z:\\ on a Windows system\n\n Note that if a regular expression are evaluated after static mount points,\n which means that if a regular expression matches another defined mount point,\n it will override the previously defined threshold.\n\n '
parts = psutil.disk_partitions(all=False)
ret = []
for mounts in config:
mount = next(iter(mounts))
for part in parts:
if re.match(mount, part.mountpoint):
_mount = part.mountpoint
try:
_current_usage = psutil.disk_usage(mount)
except OSError:
log.warning('%s is not a valid mount point.', mount)
continue
current_usage = _current_usage.percent
monitor_usage = mounts[mount]
log.info('current_usage %s', current_usage)
if ('%' in monitor_usage):
monitor_usage = re.sub('%', '', monitor_usage)
monitor_usage = float(monitor_usage)
if (current_usage >= monitor_usage):
ret.append({'diskusage': current_usage, 'mount': _mount})
return ret
|
Monitor the disk usage of the minion
Specify thresholds for each disk and only emit a beacon if any of them are
exceeded.
.. code-block:: yaml
beacons:
diskusage:
- /: 63%
- /mnt/nfs: 50%
Windows drives must be quoted to avoid yaml syntax errors
.. code-block:: yaml
beacons:
diskusage:
- interval: 120
- 'c:\': 90%
- 'd:\': 50%
Regular expressions can be used as mount points.
.. code-block:: yaml
beacons:
diskusage:
- '^\/(?!home).*$': 90%
- '^[a-zA-Z]:\$': 50%
The first one will match all mounted disks beginning with "/", except /home
The second one will match disks from A:\ to Z:\ on a Windows system
Note that if a regular expression are evaluated after static mount points,
which means that if a regular expression matches another defined mount point,
it will override the previously defined threshold.
|
salt/beacons/diskusage.py
|
beacon
|
oldmantaiter/salt
| 12 |
python
|
def beacon(config):
'\n Monitor the disk usage of the minion\n\n Specify thresholds for each disk and only emit a beacon if any of them are\n exceeded.\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - /: 63%\n - /mnt/nfs: 50%\n\n Windows drives must be quoted to avoid yaml syntax errors\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - interval: 120\n - \'c:\\\': 90%\n - \'d:\\\': 50%\n\n Regular expressions can be used as mount points.\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - \'^\\/(?!home).*$\': 90%\n - \'^[a-zA-Z]:\\$\': 50%\n\n The first one will match all mounted disks beginning with "/", except /home\n The second one will match disks from A:\\ to Z:\\ on a Windows system\n\n Note that if a regular expression are evaluated after static mount points,\n which means that if a regular expression matches another defined mount point,\n it will override the previously defined threshold.\n\n '
parts = psutil.disk_partitions(all=False)
ret = []
for mounts in config:
mount = next(iter(mounts))
for part in parts:
if re.match(mount, part.mountpoint):
_mount = part.mountpoint
try:
_current_usage = psutil.disk_usage(mount)
except OSError:
log.warning('%s is not a valid mount point.', mount)
continue
current_usage = _current_usage.percent
monitor_usage = mounts[mount]
log.info('current_usage %s', current_usage)
if ('%' in monitor_usage):
monitor_usage = re.sub('%', '', monitor_usage)
monitor_usage = float(monitor_usage)
if (current_usage >= monitor_usage):
ret.append({'diskusage': current_usage, 'mount': _mount})
return ret
|
def beacon(config):
'\n Monitor the disk usage of the minion\n\n Specify thresholds for each disk and only emit a beacon if any of them are\n exceeded.\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - /: 63%\n - /mnt/nfs: 50%\n\n Windows drives must be quoted to avoid yaml syntax errors\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - interval: 120\n - \'c:\\\': 90%\n - \'d:\\\': 50%\n\n Regular expressions can be used as mount points.\n\n .. code-block:: yaml\n\n beacons:\n diskusage:\n - \'^\\/(?!home).*$\': 90%\n - \'^[a-zA-Z]:\\$\': 50%\n\n The first one will match all mounted disks beginning with "/", except /home\n The second one will match disks from A:\\ to Z:\\ on a Windows system\n\n Note that if a regular expression are evaluated after static mount points,\n which means that if a regular expression matches another defined mount point,\n it will override the previously defined threshold.\n\n '
parts = psutil.disk_partitions(all=False)
ret = []
for mounts in config:
mount = next(iter(mounts))
for part in parts:
if re.match(mount, part.mountpoint):
_mount = part.mountpoint
try:
_current_usage = psutil.disk_usage(mount)
except OSError:
log.warning('%s is not a valid mount point.', mount)
continue
current_usage = _current_usage.percent
monitor_usage = mounts[mount]
log.info('current_usage %s', current_usage)
if ('%' in monitor_usage):
monitor_usage = re.sub('%', '', monitor_usage)
monitor_usage = float(monitor_usage)
if (current_usage >= monitor_usage):
ret.append({'diskusage': current_usage, 'mount': _mount})
return ret<|docstring|>Monitor the disk usage of the minion
Specify thresholds for each disk and only emit a beacon if any of them are
exceeded.
.. code-block:: yaml
beacons:
diskusage:
- /: 63%
- /mnt/nfs: 50%
Windows drives must be quoted to avoid yaml syntax errors
.. code-block:: yaml
beacons:
diskusage:
- interval: 120
- 'c:\': 90%
- 'd:\': 50%
Regular expressions can be used as mount points.
.. code-block:: yaml
beacons:
diskusage:
- '^\/(?!home).*$': 90%
- '^[a-zA-Z]:\$': 50%
The first one will match all mounted disks beginning with "/", except /home
The second one will match disks from A:\ to Z:\ on a Windows system
Note that if a regular expression are evaluated after static mount points,
which means that if a regular expression matches another defined mount point,
it will override the previously defined threshold.<|endoftext|>
|
83ebbe27ae8f03bdf7e3846a169378a162c2f3d59fd3f32e8deb3a434c985e4a
|
@classmethod
def _get_secret_key(cls):
'\n Returns the secret key to be used for the API request\n '
return hypertrack.secret_key
|
Returns the secret key to be used for the API request
|
hypertrack/resource.py
|
_get_secret_key
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def _get_secret_key(cls):
'\n \n '
return hypertrack.secret_key
|
@classmethod
def _get_secret_key(cls):
'\n \n '
return hypertrack.secret_key<|docstring|>Returns the secret key to be used for the API request<|endoftext|>
|
ec9fb2564f11d895d464ab0884a2e3a14f87a3707e42fee01775a8ec9f08f07f
|
@classmethod
def _get_base_url(cls):
'\n Returns the base URL to be used for the API request\n '
return ((hypertrack.base_url + hypertrack.api_version) + '/')
|
Returns the base URL to be used for the API request
|
hypertrack/resource.py
|
_get_base_url
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def _get_base_url(cls):
'\n \n '
return ((hypertrack.base_url + hypertrack.api_version) + '/')
|
@classmethod
def _get_base_url(cls):
'\n \n '
return ((hypertrack.base_url + hypertrack.api_version) + '/')<|docstring|>Returns the base URL to be used for the API request<|endoftext|>
|
7c30cd457a8b5cd5e0d486072258d609990bd6cab1b1e188f499c6a93de8da5c
|
@classmethod
def _get_user_agent(cls):
'\n Returns user agent for the API request\n '
user_agent = 'HyperTrack/{api} PythonBindings/{version}'.format(api=hypertrack.api_version, version=version.VERSION)
return user_agent
|
Returns user agent for the API request
|
hypertrack/resource.py
|
_get_user_agent
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def _get_user_agent(cls):
'\n \n '
user_agent = 'HyperTrack/{api} PythonBindings/{version}'.format(api=hypertrack.api_version, version=version.VERSION)
return user_agent
|
@classmethod
def _get_user_agent(cls):
'\n \n '
user_agent = 'HyperTrack/{api} PythonBindings/{version}'.format(api=hypertrack.api_version, version=version.VERSION)
return user_agent<|docstring|>Returns user agent for the API request<|endoftext|>
|
409cd5b43a0a98ff1e23241f73e8a23949d74be45f9eae80d88b483b5db1166b
|
@classmethod
def _get_headers(cls, has_files=False):
'\n Returns headers for the API request\n '
headers = {'Authorization': ('token %s' % cls._get_secret_key()), 'User-Agent': cls._get_user_agent()}
if (not has_files):
headers['Content-Type'] = 'application/json'
return headers
|
Returns headers for the API request
|
hypertrack/resource.py
|
_get_headers
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def _get_headers(cls, has_files=False):
'\n \n '
headers = {'Authorization': ('token %s' % cls._get_secret_key()), 'User-Agent': cls._get_user_agent()}
if (not has_files):
headers['Content-Type'] = 'application/json'
return headers
|
@classmethod
def _get_headers(cls, has_files=False):
'\n \n '
headers = {'Authorization': ('token %s' % cls._get_secret_key()), 'User-Agent': cls._get_user_agent()}
if (not has_files):
headers['Content-Type'] = 'application/json'
return headers<|docstring|>Returns headers for the API request<|endoftext|>
|
7769bbc892f58a842611a9f123219d6d11988aa4d918e14650a1bac66ab80860
|
@classmethod
def _make_request(cls, method, url, data=None, params=None, files=None):
'\n Makes the network call to the API\n '
if (data and (not files)):
data = json.dumps(data)
if files:
headers = cls._get_headers(has_files=True)
else:
headers = cls._get_headers(has_files=False)
try:
resp = requests.request(method, url, headers=headers, data=data, params=params, files=files, timeout=20)
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as excp:
msg = 'Unexpected error communicating with HyperTrack. If this problem persists, let us know at [email protected]. %s'
err = ('%s: %s' % (type(excp).__name__, str(excp)))
raise exceptions.APIConnectionException((msg % err))
if (not (200 <= resp.status_code < 300)):
cls._handle_api_error(resp)
return resp
|
Makes the network call to the API
|
hypertrack/resource.py
|
_make_request
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def _make_request(cls, method, url, data=None, params=None, files=None):
'\n \n '
if (data and (not files)):
data = json.dumps(data)
if files:
headers = cls._get_headers(has_files=True)
else:
headers = cls._get_headers(has_files=False)
try:
resp = requests.request(method, url, headers=headers, data=data, params=params, files=files, timeout=20)
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as excp:
msg = 'Unexpected error communicating with HyperTrack. If this problem persists, let us know at [email protected]. %s'
err = ('%s: %s' % (type(excp).__name__, str(excp)))
raise exceptions.APIConnectionException((msg % err))
if (not (200 <= resp.status_code < 300)):
cls._handle_api_error(resp)
return resp
|
@classmethod
def _make_request(cls, method, url, data=None, params=None, files=None):
'\n \n '
if (data and (not files)):
data = json.dumps(data)
if files:
headers = cls._get_headers(has_files=True)
else:
headers = cls._get_headers(has_files=False)
try:
resp = requests.request(method, url, headers=headers, data=data, params=params, files=files, timeout=20)
except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as excp:
msg = 'Unexpected error communicating with HyperTrack. If this problem persists, let us know at [email protected]. %s'
err = ('%s: %s' % (type(excp).__name__, str(excp)))
raise exceptions.APIConnectionException((msg % err))
if (not (200 <= resp.status_code < 300)):
cls._handle_api_error(resp)
return resp<|docstring|>Makes the network call to the API<|endoftext|>
|
e4f8ae6933bef4c1b7b18cc455ac5a3f909e4866c58b7ef666efa01259e0f913
|
@classmethod
def _handle_api_error(cls, response):
'\n Raises appropriate exceptions for API errors\n '
if (response.status_code in [401, 403]):
raise exceptions.AuthenticationException(response.content, response.content, response.status_code, response.headers)
elif (response.status_code == 429):
raise exceptions.RateLimitException(response.content, response.content, response.status_code, response.headers)
elif (response.status_code in [400, 404]):
raise exceptions.InvalidRequestException(response.content, response.content, response.status_code, response.headers)
else:
raise exceptions.APIException(response.content, response.content, response.status_code, response.headers)
|
Raises appropriate exceptions for API errors
|
hypertrack/resource.py
|
_handle_api_error
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def _handle_api_error(cls, response):
'\n \n '
if (response.status_code in [401, 403]):
raise exceptions.AuthenticationException(response.content, response.content, response.status_code, response.headers)
elif (response.status_code == 429):
raise exceptions.RateLimitException(response.content, response.content, response.status_code, response.headers)
elif (response.status_code in [400, 404]):
raise exceptions.InvalidRequestException(response.content, response.content, response.status_code, response.headers)
else:
raise exceptions.APIException(response.content, response.content, response.status_code, response.headers)
|
@classmethod
def _handle_api_error(cls, response):
'\n \n '
if (response.status_code in [401, 403]):
raise exceptions.AuthenticationException(response.content, response.content, response.status_code, response.headers)
elif (response.status_code == 429):
raise exceptions.RateLimitException(response.content, response.content, response.status_code, response.headers)
elif (response.status_code in [400, 404]):
raise exceptions.InvalidRequestException(response.content, response.content, response.status_code, response.headers)
else:
raise exceptions.APIException(response.content, response.content, response.status_code, response.headers)<|docstring|>Raises appropriate exceptions for API errors<|endoftext|>
|
2abdff88ba89175af74b2f92400a60b191c7e59410c035c3d12e05a6bf02d8db
|
@classmethod
def get_class_url(cls):
'\n Returns the URI for the resource\n '
url = urlparse.urljoin(cls._get_base_url(), cls.resource_url)
return url
|
Returns the URI for the resource
|
hypertrack/resource.py
|
get_class_url
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def get_class_url(cls):
'\n \n '
url = urlparse.urljoin(cls._get_base_url(), cls.resource_url)
return url
|
@classmethod
def get_class_url(cls):
'\n \n '
url = urlparse.urljoin(cls._get_base_url(), cls.resource_url)
return url<|docstring|>Returns the URI for the resource<|endoftext|>
|
d8ca19587ee51bd08515f1048d4f2cf4924c270d151a6e847c727401c1256f6b
|
def get_instance_url(self):
'\n Returns the URI for the individual resource\n '
url = urlparse.urljoin(self._get_base_url(), '{resource_url}{resource_id}/'.format(resource_url=self.resource_url, resource_id=self.id))
return url
|
Returns the URI for the individual resource
|
hypertrack/resource.py
|
get_instance_url
|
Instawork/hypertrack-python
| 0 |
python
|
def get_instance_url(self):
'\n \n '
url = urlparse.urljoin(self._get_base_url(), '{resource_url}{resource_id}/'.format(resource_url=self.resource_url, resource_id=self.id))
return url
|
def get_instance_url(self):
'\n \n '
url = urlparse.urljoin(self._get_base_url(), '{resource_url}{resource_id}/'.format(resource_url=self.resource_url, resource_id=self.id))
return url<|docstring|>Returns the URI for the individual resource<|endoftext|>
|
e055dd079780b74f0f51266d340df350cd9c19877cc35af488805fd3f5622bb0
|
def __init__(self, object_class, **kwargs):
'\n Converts objects in the list to their resource class\n '
super(ListObject, self).__init__(**kwargs)
self._object_class = object_class
if self.get('results'):
self.results = [object_class(**obj) for obj in self.results]
else:
self.results = []
|
Converts objects in the list to their resource class
|
hypertrack/resource.py
|
__init__
|
Instawork/hypertrack-python
| 0 |
python
|
def __init__(self, object_class, **kwargs):
'\n \n '
super(ListObject, self).__init__(**kwargs)
self._object_class = object_class
if self.get('results'):
self.results = [object_class(**obj) for obj in self.results]
else:
self.results = []
|
def __init__(self, object_class, **kwargs):
'\n \n '
super(ListObject, self).__init__(**kwargs)
self._object_class = object_class
if self.get('results'):
self.results = [object_class(**obj) for obj in self.results]
else:
self.results = []<|docstring|>Converts objects in the list to their resource class<|endoftext|>
|
65f8acbc9aa07323cf56255faa1ff07853d566da1f0d4d7547d478d939300af1
|
def __iter__(self):
'\n Allow iteration over the resources in the list\n '
return getattr(self, 'results', []).__iter__()
|
Allow iteration over the resources in the list
|
hypertrack/resource.py
|
__iter__
|
Instawork/hypertrack-python
| 0 |
python
|
def __iter__(self):
'\n \n '
return getattr(self, 'results', []).__iter__()
|
def __iter__(self):
'\n \n '
return getattr(self, 'results', []).__iter__()<|docstring|>Allow iteration over the resources in the list<|endoftext|>
|
c1e75e542ea5d353b1e1fd5fc379e248289c3ee35b736b8afdb601979f846582
|
def list(self, **params):
'\n Mixin method to list the resources from the API\n '
url = self.get_class_url()
resp = self._make_request('get', url, params=params)
return ListObject(self._object_class, **resp.json())
|
Mixin method to list the resources from the API
|
hypertrack/resource.py
|
list
|
Instawork/hypertrack-python
| 0 |
python
|
def list(self, **params):
'\n \n '
url = self.get_class_url()
resp = self._make_request('get', url, params=params)
return ListObject(self._object_class, **resp.json())
|
def list(self, **params):
'\n \n '
url = self.get_class_url()
resp = self._make_request('get', url, params=params)
return ListObject(self._object_class, **resp.json())<|docstring|>Mixin method to list the resources from the API<|endoftext|>
|
4eb96892218c0e1b9135895c0bd6d6e205f9a03b97763efb33b8e7f6f965204e
|
def next_page(self):
'\n Returns a list object for the next page\n '
if self.get('next'):
querystring = urlparse.urlparse(self.next).query
params = dict(urlparse.parse_qsl(querystring))
return self.list(**params)
else:
return self.__class__(self._object_class)
|
Returns a list object for the next page
|
hypertrack/resource.py
|
next_page
|
Instawork/hypertrack-python
| 0 |
python
|
def next_page(self):
'\n \n '
if self.get('next'):
querystring = urlparse.urlparse(self.next).query
params = dict(urlparse.parse_qsl(querystring))
return self.list(**params)
else:
return self.__class__(self._object_class)
|
def next_page(self):
'\n \n '
if self.get('next'):
querystring = urlparse.urlparse(self.next).query
params = dict(urlparse.parse_qsl(querystring))
return self.list(**params)
else:
return self.__class__(self._object_class)<|docstring|>Returns a list object for the next page<|endoftext|>
|
5b6160660cc8d9c4e6f5be3d8a6ec46352123aa92de5ed60d66792e56a5badba
|
def previous_page(self):
'\n Returns a list object for the previous page\n '
if self.get('previous'):
querystring = urlparse.urlparse(self.previous).query
params = dict(urlparse.parse_qsl(querystring))
return self.list(**params)
else:
return self.__class__(self._object_class)
|
Returns a list object for the previous page
|
hypertrack/resource.py
|
previous_page
|
Instawork/hypertrack-python
| 0 |
python
|
def previous_page(self):
'\n \n '
if self.get('previous'):
querystring = urlparse.urlparse(self.previous).query
params = dict(urlparse.parse_qsl(querystring))
return self.list(**params)
else:
return self.__class__(self._object_class)
|
def previous_page(self):
'\n \n '
if self.get('previous'):
querystring = urlparse.urlparse(self.previous).query
params = dict(urlparse.parse_qsl(querystring))
return self.list(**params)
else:
return self.__class__(self._object_class)<|docstring|>Returns a list object for the previous page<|endoftext|>
|
86a532bb676204f2c5c402cb6892ebd0e4229771611d72014f40709767e0eae2
|
@classmethod
def create(cls, files=None, **data):
'\n Mixin method to create the resource on the API\n '
url = cls.get_class_url()
resp = cls._make_request('post', url, data=data, files=files)
return cls(**resp.json())
|
Mixin method to create the resource on the API
|
hypertrack/resource.py
|
create
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def create(cls, files=None, **data):
'\n \n '
url = cls.get_class_url()
resp = cls._make_request('post', url, data=data, files=files)
return cls(**resp.json())
|
@classmethod
def create(cls, files=None, **data):
'\n \n '
url = cls.get_class_url()
resp = cls._make_request('post', url, data=data, files=files)
return cls(**resp.json())<|docstring|>Mixin method to create the resource on the API<|endoftext|>
|
0264d99ea3e4c9afd7674f8b4c7f5a4beffade89decd5cfe4933b058001977e6
|
@classmethod
def retrieve(cls, hypertrack_id):
'\n Mixin method to retrieve the resource from the API\n '
url = urlparse.urljoin(cls._get_base_url(), '{resource_url}{resource_id}/'.format(resource_url=cls.resource_url, resource_id=hypertrack_id))
resp = cls._make_request('get', url)
return cls(**resp.json())
|
Mixin method to retrieve the resource from the API
|
hypertrack/resource.py
|
retrieve
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def retrieve(cls, hypertrack_id):
'\n \n '
url = urlparse.urljoin(cls._get_base_url(), '{resource_url}{resource_id}/'.format(resource_url=cls.resource_url, resource_id=hypertrack_id))
resp = cls._make_request('get', url)
return cls(**resp.json())
|
@classmethod
def retrieve(cls, hypertrack_id):
'\n \n '
url = urlparse.urljoin(cls._get_base_url(), '{resource_url}{resource_id}/'.format(resource_url=cls.resource_url, resource_id=hypertrack_id))
resp = cls._make_request('get', url)
return cls(**resp.json())<|docstring|>Mixin method to retrieve the resource from the API<|endoftext|>
|
09982c7683fe7e337a2cadeeb2491c62a8ccf447b9529b29ba502ffc2d495fbc
|
@classmethod
def list(cls, **params):
'\n Mixin method to list the resources from the API\n '
url = cls.get_class_url()
resp = cls._make_request('get', url, params=params)
return ListObject(cls, **resp.json())
|
Mixin method to list the resources from the API
|
hypertrack/resource.py
|
list
|
Instawork/hypertrack-python
| 0 |
python
|
@classmethod
def list(cls, **params):
'\n \n '
url = cls.get_class_url()
resp = cls._make_request('get', url, params=params)
return ListObject(cls, **resp.json())
|
@classmethod
def list(cls, **params):
'\n \n '
url = cls.get_class_url()
resp = cls._make_request('get', url, params=params)
return ListObject(cls, **resp.json())<|docstring|>Mixin method to list the resources from the API<|endoftext|>
|
c9abb396a5516fc37d0e7cf00b39755341e48eedbeafce29b8e931d35624da8c
|
def save(self, files=None):
'\n Mixin method to update the resource on the API\n '
url = self.get_instance_url()
data = dict([(k, getattr(self, k)) for k in self._unsaved_keys])
resp = self._make_request('patch', url, data=data, files=files)
self._unsaved_keys = set()
return self.__class__(**resp.json())
|
Mixin method to update the resource on the API
|
hypertrack/resource.py
|
save
|
Instawork/hypertrack-python
| 0 |
python
|
def save(self, files=None):
'\n \n '
url = self.get_instance_url()
data = dict([(k, getattr(self, k)) for k in self._unsaved_keys])
resp = self._make_request('patch', url, data=data, files=files)
self._unsaved_keys = set()
return self.__class__(**resp.json())
|
def save(self, files=None):
'\n \n '
url = self.get_instance_url()
data = dict([(k, getattr(self, k)) for k in self._unsaved_keys])
resp = self._make_request('patch', url, data=data, files=files)
self._unsaved_keys = set()
return self.__class__(**resp.json())<|docstring|>Mixin method to update the resource on the API<|endoftext|>
|
9db6d5265318b7d72fda6a23e2c0c675faa41f6f214fdc15ba0b6faf5bd296a6
|
def delete(self):
'\n Mixin method to update the resource on the API\n '
url = self.get_instance_url()
self._make_request('delete', url)
return self
|
Mixin method to update the resource on the API
|
hypertrack/resource.py
|
delete
|
Instawork/hypertrack-python
| 0 |
python
|
def delete(self):
'\n \n '
url = self.get_instance_url()
self._make_request('delete', url)
return self
|
def delete(self):
'\n \n '
url = self.get_instance_url()
self._make_request('delete', url)
return self<|docstring|>Mixin method to update the resource on the API<|endoftext|>
|
1d2eb5d982cb44d9f1110a85ad32a6387dcd4bee7344ec1692362c11c85ad7b5
|
def __init__(self, id=None, notification_id=None, email=None, created=None, modified=None):
'NotificationRecipient - a model defined in Swagger'
self._id = None
self._notification_id = None
self._email = None
self._created = None
self._modified = None
self.discriminator = None
if (id is not None):
self.id = id
if (notification_id is not None):
self.notification_id = notification_id
if (email is not None):
self.email = email
if (created is not None):
self.created = created
if (modified is not None):
self.modified = modified
|
NotificationRecipient - a model defined in Swagger
|
exavault/models/notification_recipient.py
|
__init__
|
ExaVault/evapi-python
| 0 |
python
|
def __init__(self, id=None, notification_id=None, email=None, created=None, modified=None):
self._id = None
self._notification_id = None
self._email = None
self._created = None
self._modified = None
self.discriminator = None
if (id is not None):
self.id = id
if (notification_id is not None):
self.notification_id = notification_id
if (email is not None):
self.email = email
if (created is not None):
self.created = created
if (modified is not None):
self.modified = modified
|
def __init__(self, id=None, notification_id=None, email=None, created=None, modified=None):
self._id = None
self._notification_id = None
self._email = None
self._created = None
self._modified = None
self.discriminator = None
if (id is not None):
self.id = id
if (notification_id is not None):
self.notification_id = notification_id
if (email is not None):
self.email = email
if (created is not None):
self.created = created
if (modified is not None):
self.modified = modified<|docstring|>NotificationRecipient - a model defined in Swagger<|endoftext|>
|
94334c0ed6b83991dc8bf5d5f15d86ccefdb85ef04fdbfd49a206c404530c510
|
@property
def id(self):
'Gets the id of this NotificationRecipient. # noqa: E501\n\n ID of the recipient. # noqa: E501\n\n :return: The id of this NotificationRecipient. # noqa: E501\n :rtype: int\n '
return self._id
|
Gets the id of this NotificationRecipient. # noqa: E501
ID of the recipient. # noqa: E501
:return: The id of this NotificationRecipient. # noqa: E501
:rtype: int
|
exavault/models/notification_recipient.py
|
id
|
ExaVault/evapi-python
| 0 |
python
|
@property
def id(self):
'Gets the id of this NotificationRecipient. # noqa: E501\n\n ID of the recipient. # noqa: E501\n\n :return: The id of this NotificationRecipient. # noqa: E501\n :rtype: int\n '
return self._id
|
@property
def id(self):
'Gets the id of this NotificationRecipient. # noqa: E501\n\n ID of the recipient. # noqa: E501\n\n :return: The id of this NotificationRecipient. # noqa: E501\n :rtype: int\n '
return self._id<|docstring|>Gets the id of this NotificationRecipient. # noqa: E501
ID of the recipient. # noqa: E501
:return: The id of this NotificationRecipient. # noqa: E501
:rtype: int<|endoftext|>
|
4e1a5d02a8215faea66d62680f774ca7fc370410620b5e0ecfd337e1a81a06d2
|
@id.setter
def id(self, id):
'Sets the id of this NotificationRecipient.\n\n ID of the recipient. # noqa: E501\n\n :param id: The id of this NotificationRecipient. # noqa: E501\n :type: int\n '
self._id = id
|
Sets the id of this NotificationRecipient.
ID of the recipient. # noqa: E501
:param id: The id of this NotificationRecipient. # noqa: E501
:type: int
|
exavault/models/notification_recipient.py
|
id
|
ExaVault/evapi-python
| 0 |
python
|
@id.setter
def id(self, id):
'Sets the id of this NotificationRecipient.\n\n ID of the recipient. # noqa: E501\n\n :param id: The id of this NotificationRecipient. # noqa: E501\n :type: int\n '
self._id = id
|
@id.setter
def id(self, id):
'Sets the id of this NotificationRecipient.\n\n ID of the recipient. # noqa: E501\n\n :param id: The id of this NotificationRecipient. # noqa: E501\n :type: int\n '
self._id = id<|docstring|>Sets the id of this NotificationRecipient.
ID of the recipient. # noqa: E501
:param id: The id of this NotificationRecipient. # noqa: E501
:type: int<|endoftext|>
|
f4333d17d07b96eac6269725f2367cef637b58786b385227571ed1f81018794b
|
@property
def notification_id(self):
'Gets the notification_id of this NotificationRecipient. # noqa: E501\n\n ID of the notification that the recipient belongs to. # noqa: E501\n\n :return: The notification_id of this NotificationRecipient. # noqa: E501\n :rtype: int\n '
return self._notification_id
|
Gets the notification_id of this NotificationRecipient. # noqa: E501
ID of the notification that the recipient belongs to. # noqa: E501
:return: The notification_id of this NotificationRecipient. # noqa: E501
:rtype: int
|
exavault/models/notification_recipient.py
|
notification_id
|
ExaVault/evapi-python
| 0 |
python
|
@property
def notification_id(self):
'Gets the notification_id of this NotificationRecipient. # noqa: E501\n\n ID of the notification that the recipient belongs to. # noqa: E501\n\n :return: The notification_id of this NotificationRecipient. # noqa: E501\n :rtype: int\n '
return self._notification_id
|
@property
def notification_id(self):
'Gets the notification_id of this NotificationRecipient. # noqa: E501\n\n ID of the notification that the recipient belongs to. # noqa: E501\n\n :return: The notification_id of this NotificationRecipient. # noqa: E501\n :rtype: int\n '
return self._notification_id<|docstring|>Gets the notification_id of this NotificationRecipient. # noqa: E501
ID of the notification that the recipient belongs to. # noqa: E501
:return: The notification_id of this NotificationRecipient. # noqa: E501
:rtype: int<|endoftext|>
|
5b1dbbd1fd45c09aa72d23e4dbe09a36bc64df738686a7271bc84439a4fb4d84
|
@notification_id.setter
def notification_id(self, notification_id):
'Sets the notification_id of this NotificationRecipient.\n\n ID of the notification that the recipient belongs to. # noqa: E501\n\n :param notification_id: The notification_id of this NotificationRecipient. # noqa: E501\n :type: int\n '
self._notification_id = notification_id
|
Sets the notification_id of this NotificationRecipient.
ID of the notification that the recipient belongs to. # noqa: E501
:param notification_id: The notification_id of this NotificationRecipient. # noqa: E501
:type: int
|
exavault/models/notification_recipient.py
|
notification_id
|
ExaVault/evapi-python
| 0 |
python
|
@notification_id.setter
def notification_id(self, notification_id):
'Sets the notification_id of this NotificationRecipient.\n\n ID of the notification that the recipient belongs to. # noqa: E501\n\n :param notification_id: The notification_id of this NotificationRecipient. # noqa: E501\n :type: int\n '
self._notification_id = notification_id
|
@notification_id.setter
def notification_id(self, notification_id):
'Sets the notification_id of this NotificationRecipient.\n\n ID of the notification that the recipient belongs to. # noqa: E501\n\n :param notification_id: The notification_id of this NotificationRecipient. # noqa: E501\n :type: int\n '
self._notification_id = notification_id<|docstring|>Sets the notification_id of this NotificationRecipient.
ID of the notification that the recipient belongs to. # noqa: E501
:param notification_id: The notification_id of this NotificationRecipient. # noqa: E501
:type: int<|endoftext|>
|
d6d51b579321b18f52cb9e38621ae7eb2075182c52fd0eeaac34bf3db2045165
|
@property
def email(self):
'Gets the email of this NotificationRecipient. # noqa: E501\n\n Recipient email. # noqa: E501\n\n :return: The email of this NotificationRecipient. # noqa: E501\n :rtype: str\n '
return self._email
|
Gets the email of this NotificationRecipient. # noqa: E501
Recipient email. # noqa: E501
:return: The email of this NotificationRecipient. # noqa: E501
:rtype: str
|
exavault/models/notification_recipient.py
|
email
|
ExaVault/evapi-python
| 0 |
python
|
@property
def email(self):
'Gets the email of this NotificationRecipient. # noqa: E501\n\n Recipient email. # noqa: E501\n\n :return: The email of this NotificationRecipient. # noqa: E501\n :rtype: str\n '
return self._email
|
@property
def email(self):
'Gets the email of this NotificationRecipient. # noqa: E501\n\n Recipient email. # noqa: E501\n\n :return: The email of this NotificationRecipient. # noqa: E501\n :rtype: str\n '
return self._email<|docstring|>Gets the email of this NotificationRecipient. # noqa: E501
Recipient email. # noqa: E501
:return: The email of this NotificationRecipient. # noqa: E501
:rtype: str<|endoftext|>
|
693555572cfcbb9f16885f955efe1575bf7d8c9018dee77d0c283f952d3b5f0d
|
@email.setter
def email(self, email):
'Sets the email of this NotificationRecipient.\n\n Recipient email. # noqa: E501\n\n :param email: The email of this NotificationRecipient. # noqa: E501\n :type: str\n '
self._email = email
|
Sets the email of this NotificationRecipient.
Recipient email. # noqa: E501
:param email: The email of this NotificationRecipient. # noqa: E501
:type: str
|
exavault/models/notification_recipient.py
|
email
|
ExaVault/evapi-python
| 0 |
python
|
@email.setter
def email(self, email):
'Sets the email of this NotificationRecipient.\n\n Recipient email. # noqa: E501\n\n :param email: The email of this NotificationRecipient. # noqa: E501\n :type: str\n '
self._email = email
|
@email.setter
def email(self, email):
'Sets the email of this NotificationRecipient.\n\n Recipient email. # noqa: E501\n\n :param email: The email of this NotificationRecipient. # noqa: E501\n :type: str\n '
self._email = email<|docstring|>Sets the email of this NotificationRecipient.
Recipient email. # noqa: E501
:param email: The email of this NotificationRecipient. # noqa: E501
:type: str<|endoftext|>
|
1f9d879c7037cbcb8d519d079a2950cb89d15c67355ba711e5c431d6840153a7
|
@property
def created(self):
'Gets the created of this NotificationRecipient. # noqa: E501\n\n Timestamp of adding notification recipient. # noqa: E501\n\n :return: The created of this NotificationRecipient. # noqa: E501\n :rtype: datetime\n '
return self._created
|
Gets the created of this NotificationRecipient. # noqa: E501
Timestamp of adding notification recipient. # noqa: E501
:return: The created of this NotificationRecipient. # noqa: E501
:rtype: datetime
|
exavault/models/notification_recipient.py
|
created
|
ExaVault/evapi-python
| 0 |
python
|
@property
def created(self):
'Gets the created of this NotificationRecipient. # noqa: E501\n\n Timestamp of adding notification recipient. # noqa: E501\n\n :return: The created of this NotificationRecipient. # noqa: E501\n :rtype: datetime\n '
return self._created
|
@property
def created(self):
'Gets the created of this NotificationRecipient. # noqa: E501\n\n Timestamp of adding notification recipient. # noqa: E501\n\n :return: The created of this NotificationRecipient. # noqa: E501\n :rtype: datetime\n '
return self._created<|docstring|>Gets the created of this NotificationRecipient. # noqa: E501
Timestamp of adding notification recipient. # noqa: E501
:return: The created of this NotificationRecipient. # noqa: E501
:rtype: datetime<|endoftext|>
|
d35a5bc153092fd5a2fb1cda38f50d440109dc431ada1dba3e4020a3c63a9beb
|
@created.setter
def created(self, created):
'Sets the created of this NotificationRecipient.\n\n Timestamp of adding notification recipient. # noqa: E501\n\n :param created: The created of this NotificationRecipient. # noqa: E501\n :type: datetime\n '
self._created = created
|
Sets the created of this NotificationRecipient.
Timestamp of adding notification recipient. # noqa: E501
:param created: The created of this NotificationRecipient. # noqa: E501
:type: datetime
|
exavault/models/notification_recipient.py
|
created
|
ExaVault/evapi-python
| 0 |
python
|
@created.setter
def created(self, created):
'Sets the created of this NotificationRecipient.\n\n Timestamp of adding notification recipient. # noqa: E501\n\n :param created: The created of this NotificationRecipient. # noqa: E501\n :type: datetime\n '
self._created = created
|
@created.setter
def created(self, created):
'Sets the created of this NotificationRecipient.\n\n Timestamp of adding notification recipient. # noqa: E501\n\n :param created: The created of this NotificationRecipient. # noqa: E501\n :type: datetime\n '
self._created = created<|docstring|>Sets the created of this NotificationRecipient.
Timestamp of adding notification recipient. # noqa: E501
:param created: The created of this NotificationRecipient. # noqa: E501
:type: datetime<|endoftext|>
|
f6c623574577d91dafe319afb3fcda5eff1575c3ac9fa0d39f9a36feb4739a69
|
@property
def modified(self):
'Gets the modified of this NotificationRecipient. # noqa: E501\n\n Timestamp of notification recipient modification. # noqa: E501\n\n :return: The modified of this NotificationRecipient. # noqa: E501\n :rtype: datetime\n '
return self._modified
|
Gets the modified of this NotificationRecipient. # noqa: E501
Timestamp of notification recipient modification. # noqa: E501
:return: The modified of this NotificationRecipient. # noqa: E501
:rtype: datetime
|
exavault/models/notification_recipient.py
|
modified
|
ExaVault/evapi-python
| 0 |
python
|
@property
def modified(self):
'Gets the modified of this NotificationRecipient. # noqa: E501\n\n Timestamp of notification recipient modification. # noqa: E501\n\n :return: The modified of this NotificationRecipient. # noqa: E501\n :rtype: datetime\n '
return self._modified
|
@property
def modified(self):
'Gets the modified of this NotificationRecipient. # noqa: E501\n\n Timestamp of notification recipient modification. # noqa: E501\n\n :return: The modified of this NotificationRecipient. # noqa: E501\n :rtype: datetime\n '
return self._modified<|docstring|>Gets the modified of this NotificationRecipient. # noqa: E501
Timestamp of notification recipient modification. # noqa: E501
:return: The modified of this NotificationRecipient. # noqa: E501
:rtype: datetime<|endoftext|>
|
107d295b5dbee870c81860e141d8c6df29f2ebb7ef136c924404cc450d106be4
|
@modified.setter
def modified(self, modified):
'Sets the modified of this NotificationRecipient.\n\n Timestamp of notification recipient modification. # noqa: E501\n\n :param modified: The modified of this NotificationRecipient. # noqa: E501\n :type: datetime\n '
self._modified = modified
|
Sets the modified of this NotificationRecipient.
Timestamp of notification recipient modification. # noqa: E501
:param modified: The modified of this NotificationRecipient. # noqa: E501
:type: datetime
|
exavault/models/notification_recipient.py
|
modified
|
ExaVault/evapi-python
| 0 |
python
|
@modified.setter
def modified(self, modified):
'Sets the modified of this NotificationRecipient.\n\n Timestamp of notification recipient modification. # noqa: E501\n\n :param modified: The modified of this NotificationRecipient. # noqa: E501\n :type: datetime\n '
self._modified = modified
|
@modified.setter
def modified(self, modified):
'Sets the modified of this NotificationRecipient.\n\n Timestamp of notification recipient modification. # noqa: E501\n\n :param modified: The modified of this NotificationRecipient. # noqa: E501\n :type: datetime\n '
self._modified = modified<|docstring|>Sets the modified of this NotificationRecipient.
Timestamp of notification recipient modification. # noqa: E501
:param modified: The modified of this NotificationRecipient. # noqa: E501
:type: datetime<|endoftext|>
|
3f745e0026807be9d19faf7b67794aeab2722ed4315b714e68f77ab3ffa84849
|
def to_dict(self):
'Returns the model properties as a dict'
result = {}
for (attr, _) in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
if issubclass(NotificationRecipient, dict):
for (key, value) in self.items():
result[key] = value
return result
|
Returns the model properties as a dict
|
exavault/models/notification_recipient.py
|
to_dict
|
ExaVault/evapi-python
| 0 |
python
|
def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
if issubclass(NotificationRecipient, dict):
for (key, value) in self.items():
result[key] = value
return result
|
def to_dict(self):
result = {}
for (attr, _) in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map((lambda x: (x.to_dict() if hasattr(x, 'to_dict') else x)), value))
elif hasattr(value, 'to_dict'):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map((lambda item: ((item[0], item[1].to_dict()) if hasattr(item[1], 'to_dict') else item)), value.items()))
else:
result[attr] = value
if issubclass(NotificationRecipient, dict):
for (key, value) in self.items():
result[key] = value
return result<|docstring|>Returns the model properties as a dict<|endoftext|>
|
cbb19eaa2fc8a113d9e32f924ef280a7e97563f8915f94f65dab438997af2e99
|
def to_str(self):
'Returns the string representation of the model'
return pprint.pformat(self.to_dict())
|
Returns the string representation of the model
|
exavault/models/notification_recipient.py
|
to_str
|
ExaVault/evapi-python
| 0 |
python
|
def to_str(self):
return pprint.pformat(self.to_dict())
|
def to_str(self):
return pprint.pformat(self.to_dict())<|docstring|>Returns the string representation of the model<|endoftext|>
|
772243a2c2b3261a9b954d07aaf295e3c1242a579a495e2d6a5679c677861703
|
def __repr__(self):
'For `print` and `pprint`'
return self.to_str()
|
For `print` and `pprint`
|
exavault/models/notification_recipient.py
|
__repr__
|
ExaVault/evapi-python
| 0 |
python
|
def __repr__(self):
return self.to_str()
|
def __repr__(self):
return self.to_str()<|docstring|>For `print` and `pprint`<|endoftext|>
|
e6a2c3acca138354e64f3bc2bbff991819a7a8289f678f1d18721a0da766636e
|
def __eq__(self, other):
'Returns true if both objects are equal'
if (not isinstance(other, NotificationRecipient)):
return False
return (self.__dict__ == other.__dict__)
|
Returns true if both objects are equal
|
exavault/models/notification_recipient.py
|
__eq__
|
ExaVault/evapi-python
| 0 |
python
|
def __eq__(self, other):
if (not isinstance(other, NotificationRecipient)):
return False
return (self.__dict__ == other.__dict__)
|
def __eq__(self, other):
if (not isinstance(other, NotificationRecipient)):
return False
return (self.__dict__ == other.__dict__)<|docstring|>Returns true if both objects are equal<|endoftext|>
|
43dc6740163eb9fc1161d09cb2208a64c7ad0cc8d9c8637ac3264522d3ec7e42
|
def __ne__(self, other):
'Returns true if both objects are not equal'
return (not (self == other))
|
Returns true if both objects are not equal
|
exavault/models/notification_recipient.py
|
__ne__
|
ExaVault/evapi-python
| 0 |
python
|
def __ne__(self, other):
return (not (self == other))
|
def __ne__(self, other):
return (not (self == other))<|docstring|>Returns true if both objects are not equal<|endoftext|>
|
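A minimal usage sketch of the NotificationRecipient accessors and helpers collected above; the no-argument constructor, the import path (taken from the record's path field), and all attribute values are assumptions rather than verified against the generated package.

from datetime import datetime
from exavault.models.notification_recipient import NotificationRecipient  # path assumed from the record

recipient = NotificationRecipient()            # swagger-codegen models usually accept no arguments
recipient.email = 'user@example.com'           # email setter shown above
recipient.created = datetime(2021, 1, 1)       # created setter shown above
print(recipient.to_dict())                     # dict built from every attribute in swagger_types
print(recipient)                               # __repr__ -> to_str() -> pprint.pformat(to_dict())
assert recipient == recipient                  # __eq__ compares the instances' __dict__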
337206defd6f75d0dcbdb9e36c6e86ed984904baafa12dfffec3e84afa481c75
|
def get_classification(self, image):
'Determines the color of the traffic light in the image\n\n Args:\n image (cv::Mat): image containing the traffic light\n\n Returns:\n int: ID of traffic light color (specified in styx_msgs/TrafficLight)\n\n '
image = cv2.resize(image, (384, 288))
images = np.expand_dims(image, 0)
ret = self.sess.run(self.fetch_dict, {self.input_node: images})
state = ret['light_state']
if (state in self.light_map):
return self.light_map[state]
else:
return TrafficLight.UNKNOWN
|
Determines the color of the traffic light in the image
Args:
image (cv::Mat): image containing the traffic light
Returns:
int: ID of traffic light color (specified in styx_msgs/TrafficLight)
|
ros/src/tl_detector/light_classification/tl_classifier.py
|
get_classification
|
zfsang/udsdc_p9_system_integration
| 0 |
python
|
def get_classification(self, image):
'Determines the color of the traffic light in the image\n\n Args:\n image (cv::Mat): image containing the traffic light\n\n Returns:\n int: ID of traffic light color (specified in styx_msgs/TrafficLight)\n\n '
image = cv2.resize(image, (384, 288))
images = np.expand_dims(image, 0)
ret = self.sess.run(self.fetch_dict, {self.input_node: images})
state = ret['light_state']
if (state in self.light_map):
return self.light_map[state]
else:
return TrafficLight.UNKNOWN
|
def get_classification(self, image):
'Determines the color of the traffic light in the image\n\n Args:\n image (cv::Mat): image containing the traffic light\n\n Returns:\n int: ID of traffic light color (specified in styx_msgs/TrafficLight)\n\n '
image = cv2.resize(image, (384, 288))
images = np.expand_dims(image, 0)
ret = self.sess.run(self.fetch_dict, {self.input_node: images})
state = ret['light_state']
if (state in self.light_map):
return self.light_map[state]
else:
return TrafficLight.UNKNOWN<|docstring|>Determines the color of the traffic light in the image
Args:
image (cv::Mat): image containing the traffic light
Returns:
int: ID of traffic light color (specified in styx_msgs/TrafficLight)<|endoftext|>
|
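A hypothetical call site for the traffic-light classifier method above; the class name TLClassifier, its module path, and the image file are assumptions, since the record only shows the method body.

import cv2
from light_classification.tl_classifier import TLClassifier  # module path assumed from the record

classifier = TLClassifier()                     # constructor assumed to load the frozen graph/session
frame = cv2.imread('camera_frame.png')          # any BGR frame; get_classification resizes it to 384x288
state = classifier.get_classification(frame)
print(state)                                    # a styx_msgs/TrafficLight ID, or TrafficLight.UNKNOWN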
eb816dcb72e48d20efae1cf872b91d73cb4c17bfc105983974eabf07758a47fd
|
def __init__(self, app, nworkers):
"Start the job runner with 'nworkers' worker threads"
super().__init__(app, nworkers)
self._init_worker_threads()
|
Start the job runner with 'nworkers' worker threads
|
lib/galaxy/jobs/runners/tasks.py
|
__init__
|
knutwa-ext/galaxy
| 1,085 |
python
|
def __init__(self, app, nworkers):
super().__init__(app, nworkers)
self._init_worker_threads()
|
def __init__(self, app, nworkers):
super().__init__(app, nworkers)
self._init_worker_threads()<|docstring|>Start the job runner with 'nworkers' worker threads<|endoftext|>
|
fa24f8a6a759725f4784f277e45012765496989f2fae50d3ca545ffe36c8d5b6
|
def _cancel_job(self, job_wrapper, task_wrappers):
"\n Cancel the given job. The job's state will be set to ERROR.\n Any running tasks will be cancelled, and any queued/pending\n tasks will be marked as DELETED so that runners know not\n to run those tasks.\n "
job = job_wrapper.get_job()
job.set_state(model.Job.states.ERROR)
for task_wrapper in task_wrappers:
task = task_wrapper.get_task()
task_state = task.get_state()
if (model.Task.states.QUEUED == task_state):
log.debug(('_cancel_job for job %d: Task %d is not running; setting state to DELETED' % (job.id, task.id)))
task_wrapper.change_state(task.states.DELETED)
sleep(5)
for task_wrapper in task_wrappers:
if (model.Task.states.RUNNING == task_wrapper.get_state()):
task = task_wrapper.get_task()
log.debug(('_cancel_job for job %d: Stopping running task %d' % (job.id, task.id)))
job_wrapper.app.job_manager.job_handler.dispatcher.stop(task)
|
Cancel the given job. The job's state will be set to ERROR.
Any running tasks will be cancelled, and any queued/pending
tasks will be marked as DELETED so that runners know not
to run those tasks.
|
lib/galaxy/jobs/runners/tasks.py
|
_cancel_job
|
knutwa-ext/galaxy
| 1,085 |
python
|
def _cancel_job(self, job_wrapper, task_wrappers):
"\n Cancel the given job. The job's state will be set to ERROR.\n Any running tasks will be cancelled, and any queued/pending\n tasks will be marked as DELETED so that runners know not\n to run those tasks.\n "
job = job_wrapper.get_job()
job.set_state(model.Job.states.ERROR)
for task_wrapper in task_wrappers:
task = task_wrapper.get_task()
task_state = task.get_state()
if (model.Task.states.QUEUED == task_state):
log.debug(('_cancel_job for job %d: Task %d is not running; setting state to DELETED' % (job.id, task.id)))
task_wrapper.change_state(task.states.DELETED)
sleep(5)
for task_wrapper in task_wrappers:
if (model.Task.states.RUNNING == task_wrapper.get_state()):
task = task_wrapper.get_task()
log.debug(('_cancel_job for job %d: Stopping running task %d' % (job.id, task.id)))
job_wrapper.app.job_manager.job_handler.dispatcher.stop(task)
|
def _cancel_job(self, job_wrapper, task_wrappers):
"\n Cancel the given job. The job's state will be set to ERROR.\n Any running tasks will be cancelled, and any queued/pending\n tasks will be marked as DELETED so that runners know not\n to run those tasks.\n "
job = job_wrapper.get_job()
job.set_state(model.Job.states.ERROR)
for task_wrapper in task_wrappers:
task = task_wrapper.get_task()
task_state = task.get_state()
if (model.Task.states.QUEUED == task_state):
log.debug(('_cancel_job for job %d: Task %d is not running; setting state to DELETED' % (job.id, task.id)))
task_wrapper.change_state(task.states.DELETED)
sleep(5)
for task_wrapper in task_wrappers:
if (model.Task.states.RUNNING == task_wrapper.get_state()):
task = task_wrapper.get_task()
log.debug(('_cancel_job for job %d: Stopping running task %d' % (job.id, task.id)))
job_wrapper.app.job_manager.job_handler.dispatcher.stop(task)<|docstring|>Cancel the given job. The job's state will be set to ERROR.
Any running tasks will be cancelled, and any queued/pending
tasks will be marked as DELETED so that runners know not
to run those tasks.<|endoftext|>
|
2881128db6a7574c317eb2c8f06dc21d090e084e3a60a34a353a21dc98dff1ba
|
def _stop_pid(self, pid, job_id):
"\n This method stops the given process id whether it's a task or job.\n It is meant to be a private helper method, but it is mostly reusable.\n The first argument is the process id to stop, and the second id is the\n job's id (which is used for logging messages only right now).\n "
pid = int(pid)
log.debug(f'Stopping pid {pid}')
if (not self._check_pid(pid)):
log.warning(("_stop_pid(): %s: PID %d was already dead or can't be signaled" % (job_id, pid)))
return
for sig in [15, 9]:
try:
os.killpg(pid, sig)
except OSError as e:
log.warning(('_stop_pid(): %s: Got errno %s when attempting to signal %d to PID %d: %s' % (job_id, errno.errorcode[e.errno], sig, pid, e.strerror)))
return
sleep(2)
if (not self._check_pid(pid)):
log.debug(('_stop_pid(): %s: PID %d successfully killed with signal %d' % (job_id, pid, sig)))
return
else:
log.warning(('_stop_pid(): %s: PID %d refuses to die after signaling TERM/KILL' % (job_id, pid)))
|
This method stops the given process id whether it's a task or job.
It is meant to be a private helper method, but it is mostly reusable.
The first argument is the process id to stop, and the second id is the
job's id (which is used for logging messages only right now).
|
lib/galaxy/jobs/runners/tasks.py
|
_stop_pid
|
knutwa-ext/galaxy
| 1,085 |
python
|
def _stop_pid(self, pid, job_id):
"\n This method stops the given process id whether it's a task or job.\n It is meant to be a private helper method, but it is mostly reusable.\n The first argument is the process id to stop, and the second id is the\n job's id (which is used for logging messages only right now).\n "
pid = int(pid)
log.debug(f'Stopping pid {pid}')
if (not self._check_pid(pid)):
log.warning(("_stop_pid(): %s: PID %d was already dead or can't be signaled" % (job_id, pid)))
return
for sig in [15, 9]:
try:
os.killpg(pid, sig)
except OSError as e:
log.warning(('_stop_pid(): %s: Got errno %s when attempting to signal %d to PID %d: %s' % (job_id, errno.errorcode[e.errno], sig, pid, e.strerror)))
return
sleep(2)
if (not self._check_pid(pid)):
log.debug(('_stop_pid(): %s: PID %d successfully killed with signal %d' % (job_id, pid, sig)))
return
else:
log.warning(('_stop_pid(): %s: PID %d refuses to die after signaling TERM/KILL' % (job_id, pid)))
|
def _stop_pid(self, pid, job_id):
"\n This method stops the given process id whether it's a task or job.\n It is meant to be a private helper method, but it is mostly reusable.\n The first argument is the process id to stop, and the second id is the\n job's id (which is used for logging messages only right now).\n "
pid = int(pid)
log.debug(f'Stopping pid {pid}')
if (not self._check_pid(pid)):
log.warning(("_stop_pid(): %s: PID %d was already dead or can't be signaled" % (job_id, pid)))
return
for sig in [15, 9]:
try:
os.killpg(pid, sig)
except OSError as e:
log.warning(('_stop_pid(): %s: Got errno %s when attempting to signal %d to PID %d: %s' % (job_id, errno.errorcode[e.errno], sig, pid, e.strerror)))
return
sleep(2)
if (not self._check_pid(pid)):
log.debug(('_stop_pid(): %s: PID %d successfully killed with signal %d' % (job_id, pid, sig)))
return
else:
log.warning(('_stop_pid(): %s: PID %d refuses to die after signaling TERM/KILL' % (job_id, pid)))<|docstring|>This method stops the given process id whether it's a task or job.
It is meant to be a private helper method, but it is mostly reusable.
The first argument is the process id to stop, and the second id is the
job's id (which is used for logging messages only right now).<|endoftext|>
|
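The same TERM-then-KILL escalation that _stop_pid performs can be sketched against the standard library alone; the two-second grace period mirrors the original, and the helper below is illustrative rather than a drop-in replacement (it has no logging and returns a boolean instead).

import os
import signal
import time

def stop_process_group(pgid, grace_seconds=2):
    # Try a polite SIGTERM first, then SIGKILL, pausing between attempts.
    for sig in (signal.SIGTERM, signal.SIGKILL):
        try:
            os.killpg(pgid, sig)
        except ProcessLookupError:
            return True                  # the group was already gone
        time.sleep(grace_seconds)
        try:
            os.killpg(pgid, 0)           # signal 0 only probes for existence
        except ProcessLookupError:
            return True                  # killed by this signal
    return False                         # survived both TERM and KILL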
0ca3c1f6da2bac2c30e22d0d08a42ba3049cf27420c8553f7e9ce73430ec2f30
|
def test_create_file(self):
'Test the creation of a simple XlsxWriter file.'
filename = self.got_filename
workbook = Workbook(filename)
worksheet = workbook.add_worksheet()
chart1 = workbook.add_chart({'type': 'bar'})
chart2 = workbook.add_chart({'type': 'bar'})
chart1.axis_ids = [64265216, 64447616]
chart2.axis_ids = [86048128, 86058112]
data = [[1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15]]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart1.add_series({'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$B$1:$B$5'})
chart1.add_series({'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$C$1:$C$5'})
worksheet.insert_chart('E9', chart1)
chart2.add_series({'categories': '=Sheet1!$A$1:$A$4', 'values': '=Sheet1!$B$1:$B$4'})
chart2.add_series({'categories': '=Sheet1!$A$1:$A$4', 'values': '=Sheet1!$C$1:$C$4'})
worksheet.insert_chart('F25', chart2)
workbook.close()
(got, exp) = _compare_xlsx_files(self.got_filename, self.exp_filename, self.ignore_files, self.ignore_elements)
self.assertEqual(got, exp)
|
Test the creation of a simple XlsxWriter file.
|
xlsxwriter/test/comparison/test_chart_bar03.py
|
test_create_file
|
sontek/XlsxWriter
| 1 |
python
|
def test_create_file(self):
filename = self.got_filename
workbook = Workbook(filename)
worksheet = workbook.add_worksheet()
chart1 = workbook.add_chart({'type': 'bar'})
chart2 = workbook.add_chart({'type': 'bar'})
chart1.axis_ids = [64265216, 64447616]
chart2.axis_ids = [86048128, 86058112]
data = [[1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15]]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart1.add_series({'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$B$1:$B$5'})
chart1.add_series({'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$C$1:$C$5'})
worksheet.insert_chart('E9', chart1)
chart2.add_series({'categories': '=Sheet1!$A$1:$A$4', 'values': '=Sheet1!$B$1:$B$4'})
chart2.add_series({'categories': '=Sheet1!$A$1:$A$4', 'values': '=Sheet1!$C$1:$C$4'})
worksheet.insert_chart('F25', chart2)
workbook.close()
(got, exp) = _compare_xlsx_files(self.got_filename, self.exp_filename, self.ignore_files, self.ignore_elements)
self.assertEqual(got, exp)
|
def test_create_file(self):
filename = self.got_filename
workbook = Workbook(filename)
worksheet = workbook.add_worksheet()
chart1 = workbook.add_chart({'type': 'bar'})
chart2 = workbook.add_chart({'type': 'bar'})
chart1.axis_ids = [64265216, 64447616]
chart2.axis_ids = [86048128, 86058112]
data = [[1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15]]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart1.add_series({'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$B$1:$B$5'})
chart1.add_series({'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$C$1:$C$5'})
worksheet.insert_chart('E9', chart1)
chart2.add_series({'categories': '=Sheet1!$A$1:$A$4', 'values': '=Sheet1!$B$1:$B$4'})
chart2.add_series({'categories': '=Sheet1!$A$1:$A$4', 'values': '=Sheet1!$C$1:$C$4'})
worksheet.insert_chart('F25', chart2)
workbook.close()
(got, exp) = _compare_xlsx_files(self.got_filename, self.exp_filename, self.ignore_files, self.ignore_elements)
self.assertEqual(got, exp)<|docstring|>Test the creation of a simple XlsxWriter file.<|endoftext|>
|
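Stripped of the comparison-test harness, the chart-building pattern exercised above reduces to the short sketch below; the filename and cell anchors are placeholders, and axis_ids are omitted because they are only pinned in tests to keep the generated XML deterministic.

from xlsxwriter import Workbook

workbook = Workbook('bar_chart.xlsx')                 # placeholder filename
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'bar'})

worksheet.write_column('A1', [1, 2, 3, 4, 5])         # categories
worksheet.write_column('B1', [2, 4, 6, 8, 10])        # values
chart.add_series({'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$B$1:$B$5'})

worksheet.insert_chart('D2', chart)                   # anchor the chart at cell D2
workbook.close()                                      # the .xlsx file is written on close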
2bee709912e96b65c67491c30ad85904d39758a88f2267f04d5cda78e52a70bc
|
def mobilenet_v2(inputs, original_stride, weights_decay=0):
' Contains MobileNet_v2 definition.\n\n This is NOT original MobileNet_v2.\n * Conv2D biases are ON\n * Extra 1x1 convs are added (SeparableConv2D instead of DepthwiseConv2D)\n * First mobile_net_block contains more layers than original MobileNet_v2.\n\n '
def mobile_net_block(inputs, expand_to, strided, num_outputs):
net = Conv2D(filters=expand_to, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = SeparableConv2D(filters=expand_to, kernel_size=3, strides=(2 if strided else 1), padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = Conv2D(filters=num_outputs, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
if ((not strided) and (net.get_shape().as_list()[(- 1)] == inputs.get_shape().as_list()[(- 1)])):
return tf.keras.layers.Add()([inputs, net])
return net
end_points = {}
net = BatchNormalization(name='data_bn')(inputs)
net = Conv2D(filters=32, kernel_size=3, strides=2, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = mobile_net_block(net, strided=False, expand_to=32, num_outputs=16)
end_points['2x'] = net
net = mobile_net_block(net, strided=True, expand_to=96, num_outputs=24)
net = mobile_net_block(net, strided=False, expand_to=144, num_outputs=24)
end_points['4x'] = net
net = mobile_net_block(net, strided=True, expand_to=144, num_outputs=32)
net = mobile_net_block(net, strided=False, expand_to=192, num_outputs=32)
net = mobile_net_block(net, strided=False, expand_to=192, num_outputs=32)
if original_stride:
end_points['8x'] = net
net = mobile_net_block(net, strided=original_stride, expand_to=192, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
if (not original_stride):
end_points['8x'] = net
net = mobile_net_block(net, strided=(not original_stride), expand_to=384, num_outputs=96)
net = mobile_net_block(net, strided=False, expand_to=576, num_outputs=96)
net = mobile_net_block(net, strided=False, expand_to=576, num_outputs=96)
end_points['16x'] = net
net = mobile_net_block(net, strided=True, expand_to=576, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=320)
net = Conv2D(filters=1280, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
end_points['32x'] = net
return end_points
|
Contains MobileNet_v2 definition.
This is NOT original MobileNet_v2.
* Conv2D biases are ON
* Extra 1x1 convs are added (SeparableConv2D instead of DepthwiseConv2D)
* First mobile_net_block contains more layers than original MobileNet_v2.
|
tensorflow_toolkit/text_detection/text_detection/model.py
|
mobilenet_v2
|
vshampor/openvino_training_extensions
| 256 |
python
|
def mobilenet_v2(inputs, original_stride, weights_decay=0):
' Contains MobileNet_v2 definition.\n\n This is NOT original MobileNet_v2.\n * Conv2D biases are ON\n * Extra 1x1 convs are added (SeparableConv2D instead of DepthwiseConv2D)\n * First mobile_net_block contains more layers than original MobileNet_v2.\n\n '
def mobile_net_block(inputs, expand_to, strided, num_outputs):
net = Conv2D(filters=expand_to, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = SeparableConv2D(filters=expand_to, kernel_size=3, strides=(2 if strided else 1), padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = Conv2D(filters=num_outputs, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
if ((not strided) and (net.get_shape().as_list()[(- 1)] == inputs.get_shape().as_list()[(- 1)])):
return tf.keras.layers.Add()([inputs, net])
return net
end_points = {}
net = BatchNormalization(name='data_bn')(inputs)
net = Conv2D(filters=32, kernel_size=3, strides=2, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = mobile_net_block(net, strided=False, expand_to=32, num_outputs=16)
end_points['2x'] = net
net = mobile_net_block(net, strided=True, expand_to=96, num_outputs=24)
net = mobile_net_block(net, strided=False, expand_to=144, num_outputs=24)
end_points['4x'] = net
net = mobile_net_block(net, strided=True, expand_to=144, num_outputs=32)
net = mobile_net_block(net, strided=False, expand_to=192, num_outputs=32)
net = mobile_net_block(net, strided=False, expand_to=192, num_outputs=32)
if original_stride:
end_points['8x'] = net
net = mobile_net_block(net, strided=original_stride, expand_to=192, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
if (not original_stride):
end_points['8x'] = net
net = mobile_net_block(net, strided=(not original_stride), expand_to=384, num_outputs=96)
net = mobile_net_block(net, strided=False, expand_to=576, num_outputs=96)
net = mobile_net_block(net, strided=False, expand_to=576, num_outputs=96)
end_points['16x'] = net
net = mobile_net_block(net, strided=True, expand_to=576, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=320)
net = Conv2D(filters=1280, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
end_points['32x'] = net
return end_points
|
def mobilenet_v2(inputs, original_stride, weights_decay=0):
' Contains MobileNet_v2 definition.\n\n This is NOT original MobileNet_v2.\n * Conv2D biases are ON\n * Extra 1x1 convs are added (SeparableConv2D instead of DepthwiseConv2D)\n * First mobile_net_block contains more layers than original MobileNet_v2.\n\n '
def mobile_net_block(inputs, expand_to, strided, num_outputs):
net = Conv2D(filters=expand_to, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = SeparableConv2D(filters=expand_to, kernel_size=3, strides=(2 if strided else 1), padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = Conv2D(filters=num_outputs, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
if ((not strided) and (net.get_shape().as_list()[(- 1)] == inputs.get_shape().as_list()[(- 1)])):
return tf.keras.layers.Add()([inputs, net])
return net
end_points = {}
net = BatchNormalization(name='data_bn')(inputs)
net = Conv2D(filters=32, kernel_size=3, strides=2, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
net = mobile_net_block(net, strided=False, expand_to=32, num_outputs=16)
end_points['2x'] = net
net = mobile_net_block(net, strided=True, expand_to=96, num_outputs=24)
net = mobile_net_block(net, strided=False, expand_to=144, num_outputs=24)
end_points['4x'] = net
net = mobile_net_block(net, strided=True, expand_to=144, num_outputs=32)
net = mobile_net_block(net, strided=False, expand_to=192, num_outputs=32)
net = mobile_net_block(net, strided=False, expand_to=192, num_outputs=32)
if original_stride:
end_points['8x'] = net
net = mobile_net_block(net, strided=original_stride, expand_to=192, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
net = mobile_net_block(net, strided=False, expand_to=384, num_outputs=64)
if (not original_stride):
end_points['8x'] = net
net = mobile_net_block(net, strided=(not original_stride), expand_to=384, num_outputs=96)
net = mobile_net_block(net, strided=False, expand_to=576, num_outputs=96)
net = mobile_net_block(net, strided=False, expand_to=576, num_outputs=96)
end_points['16x'] = net
net = mobile_net_block(net, strided=True, expand_to=576, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=160)
net = mobile_net_block(net, strided=False, expand_to=960, num_outputs=320)
net = Conv2D(filters=1280, kernel_size=1, padding='same', kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(net)
net = BatchNormalization()(net)
net = ReLU(max_value=6)(net)
end_points['32x'] = net
return end_points<|docstring|>Contains MobileNet_v2 definition.
This is NOT original MobileNet_v2.
* Conv2D biases are ON
* Extra 1x1 convs are added (SeparableConv2D instead of DepthwiseConv2D)
* First mobile_net_block contains more layers than original MobileNet_v2.<|endoftext|>
|
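Assuming mobilenet_v2 from the record above is in scope, the end_points dictionary it returns can be inspected as in the sketch below; the input resolution is an arbitrary assumption.

import tensorflow as tf

inputs = tf.keras.Input(shape=(512, 512, 3))              # assumed input resolution
end_points = mobilenet_v2(inputs, original_stride=False)
for scale, tensor in end_points.items():                  # '2x', '4x', '8x', '16x', '32x'
    print(scale, tensor.shape)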
851e8771cdc2239501e771a7ebc926a040c681f4669ae60838235fca92c05c09
|
def fcn_head(inputs, num_classes, name, weights_decay=0):
' Defines FCN head. '
x32 = tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['32x'])
x32_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x32)
x16 = tf.keras.layers.Add()([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['16x']), x32_upscaled])
x16_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x16)
x08 = tf.keras.layers.Add()([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['8x']), x16_upscaled])
x08_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x08)
x04 = tf.keras.layers.Add(name=name)([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['4x']), x08_upscaled])
return x04
|
Defines FCN head.
|
tensorflow_toolkit/text_detection/text_detection/model.py
|
fcn_head
|
vshampor/openvino_training_extensions
| 256 |
python
|
def fcn_head(inputs, num_classes, name, weights_decay=0):
' '
x32 = tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['32x'])
x32_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x32)
x16 = tf.keras.layers.Add()([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['16x']), x32_upscaled])
x16_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x16)
x08 = tf.keras.layers.Add()([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['8x']), x16_upscaled])
x08_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x08)
x04 = tf.keras.layers.Add(name=name)([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['4x']), x08_upscaled])
return x04
|
def fcn_head(inputs, num_classes, name, weights_decay=0):
' '
x32 = tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['32x'])
x32_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x32)
x16 = tf.keras.layers.Add()([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['16x']), x32_upscaled])
x16_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x16)
x08 = tf.keras.layers.Add()([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['8x']), x16_upscaled])
x08_upscaled = tf.keras.layers.UpSampling2D(interpolation='bilinear')(x08)
x04 = tf.keras.layers.Add(name=name)([tf.keras.layers.Conv2D(filters=num_classes, strides=1, kernel_size=1, kernel_regularizer=tf.keras.regularizers.l2(weights_decay))(inputs['4x']), x08_upscaled])
return x04<|docstring|>Defines FCN head.<|endoftext|>
|
d668761fc986d3f3c25f51787f86963832d86dabaeeae3dfaedc933acc93d0c4
|
def pixel_link_model(inputs, config):
' PixelLink architecture. '
if (config['model_type'] == 'mobilenet_v2_ext'):
backbone = mobilenet_v2(inputs, original_stride=False, weights_decay=config['weights_decay'])
elif (config['model_type'] == 'ka_resnet50'):
backbone = keras_applications_resnet50(inputs)
elif (config['model_type'] == 'ka_vgg16'):
backbone = keras_applications_vgg16(inputs)
elif (config['model_type'] == 'ka_mobilenet_v2_1_0'):
backbone = keras_applications_mobilenetv2(inputs, alpha=1.0)
elif (config['model_type'] == 'ka_mobilenet_v2_1_4'):
backbone = keras_applications_mobilenetv2(inputs, alpha=1.4)
elif (config['model_type'] == 'ka_xception'):
backbone = keras_applications_xception(inputs)
segm_logits = fcn_head(backbone, num_classes=2, name='segm_logits', weights_decay=config['weights_decay'])
link_logits = fcn_head(backbone, num_classes=16, name='link_logits_', weights_decay=config['weights_decay'])
new_shape = (tf.shape(link_logits)[1], tf.shape(link_logits)[2], 8, 2)
link_logits = tf.keras.layers.Reshape(new_shape, name='link_logits')(link_logits)
return tf.keras.Model(inputs, [segm_logits, link_logits])
|
PixelLink architecture.
|
tensorflow_toolkit/text_detection/text_detection/model.py
|
pixel_link_model
|
vshampor/openvino_training_extensions
| 256 |
python
|
def pixel_link_model(inputs, config):
' '
if (config['model_type'] == 'mobilenet_v2_ext'):
backbone = mobilenet_v2(inputs, original_stride=False, weights_decay=config['weights_decay'])
elif (config['model_type'] == 'ka_resnet50'):
backbone = keras_applications_resnet50(inputs)
elif (config['model_type'] == 'ka_vgg16'):
backbone = keras_applications_vgg16(inputs)
elif (config['model_type'] == 'ka_mobilenet_v2_1_0'):
backbone = keras_applications_mobilenetv2(inputs, alpha=1.0)
elif (config['model_type'] == 'ka_mobilenet_v2_1_4'):
backbone = keras_applications_mobilenetv2(inputs, alpha=1.4)
elif (config['model_type'] == 'ka_xception'):
backbone = keras_applications_xception(inputs)
segm_logits = fcn_head(backbone, num_classes=2, name='segm_logits', weights_decay=config['weights_decay'])
link_logits = fcn_head(backbone, num_classes=16, name='link_logits_', weights_decay=config['weights_decay'])
new_shape = (tf.shape(link_logits)[1], tf.shape(link_logits)[2], 8, 2)
link_logits = tf.keras.layers.Reshape(new_shape, name='link_logits')(link_logits)
return tf.keras.Model(inputs, [segm_logits, link_logits])
|
def pixel_link_model(inputs, config):
' '
if (config['model_type'] == 'mobilenet_v2_ext'):
backbone = mobilenet_v2(inputs, original_stride=False, weights_decay=config['weights_decay'])
elif (config['model_type'] == 'ka_resnet50'):
backbone = keras_applications_resnet50(inputs)
elif (config['model_type'] == 'ka_vgg16'):
backbone = keras_applications_vgg16(inputs)
elif (config['model_type'] == 'ka_mobilenet_v2_1_0'):
backbone = keras_applications_mobilenetv2(inputs, alpha=1.0)
elif (config['model_type'] == 'ka_mobilenet_v2_1_4'):
backbone = keras_applications_mobilenetv2(inputs, alpha=1.4)
elif (config['model_type'] == 'ka_xception'):
backbone = keras_applications_xception(inputs)
segm_logits = fcn_head(backbone, num_classes=2, name='segm_logits', weights_decay=config['weights_decay'])
link_logits = fcn_head(backbone, num_classes=16, name='link_logits_', weights_decay=config['weights_decay'])
new_shape = (tf.shape(link_logits)[1], tf.shape(link_logits)[2], 8, 2)
link_logits = tf.keras.layers.Reshape(new_shape, name='link_logits')(link_logits)
return tf.keras.Model(inputs, [segm_logits, link_logits])<|docstring|>PixelLink architecture.<|endoftext|>
|
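A hypothetical way to assemble the PixelLink model defined above; the import path is taken from the record's path field, and the input resolution and config values are assumptions (the record does not show the training configuration).

import tensorflow as tf
from text_detection.model import pixel_link_model                     # module path assumed from the record

config = {'model_type': 'mobilenet_v2_ext', 'weights_decay': 1e-4}    # assumed values
inputs = tf.keras.Input(shape=(256, 256, 3))                          # assumed resolution
model = pixel_link_model(inputs, config)
model.summary()                                                       # two outputs: segm_logits and link_logits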
c8469b9f043ac4f20913e6adfb45301c89daf397cde950d9557b7172d978b255
|
@pytest.mark.integration
def test_slack_callback_bot_message_is_ignored(mocker, client, session, patch_slack, create_slack_headers):
'Bot get notified of its own DM replies to users... ignore'
patched_slack = patch_slack('busy_beaver.blueprints.slack.event_subscription')
data = {'type': 'unknown todo', 'event': {'type': 'message', 'subtype': 'bot_message'}}
headers = create_slack_headers(100000000, data)
resp = client.post('/slack/event-subscription', headers=headers, json=data)
assert (resp.status_code == 200)
assert (len(patched_slack.mock.mock_calls) == 0)
|
Bot gets notified of its own DM replies to users... ignore
|
tests/blueprints/slack/event_subscription_test.py
|
test_slack_callback_bot_message_is_ignored
|
Thornycrackers-Forks/busy-beaver
| 0 |
python
|
@pytest.mark.integration
def test_slack_callback_bot_message_is_ignored(mocker, client, session, patch_slack, create_slack_headers):
patched_slack = patch_slack('busy_beaver.blueprints.slack.event_subscription')
data = {'type': 'unknown todo', 'event': {'type': 'message', 'subtype': 'bot_message'}}
headers = create_slack_headers(100000000, data)
resp = client.post('/slack/event-subscription', headers=headers, json=data)
assert (resp.status_code == 200)
assert (len(patched_slack.mock.mock_calls) == 0)
|
@pytest.mark.integration
def test_slack_callback_bot_message_is_ignored(mocker, client, session, patch_slack, create_slack_headers):
patched_slack = patch_slack('busy_beaver.blueprints.slack.event_subscription')
data = {'type': 'unknown todo', 'event': {'type': 'message', 'subtype': 'bot_message'}}
headers = create_slack_headers(100000000, data)
resp = client.post('/slack/event-subscription', headers=headers, json=data)
assert (resp.status_code == 200)
assert (len(patched_slack.mock.mock_calls) == 0)<|docstring|>Bot get notified of its own DM replies to users... ignore<|endoftext|>
|
e5389fec53c48f733f4639385642f7ec83da932b31dcbfc7f18a5b480c5dde2b
|
@pytest.mark.integration
def test_slack_callback_user_dms_bot_reply(mocker, client, session, factory, patch_slack, create_slack_headers):
'When user messages bot, reply with help text'
patched_slack = patch_slack('busy_beaver.blueprints.slack.event_subscription')
factory.SlackInstallation(workspace_id='team_id')
channel = 5
data = {'type': 'event_callback', 'team_id': 'team_id', 'event': {'type': 'message', 'subtype': 'not bot_message', 'channel_type': 'im', 'text': 'random', 'user': 'random_user', 'channel': channel}}
headers = create_slack_headers(100000000, data)
resp = client.post('/slack/event-subscription', headers=headers, json=data)
assert (resp.status_code == 200)
assert (len(patched_slack.mock.mock_calls) == 2)
(args, kwargs) = patched_slack.mock.call_args
assert ('/busybeaver help' in args[0])
assert (kwargs['channel'] == channel)
|
When user messages bot, reply with help text
|
tests/blueprints/slack/event_subscription_test.py
|
test_slack_callback_user_dms_bot_reply
|
Thornycrackers-Forks/busy-beaver
| 0 |
python
|
@pytest.mark.integration
def test_slack_callback_user_dms_bot_reply(mocker, client, session, factory, patch_slack, create_slack_headers):
patched_slack = patch_slack('busy_beaver.blueprints.slack.event_subscription')
factory.SlackInstallation(workspace_id='team_id')
channel = 5
data = {'type': 'event_callback', 'team_id': 'team_id', 'event': {'type': 'message', 'subtype': 'not bot_message', 'channel_type': 'im', 'text': 'random', 'user': 'random_user', 'channel': channel}}
headers = create_slack_headers(100000000, data)
resp = client.post('/slack/event-subscription', headers=headers, json=data)
assert (resp.status_code == 200)
assert (len(patched_slack.mock.mock_calls) == 2)
(args, kwargs) = patched_slack.mock.call_args
assert ('/busybeaver help' in args[0])
assert (kwargs['channel'] == channel)
|
@pytest.mark.integration
def test_slack_callback_user_dms_bot_reply(mocker, client, session, factory, patch_slack, create_slack_headers):
patched_slack = patch_slack('busy_beaver.blueprints.slack.event_subscription')
factory.SlackInstallation(workspace_id='team_id')
channel = 5
data = {'type': 'event_callback', 'team_id': 'team_id', 'event': {'type': 'message', 'subtype': 'not bot_message', 'channel_type': 'im', 'text': 'random', 'user': 'random_user', 'channel': channel}}
headers = create_slack_headers(100000000, data)
resp = client.post('/slack/event-subscription', headers=headers, json=data)
assert (resp.status_code == 200)
assert (len(patched_slack.mock.mock_calls) == 2)
(args, kwargs) = patched_slack.mock.call_args
assert ('/busybeaver help' in args[0])
assert (kwargs['channel'] == channel)<|docstring|>When user messages bot, reply with help text<|endoftext|>
|
d67a15f29e9c78430fbb54a440f84cd81d36d9fee7b4b423d04036032cdf7946
|
@pytest.mark.end2end
def test_slack_onboarding_invite_bot_to_channel(client, session, factory, patch_slack, create_slack_headers):
'TODO deal with situation where bot is invited to multiple channels'
patched_slack = patch_slack('busy_beaver.apps.external_integrations.workflow')
workspace_id = 'TXXXXXXXXX'
authorizing_user_id = 'alysivji'
bot_id = 'test_bot'
channel = 'busy-beaver'
installation = factory.SlackInstallation(authorizing_user_id=authorizing_user_id, state='user_welcomed', workspace_id=workspace_id, workspace_name='Test', bot_user_id=bot_id)
data = {'type': 'event_callback', 'team_id': workspace_id, 'event': {'type': 'member_joined_channel', 'channel_type': 'im', 'user': bot_id, 'channel': channel}}
headers = create_slack_headers(100000000, data)
client.post('/slack/event-subscription', headers=headers, json=data)
installation = SlackInstallation.query.first()
assert (installation.state == 'config_requested')
github_summary_config = GitHubSummaryConfiguration.query.first()
assert (github_summary_config.channel == channel)
(args, kwargs) = patched_slack.mock.call_args
assert ('What time should I post' in args[0])
assert (kwargs['user_id'] == authorizing_user_id)
|
TODO deal with situation where bot is invited to multiple channels
|
tests/blueprints/slack/event_subscription_test.py
|
test_slack_onboarding_invite_bot_to_channel
|
Thornycrackers-Forks/busy-beaver
| 0 |
python
|
@pytest.mark.end2end
def test_slack_onboarding_invite_bot_to_channel(client, session, factory, patch_slack, create_slack_headers):
patched_slack = patch_slack('busy_beaver.apps.external_integrations.workflow')
workspace_id = 'TXXXXXXXXX'
authorizing_user_id = 'alysivji'
bot_id = 'test_bot'
channel = 'busy-beaver'
installation = factory.SlackInstallation(authorizing_user_id=authorizing_user_id, state='user_welcomed', workspace_id=workspace_id, workspace_name='Test', bot_user_id=bot_id)
data = {'type': 'event_callback', 'team_id': workspace_id, 'event': {'type': 'member_joined_channel', 'channel_type': 'im', 'user': bot_id, 'channel': channel}}
headers = create_slack_headers(100000000, data)
client.post('/slack/event-subscription', headers=headers, json=data)
installation = SlackInstallation.query.first()
assert (installation.state == 'config_requested')
github_summary_config = GitHubSummaryConfiguration.query.first()
assert (github_summary_config.channel == channel)
(args, kwargs) = patched_slack.mock.call_args
assert ('What time should I post' in args[0])
assert (kwargs['user_id'] == authorizing_user_id)
|
@pytest.mark.end2end
def test_slack_onboarding_invite_bot_to_channel(client, session, factory, patch_slack, create_slack_headers):
patched_slack = patch_slack('busy_beaver.apps.external_integrations.workflow')
workspace_id = 'TXXXXXXXXX'
authorizing_user_id = 'alysivji'
bot_id = 'test_bot'
channel = 'busy-beaver'
installation = factory.SlackInstallation(authorizing_user_id=authorizing_user_id, state='user_welcomed', workspace_id=workspace_id, workspace_name='Test', bot_user_id=bot_id)
data = {'type': 'event_callback', 'team_id': workspace_id, 'event': {'type': 'member_joined_channel', 'channel_type': 'im', 'user': bot_id, 'channel': channel}}
headers = create_slack_headers(100000000, data)
client.post('/slack/event-subscription', headers=headers, json=data)
installation = SlackInstallation.query.first()
assert (installation.state == 'config_requested')
github_summary_config = GitHubSummaryConfiguration.query.first()
assert (github_summary_config.channel == channel)
(args, kwargs) = patched_slack.mock.call_args
assert ('What time should I post' in args[0])
assert (kwargs['user_id'] == authorizing_user_id)<|docstring|>TODO deal with situation where bot is invited to multiple channels<|endoftext|>
|
040b1c6bca9e0f31b32732ee643ee43f2be1087bc06bef19624112ce86c028a9
|
@pytest.mark.end2end
def test_slack_onboarding_send_bot_configuration(client, session, factory, patch_slack, create_slack_headers):
'TODO deal with situation where bad input is sent'
tz = TimezoneInfo(tz='America/Chicago', label='Central Daylight Time', offset=(- 18000))
patched_slack = patch_slack('busy_beaver.apps.external_integrations.workflow', timezone_info=tz)
workspace_id = 'TXXXXXXXXX'
authorizing_user_id = 'alysivji'
bot_id = 'test_bot'
channel = 'busy-beaver'
time_to_post = '2:00pm'
installation = factory.SlackInstallation(authorizing_user_id=authorizing_user_id, state='config_requested', workspace_id=workspace_id, workspace_name='Test', bot_user_id=bot_id)
github_summary_config = factory.GitHubSummaryConfiguration(channel=channel, slack_installation=installation)
data = {'type': 'event_callback', 'team_id': workspace_id, 'event': {'type': 'message', 'channel_type': 'im', 'text': time_to_post, 'user': authorizing_user_id, 'channel': channel}}
headers = create_slack_headers(100000000, data)
client.post('/slack/event-subscription', headers=headers, json=data)
installation = SlackInstallation.query.first()
assert (installation.state == 'active')
github_summary_config = GitHubSummaryConfiguration.query.first()
assert (github_summary_config.channel == channel)
(args, kwargs) = patched_slack.mock.call_args
assert ('Busy Beaver is now active!' in args[0])
assert (kwargs['user_id'] == authorizing_user_id)
|
TODO deal with situation where bad input is sent
|
tests/blueprints/slack/event_subscription_test.py
|
test_slack_onboarding_send_bot_configuration
|
Thornycrackers-Forks/busy-beaver
| 0 |
python
|
@pytest.mark.end2end
def test_slack_onboarding_send_bot_configuration(client, session, factory, patch_slack, create_slack_headers):
tz = TimezoneInfo(tz='America/Chicago', label='Central Daylight Time', offset=(- 18000))
patched_slack = patch_slack('busy_beaver.apps.external_integrations.workflow', timezone_info=tz)
workspace_id = 'TXXXXXXXXX'
authorizing_user_id = 'alysivji'
bot_id = 'test_bot'
channel = 'busy-beaver'
time_to_post = '2:00pm'
installation = factory.SlackInstallation(authorizing_user_id=authorizing_user_id, state='config_requested', workspace_id=workspace_id, workspace_name='Test', bot_user_id=bot_id)
github_summary_config = factory.GitHubSummaryConfiguration(channel=channel, slack_installation=installation)
data = {'type': 'event_callback', 'team_id': workspace_id, 'event': {'type': 'message', 'channel_type': 'im', 'text': time_to_post, 'user': authorizing_user_id, 'channel': channel}}
headers = create_slack_headers(100000000, data)
client.post('/slack/event-subscription', headers=headers, json=data)
installation = SlackInstallation.query.first()
assert (installation.state == 'active')
github_summary_config = GitHubSummaryConfiguration.query.first()
assert (github_summary_config.channel == channel)
(args, kwargs) = patched_slack.mock.call_args
assert ('Busy Beaver is now active!' in args[0])
assert (kwargs['user_id'] == authorizing_user_id)
|
@pytest.mark.end2end
def test_slack_onboarding_send_bot_configuration(client, session, factory, patch_slack, create_slack_headers):
tz = TimezoneInfo(tz='America/Chicago', label='Central Daylight Time', offset=(- 18000))
patched_slack = patch_slack('busy_beaver.apps.external_integrations.workflow', timezone_info=tz)
workspace_id = 'TXXXXXXXXX'
authorizing_user_id = 'alysivji'
bot_id = 'test_bot'
channel = 'busy-beaver'
time_to_post = '2:00pm'
installation = factory.SlackInstallation(authorizing_user_id=authorizing_user_id, state='config_requested', workspace_id=workspace_id, workspace_name='Test', bot_user_id=bot_id)
github_summary_config = factory.GitHubSummaryConfiguration(channel=channel, slack_installation=installation)
data = {'type': 'event_callback', 'team_id': workspace_id, 'event': {'type': 'message', 'channel_type': 'im', 'text': time_to_post, 'user': authorizing_user_id, 'channel': channel}}
headers = create_slack_headers(100000000, data)
client.post('/slack/event-subscription', headers=headers, json=data)
installation = SlackInstallation.query.first()
assert (installation.state == 'active')
github_summary_config = GitHubSummaryConfiguration.query.first()
assert (github_summary_config.channel == channel)
(args, kwargs) = patched_slack.mock.call_args
assert ('Busy Beaver is now active!' in args[0])
assert (kwargs['user_id'] == authorizing_user_id)<|docstring|>TODO deal with situation where bad input is sent<|endoftext|>
|
eccf832d4c98c024c8721f248408c18e7e9d943622616241862f8a434a274853
|
def __init__(self, connection_string):
'\n Connection uses provided connection string\n :param connection_string:\n '
self.connection_string = connection_string
self.connection = None
self.cursor = None
|
Connection uses provided connection string
:param connection_string:
|
Model/ud_dashboard/lib/python/user_dataset_dashboard/oracle.py
|
__init__
|
EuPathDB-Infra/ApiCommonModel
| 0 |
python
|
def __init__(self, connection_string):
'\n Connection uses provided connection string\n :param connection_string:\n '
self.connection_string = connection_string
self.connection = None
self.cursor = None
|
def __init__(self, connection_string):
'\n Connection uses provided connection string\n :param connection_string:\n '
self.connection_string = connection_string
self.connection = None
self.cursor = None<|docstring|>Connection uses provided connection string
:param connection_string:<|endoftext|>
|
7a84b3223cf490046e9b34e1245294b9ec49fda3a53a72850da18accabb0ebf4
|
def connect(self):
'\n This object instance will create and hold references to the connection and cursor objects. Presumably, no finally\n needed by the calling function since the connection will not have been made if an error is raised here.\n '
try:
self.connection = cx_Oracle.connect(self.connection_string)
except cx_Oracle.DatabaseError as e:
sys.stderr.write('Unable to connect to the account database: {}'.format(e))
raise
self.cursor = self.connection.cursor()
|
This object instance will create and hold references to the connection and cursor objects. Presumably, no finally
needed by the calling function since the connection will not have been made if an error is raised here.
|
Model/ud_dashboard/lib/python/user_dataset_dashboard/oracle.py
|
connect
|
EuPathDB-Infra/ApiCommonModel
| 0 |
python
|
def connect(self):
'\n This object instance will create and hold references to the connection and cursor objects. Presumably, no finally\n needed by the calling function since the connection will not have been made if an error is raised here.\n '
try:
self.connection = cx_Oracle.connect(self.connection_string)
except cx_Oracle.DatabaseError as e:
sys.stderr.write('Unable to connect to the account database: {}'.format(e))
raise
self.cursor = self.connection.cursor()
|
def connect(self):
'\n This object instance will create and hold references to the connection and cursor objects. Presumably, no finally\n needed by the calling function since the connection will not have been made if an error is raised here.\n '
try:
self.connection = cx_Oracle.connect(self.connection_string)
except cx_Oracle.DatabaseError as e:
sys.stderr.write('Unable to connect to the account database: {}'.format(e))
raise
self.cursor = self.connection.cursor()<|docstring|>This object instance will create and hold references to the connection and cursor objects. Presumably, no finally
needed by the calling function since the connection will not have been made if an error is raised here.<|endoftext|>
|
1fb762db4e09599477e074d58b0c794822ca0fc21d2d146451995337ddb2944c
|
def disconnect(self):
'\n This will close cursor and connection objects for this object instance and should be applied in a finally\n clause by the calling function.\n '
try:
self.cursor.close()
self.connection.close()
except cx_Oracle.DatabaseError as e:
pass
|
This will close cursor and connection objects for this object instance and should be applied in a finally
clause by the calling function.
|
Model/ud_dashboard/lib/python/user_dataset_dashboard/oracle.py
|
disconnect
|
EuPathDB-Infra/ApiCommonModel
| 0 |
python
|
def disconnect(self):
'\n This will close cursor and connection objects for this object instance and should be applied in a finally\n clause by the calling function.\n '
try:
self.cursor.close()
self.connection.close()
except cx_Oracle.DatabaseError as e:
pass
|
def disconnect(self):
'\n This will close cursor and connection objects for this object instance and should be applied in a finally\n clause by the calling function.\n '
try:
self.cursor.close()
self.connection.close()
except cx_Oracle.DatabaseError as e:
pass<|docstring|>This will close cursor and connection objects for this object instance and should be applied in a finally
clause by the calling function.<|endoftext|>
|
b11f7b68dbae008ac382d30a43bb2e53a17b6b6c23e8e6669b4e8ce4c8ccba1e
|
def execute(self, sql, bindvars=None):
'\n Exexutes the provided sql using the provided bind variables (in dictionary form). Note that, as this dashboard\n is meant to be read only, there is no need for any commit.\n :param sql:\n :param bindvars:\n '
try:
if bindvars:
self.cursor.execute(sql, bindvars)
else:
self.cursor.execute(sql)
except cx_Oracle.DatabaseError as e:
sys.stderr.write('Unable to execute the query {}'.format(e))
raise
|
Executes the provided sql using the provided bind variables (in dictionary form). Note that, as this dashboard
is meant to be read only, there is no need for any commit.
:param sql:
:param bindvars:
|
Model/ud_dashboard/lib/python/user_dataset_dashboard/oracle.py
|
execute
|
EuPathDB-Infra/ApiCommonModel
| 0 |
python
|
def execute(self, sql, bindvars=None):
'\n Exexutes the provided sql using the provided bind variables (in dictionary form). Note that, as this dashboard\n is meant to be read only, there is no need for any commit.\n :param sql:\n :param bindvars:\n '
try:
if bindvars:
self.cursor.execute(sql, bindvars)
else:
self.cursor.execute(sql)
except cx_Oracle.DatabaseError as e:
sys.stderr.write('Unable to execute the query {}'.format(e))
raise
|
def execute(self, sql, bindvars=None):
'\n Exexutes the provided sql using the provided bind variables (in dictionary form). Note that, as this dashboard\n is meant to be read only, there is no need for any commit.\n :param sql:\n :param bindvars:\n '
try:
if bindvars:
self.cursor.execute(sql, bindvars)
else:
self.cursor.execute(sql)
except cx_Oracle.DatabaseError as e:
sys.stderr.write('Unable to execute the query {}'.format(e))
raise<|docstring|>Exexutes the provided sql using the provided bind variables (in dictionary form). Note that, as this dashboard
is meant to be read only, there is no need for any commit.
:param sql:
:param bindvars:<|endoftext|>
|
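A usage sketch for the Oracle helper above, following its own advice to disconnect in a finally clause; the import path comes from the record's path field, while the connection string, table, and bind variable are placeholders.

from user_dataset_dashboard.oracle import Oracle    # module path assumed from the record

db = Oracle('scott/tiger@dbhost:1521/orclpdb1')      # placeholder connection string
db.connect()
try:
    db.execute('SELECT dataset_id, status FROM user_datasets WHERE user_id = :user_id',
               {'user_id': 42})                      # bind variables passed as a dict
    for row in db.cursor.fetchall():                 # dashboard is read-only, so fetch and never commit
        print(row)
finally:
    db.disconnect()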
1647bdc63f111d984a005644ceee2d7ce59f8f5dd6537c488c0b0a533504f173
|
def __getitem__(self, index):
'\n Args:\n index (int): Index\n Returns:\n tuple: (image, target) where target is index of the target character class.\n '
image = self.data[index]
target = self.targets[index]
return (image, target)
|
Args:
index (int): Index
Returns:
tuple: (image, target) where target is index of the target character class.
|
datasets/omniglot.py
|
__getitem__
|
alessiabertugli/FUSION
| 13 |
python
|
def __getitem__(self, index):
'\n Args:\n index (int): Index\n Returns:\n tuple: (image, target) where target is index of the target character class.\n '
image = self.data[index]
target = self.targets[index]
return (image, target)
|
def __getitem__(self, index):
'\n Args:\n index (int): Index\n Returns:\n tuple: (image, target) where target is index of the target character class.\n '
image = self.data[index]
target = self.targets[index]
return (image, target)<|docstring|>Args:
index (int): Index
Returns:
tuple: (image, target) where target is index of the target character class.<|endoftext|>
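A stand-in sketch (plain PyTorch, nothing Omniglot-specific) showing how a dataset with this __getitem__ contract plugs into a DataLoader; the toy class below is an assumption mirroring the record, not the real dataset.

import torch
from torch.utils.data import Dataset, DataLoader

class TinyPairs(Dataset):
    """Toy dataset mirroring the (image, target) contract of __getitem__ above."""
    def __init__(self):
        self.data = torch.zeros(4, 1, 28, 28)      # four dummy "images"
        self.targets = torch.tensor([0, 1, 2, 3])  # one class index per image
    def __len__(self):
        return len(self.data)
    def __getitem__(self, index):
        return self.data[index], self.targets[index]

loader = DataLoader(TinyPairs(), batch_size=2)
for images, targets in loader:                     # DataLoader stacks the tuples into batches
    assert images.shape == (2, 1, 28, 28)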
|
affdf5063e2c57d553597bad404b7ca62548d87d51c1c2b6a18ca82491458fc2
|
@classmethod
def create_channel(cls, host: str='composer.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: Optional[str]=None, scopes: Optional[Sequence[str]]=None, quota_project_id: Optional[str]=None, **kwargs) -> aio.Channel:
'Create and return a gRPC AsyncIO channel object.\n Args:\n host (Optional[str]): The host for the channel to use.\n credentials (Optional[~.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify this application to the service. If\n none are specified, the client will attempt to ascertain\n the credentials from the environment.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is ignored if ``channel`` is provided.\n scopes (Optional[Sequence[str]]): A optional list of scopes needed for this\n service. These are only used when credentials are not specified and\n are passed to :func:`google.auth.default`.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n kwargs (Optional[dict]): Keyword arguments, which are passed to the\n channel creation.\n Returns:\n aio.Channel: A gRPC AsyncIO channel object.\n '
return grpc_helpers_async.create_channel(host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs)
|
Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
create_channel
|
googleapis/googleapis-gen
| 7 |
python
|
@classmethod
def create_channel(cls, host: str='composer.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: Optional[str]=None, scopes: Optional[Sequence[str]]=None, quota_project_id: Optional[str]=None, **kwargs) -> aio.Channel:
'Create and return a gRPC AsyncIO channel object.\n Args:\n host (Optional[str]): The host for the channel to use.\n credentials (Optional[~.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify this application to the service. If\n none are specified, the client will attempt to ascertain\n the credentials from the environment.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is ignored if ``channel`` is provided.\n scopes (Optional[Sequence[str]]): A optional list of scopes needed for this\n service. These are only used when credentials are not specified and\n are passed to :func:`google.auth.default`.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n kwargs (Optional[dict]): Keyword arguments, which are passed to the\n channel creation.\n Returns:\n aio.Channel: A gRPC AsyncIO channel object.\n '
return grpc_helpers_async.create_channel(host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs)
|
@classmethod
def create_channel(cls, host: str='composer.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: Optional[str]=None, scopes: Optional[Sequence[str]]=None, quota_project_id: Optional[str]=None, **kwargs) -> aio.Channel:
'Create and return a gRPC AsyncIO channel object.\n Args:\n host (Optional[str]): The host for the channel to use.\n credentials (Optional[~.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify this application to the service. If\n none are specified, the client will attempt to ascertain\n the credentials from the environment.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is ignored if ``channel`` is provided.\n scopes (Optional[Sequence[str]]): A optional list of scopes needed for this\n service. These are only used when credentials are not specified and\n are passed to :func:`google.auth.default`.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n kwargs (Optional[dict]): Keyword arguments, which are passed to the\n channel creation.\n Returns:\n aio.Channel: A gRPC AsyncIO channel object.\n '
return grpc_helpers_async.create_channel(host, credentials=credentials, credentials_file=credentials_file, quota_project_id=quota_project_id, default_scopes=cls.AUTH_SCOPES, scopes=scopes, default_host=cls.DEFAULT_HOST, **kwargs)<|docstring|>Create and return a gRPC AsyncIO channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
aio.Channel: A gRPC AsyncIO channel object.<|endoftext|>
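A hedged sketch of calling this classmethod; the module path and class name below are inferred from the record's file path (not confirmed by the record), and Application Default Credentials are assumed to be configured.

import asyncio
from google.cloud.orchestration.airflow.service_v1beta1.services.environments.transports.grpc_asyncio import (
    EnvironmentsGrpcAsyncIOTransport,   # class name inferred from the file path
)

async def main():
    # With no explicit credentials, google-auth falls back to Application Default Credentials.
    channel = EnvironmentsGrpcAsyncIOTransport.create_channel(
        host="composer.googleapis.com",
        scopes=["https://www.googleapis.com/auth/cloud-platform"],
    )
    await channel.close()   # grpc.aio channels are closed asynchronously

asyncio.run(main())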
|
60268c63c732b47769722c02875930f2cae4e1a972e0dc389728bac97027a208
|
def __init__(self, *, host: str='composer.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: Optional[str]=None, scopes: Optional[Sequence[str]]=None, channel: aio.Channel=None, api_mtls_endpoint: str=None, client_cert_source: Callable[([], Tuple[(bytes, bytes)])]=None, ssl_channel_credentials: grpc.ChannelCredentials=None, client_cert_source_for_mtls: Callable[([], Tuple[(bytes, bytes)])]=None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False) -> None:
"Instantiate the transport.\n\n Args:\n host (Optional[str]):\n The hostname to connect to.\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n This argument is ignored if ``channel`` is provided.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is ignored if ``channel`` is provided.\n scopes (Optional[Sequence[str]]): A optional list of scopes needed for this\n service. These are only used when credentials are not specified and\n are passed to :func:`google.auth.default`.\n channel (Optional[aio.Channel]): A ``Channel`` instance through\n which to make calls.\n api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.\n If provided, it overrides the ``host`` argument and tries to create\n a mutual TLS channel with client SSL credentials from\n ``client_cert_source`` or application default SSL credentials.\n client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):\n Deprecated. A callback to provide client SSL certificate bytes and\n private key bytes, both in PEM format. It is ignored if\n ``api_mtls_endpoint`` is None.\n ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials\n for the grpc channel. It is ignored if ``channel`` is provided.\n client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):\n A callback to provide client certificate bytes and private key bytes,\n both in PEM format. It is used to configure a mutual TLS channel. It is\n ignored if ``channel`` or ``ssl_channel_credentials`` is provided.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. If ``None``, then default info will be used.\n Generally, you only need to set this if you're developing\n your own client library.\n always_use_jwt_access (Optional[bool]): Whether self signed JWT should\n be used for service account credentials.\n\n Raises:\n google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport\n creation failed for any reason.\n google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``\n and ``credentials_file`` are passed.\n "
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[(str, Callable)] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn('api_mtls_endpoint is deprecated', DeprecationWarning)
if client_cert_source:
warnings.warn('client_cert_source is deprecated', DeprecationWarning)
if channel:
credentials = False
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
host = api_mtls_endpoint
if client_cert_source:
(cert, key) = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
elif (client_cert_source_for_mtls and (not ssl_channel_credentials)):
(cert, key) = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
super().__init__(host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access)
if (not self._grpc_channel):
self._grpc_channel = type(self).create_channel(self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[('grpc.max_send_message_length', (- 1)), ('grpc.max_receive_message_length', (- 1))])
self._prep_wrapped_messages(client_info)
|
Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
__init__
|
googleapis/googleapis-gen
| 7 |
python
|
def __init__(self, *, host: str='composer.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: Optional[str]=None, scopes: Optional[Sequence[str]]=None, channel: aio.Channel=None, api_mtls_endpoint: str=None, client_cert_source: Callable[([], Tuple[(bytes, bytes)])]=None, ssl_channel_credentials: grpc.ChannelCredentials=None, client_cert_source_for_mtls: Callable[([], Tuple[(bytes, bytes)])]=None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False) -> None:
"Instantiate the transport.\n\n Args:\n host (Optional[str]):\n The hostname to connect to.\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n This argument is ignored if ``channel`` is provided.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is ignored if ``channel`` is provided.\n scopes (Optional[Sequence[str]]): A optional list of scopes needed for this\n service. These are only used when credentials are not specified and\n are passed to :func:`google.auth.default`.\n channel (Optional[aio.Channel]): A ``Channel`` instance through\n which to make calls.\n api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.\n If provided, it overrides the ``host`` argument and tries to create\n a mutual TLS channel with client SSL credentials from\n ``client_cert_source`` or application default SSL credentials.\n client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):\n Deprecated. A callback to provide client SSL certificate bytes and\n private key bytes, both in PEM format. It is ignored if\n ``api_mtls_endpoint`` is None.\n ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials\n for the grpc channel. It is ignored if ``channel`` is provided.\n client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):\n A callback to provide client certificate bytes and private key bytes,\n both in PEM format. It is used to configure a mutual TLS channel. It is\n ignored if ``channel`` or ``ssl_channel_credentials`` is provided.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. If ``None``, then default info will be used.\n Generally, you only need to set this if you're developing\n your own client library.\n always_use_jwt_access (Optional[bool]): Whether self signed JWT should\n be used for service account credentials.\n\n Raises:\n google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport\n creation failed for any reason.\n google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``\n and ``credentials_file`` are passed.\n "
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[(str, Callable)] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn('api_mtls_endpoint is deprecated', DeprecationWarning)
if client_cert_source:
warnings.warn('client_cert_source is deprecated', DeprecationWarning)
if channel:
credentials = False
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
host = api_mtls_endpoint
if client_cert_source:
(cert, key) = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
elif (client_cert_source_for_mtls and (not ssl_channel_credentials)):
(cert, key) = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
super().__init__(host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access)
if (not self._grpc_channel):
self._grpc_channel = type(self).create_channel(self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[('grpc.max_send_message_length', (- 1)), ('grpc.max_receive_message_length', (- 1))])
self._prep_wrapped_messages(client_info)
|
def __init__(self, *, host: str='composer.googleapis.com', credentials: ga_credentials.Credentials=None, credentials_file: Optional[str]=None, scopes: Optional[Sequence[str]]=None, channel: aio.Channel=None, api_mtls_endpoint: str=None, client_cert_source: Callable[([], Tuple[(bytes, bytes)])]=None, ssl_channel_credentials: grpc.ChannelCredentials=None, client_cert_source_for_mtls: Callable[([], Tuple[(bytes, bytes)])]=None, quota_project_id=None, client_info: gapic_v1.client_info.ClientInfo=DEFAULT_CLIENT_INFO, always_use_jwt_access: Optional[bool]=False) -> None:
"Instantiate the transport.\n\n Args:\n host (Optional[str]):\n The hostname to connect to.\n credentials (Optional[google.auth.credentials.Credentials]): The\n authorization credentials to attach to requests. These\n credentials identify the application to the service; if none\n are specified, the client will attempt to ascertain the\n credentials from the environment.\n This argument is ignored if ``channel`` is provided.\n credentials_file (Optional[str]): A file with credentials that can\n be loaded with :func:`google.auth.load_credentials_from_file`.\n This argument is ignored if ``channel`` is provided.\n scopes (Optional[Sequence[str]]): A optional list of scopes needed for this\n service. These are only used when credentials are not specified and\n are passed to :func:`google.auth.default`.\n channel (Optional[aio.Channel]): A ``Channel`` instance through\n which to make calls.\n api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.\n If provided, it overrides the ``host`` argument and tries to create\n a mutual TLS channel with client SSL credentials from\n ``client_cert_source`` or application default SSL credentials.\n client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):\n Deprecated. A callback to provide client SSL certificate bytes and\n private key bytes, both in PEM format. It is ignored if\n ``api_mtls_endpoint`` is None.\n ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials\n for the grpc channel. It is ignored if ``channel`` is provided.\n client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):\n A callback to provide client certificate bytes and private key bytes,\n both in PEM format. It is used to configure a mutual TLS channel. It is\n ignored if ``channel`` or ``ssl_channel_credentials`` is provided.\n quota_project_id (Optional[str]): An optional project to use for billing\n and quota.\n client_info (google.api_core.gapic_v1.client_info.ClientInfo):\n The client info used to send a user-agent string along with\n API requests. If ``None``, then default info will be used.\n Generally, you only need to set this if you're developing\n your own client library.\n always_use_jwt_access (Optional[bool]): Whether self signed JWT should\n be used for service account credentials.\n\n Raises:\n google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport\n creation failed for any reason.\n google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``\n and ``credentials_file`` are passed.\n "
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[(str, Callable)] = {}
self._operations_client = None
if api_mtls_endpoint:
warnings.warn('api_mtls_endpoint is deprecated', DeprecationWarning)
if client_cert_source:
warnings.warn('client_cert_source is deprecated', DeprecationWarning)
if channel:
credentials = False
self._grpc_channel = channel
self._ssl_channel_credentials = None
elif api_mtls_endpoint:
host = api_mtls_endpoint
if client_cert_source:
(cert, key) = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
elif (client_cert_source_for_mtls and (not ssl_channel_credentials)):
(cert, key) = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(certificate_chain=cert, private_key=key)
super().__init__(host=host, credentials=credentials, credentials_file=credentials_file, scopes=scopes, quota_project_id=quota_project_id, client_info=client_info, always_use_jwt_access=always_use_jwt_access)
if (not self._grpc_channel):
self._grpc_channel = type(self).create_channel(self._host, credentials=self._credentials, credentials_file=credentials_file, scopes=self._scopes, ssl_credentials=self._ssl_channel_credentials, quota_project_id=quota_project_id, options=[('grpc.max_send_message_length', (- 1)), ('grpc.max_receive_message_length', (- 1))])
self._prep_wrapped_messages(client_info)<|docstring|>Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
channel (Optional[aio.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for the grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure a mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.<|endoftext|>
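Continuing the same assumptions (inferred class name, ADC available), a minimal instantiation sketch for the transport above.

async def build_transport():
    transport = EnvironmentsGrpcAsyncIOTransport(
        host="composer.googleapis.com",
        quota_project_id="my-billing-project",   # hypothetical billing project
    )
    # When no channel is supplied, __init__ builds one via create_channel() and caches it;
    # the grpc_channel property in the next record simply returns that cached channel.
    return transport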
|
b4706b112bdf5213bba9b3170f5e35e2694e9dbcf95da980c35d623cbf73ef4a
|
@property
def grpc_channel(self) -> aio.Channel:
'Create the channel designed to connect to this service.\n\n This property caches on the instance; repeated calls return\n the same channel.\n '
return self._grpc_channel
|
Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
grpc_channel
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def grpc_channel(self) -> aio.Channel:
'Create the channel designed to connect to this service.\n\n This property caches on the instance; repeated calls return\n the same channel.\n '
return self._grpc_channel
|
@property
def grpc_channel(self) -> aio.Channel:
'Create the channel designed to connect to this service.\n\n This property caches on the instance; repeated calls return\n the same channel.\n '
return self._grpc_channel<|docstring|>Create the channel designed to connect to this service.
This property caches on the instance; repeated calls return
the same channel.<|endoftext|>
|
23f84c769da8432ba634129faf15e1d0bf2c4264956bb081e11a64c5222ec789
|
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
'Create the client designed to process long-running operations.\n\n This property caches on the instance; repeated calls return the same\n client.\n '
if (self._operations_client is None):
self._operations_client = operations_v1.OperationsAsyncClient(self.grpc_channel)
return self._operations_client
|
Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
operations_client
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
'Create the client designed to process long-running operations.\n\n This property caches on the instance; repeated calls return the same\n client.\n '
if (self._operations_client is None):
self._operations_client = operations_v1.OperationsAsyncClient(self.grpc_channel)
return self._operations_client
|
@property
def operations_client(self) -> operations_v1.OperationsAsyncClient:
'Create the client designed to process long-running operations.\n\n This property caches on the instance; repeated calls return the same\n client.\n '
if (self._operations_client is None):
self._operations_client = operations_v1.OperationsAsyncClient(self.grpc_channel)
return self._operations_client<|docstring|>Create the client designed to process long-running operations.
This property caches on the instance; repeated calls return the same
client.<|endoftext|>
|
3286d0ac68f3efaa6e0d0c40520eb65383cd5e236fd71b2a42e3997914f2aa0d
|
@property
def create_environment(self) -> Callable[([environments.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the create environment method over gRPC.\n\n Create a new environment.\n\n Returns:\n Callable[[~.CreateEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('create_environment' not in self._stubs):
self._stubs['create_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/CreateEnvironment', request_serializer=environments.CreateEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['create_environment']
|
Return a callable for the create environment method over gRPC.
Create a new environment.
Returns:
Callable[[~.CreateEnvironmentRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
create_environment
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def create_environment(self) -> Callable[([environments.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the create environment method over gRPC.\n\n Create a new environment.\n\n Returns:\n Callable[[~.CreateEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('create_environment' not in self._stubs):
self._stubs['create_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/CreateEnvironment', request_serializer=environments.CreateEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['create_environment']
|
@property
def create_environment(self) -> Callable[([environments.CreateEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the create environment method over gRPC.\n\n Create a new environment.\n\n Returns:\n Callable[[~.CreateEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('create_environment' not in self._stubs):
self._stubs['create_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/CreateEnvironment', request_serializer=environments.CreateEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['create_environment']<|docstring|>Return a callable for the create environment method over gRPC.
Create a new environment.
Returns:
Callable[[~.CreateEnvironmentRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.<|endoftext|>
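A hedged sketch of invoking the cached stub returned by this property; the request field used below (parent) is an assumption about the proto, and the types import mirrors the usual layout of these generated packages rather than anything shown in the record.

from google.cloud.orchestration.airflow.service_v1beta1.types import environments

async def create(transport):
    request = environments.CreateEnvironmentRequest(
        parent="projects/my-project/locations/us-central1",   # hypothetical resource name
    )
    # The property builds the unary-unary callable once, stores it in _stubs,
    # and returns it; awaiting the call yields a long-running Operation message.
    return await transport.create_environment(request)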
|
045d32454f05a4846d17ec7b14c9b4691cbe732419df4fc7d189fe41fda7f15d
|
@property
def get_environment(self) -> Callable[([environments.GetEnvironmentRequest], Awaitable[environments.Environment])]:
'Return a callable for the get environment method over gRPC.\n\n Get an existing environment.\n\n Returns:\n Callable[[~.GetEnvironmentRequest],\n Awaitable[~.Environment]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('get_environment' not in self._stubs):
self._stubs['get_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/GetEnvironment', request_serializer=environments.GetEnvironmentRequest.serialize, response_deserializer=environments.Environment.deserialize)
return self._stubs['get_environment']
|
Return a callable for the get environment method over gRPC.
Get an existing environment.
Returns:
Callable[[~.GetEnvironmentRequest],
Awaitable[~.Environment]]:
A function that, when called, will call the underlying RPC
on the server.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
get_environment
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def get_environment(self) -> Callable[([environments.GetEnvironmentRequest], Awaitable[environments.Environment])]:
'Return a callable for the get environment method over gRPC.\n\n Get an existing environment.\n\n Returns:\n Callable[[~.GetEnvironmentRequest],\n Awaitable[~.Environment]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('get_environment' not in self._stubs):
self._stubs['get_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/GetEnvironment', request_serializer=environments.GetEnvironmentRequest.serialize, response_deserializer=environments.Environment.deserialize)
return self._stubs['get_environment']
|
@property
def get_environment(self) -> Callable[([environments.GetEnvironmentRequest], Awaitable[environments.Environment])]:
'Return a callable for the get environment method over gRPC.\n\n Get an existing environment.\n\n Returns:\n Callable[[~.GetEnvironmentRequest],\n Awaitable[~.Environment]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('get_environment' not in self._stubs):
self._stubs['get_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/GetEnvironment', request_serializer=environments.GetEnvironmentRequest.serialize, response_deserializer=environments.Environment.deserialize)
return self._stubs['get_environment']<|docstring|>Return a callable for the get environment method over gRPC.
Get an existing environment.
Returns:
Callable[[~.GetEnvironmentRequest],
Awaitable[~.Environment]]:
A function that, when called, will call the underlying RPC
on the server.<|endoftext|>
|
356f28469b1063eb4bde2c4840d14e3c19256b3f1b6ec0ca9031771f268a736a
|
@property
def list_environments(self) -> Callable[([environments.ListEnvironmentsRequest], Awaitable[environments.ListEnvironmentsResponse])]:
'Return a callable for the list environments method over gRPC.\n\n List environments.\n\n Returns:\n Callable[[~.ListEnvironmentsRequest],\n Awaitable[~.ListEnvironmentsResponse]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('list_environments' not in self._stubs):
self._stubs['list_environments'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/ListEnvironments', request_serializer=environments.ListEnvironmentsRequest.serialize, response_deserializer=environments.ListEnvironmentsResponse.deserialize)
return self._stubs['list_environments']
|
Return a callable for the list environments method over gRPC.
List environments.
Returns:
Callable[[~.ListEnvironmentsRequest],
Awaitable[~.ListEnvironmentsResponse]]:
A function that, when called, will call the underlying RPC
on the server.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
list_environments
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def list_environments(self) -> Callable[([environments.ListEnvironmentsRequest], Awaitable[environments.ListEnvironmentsResponse])]:
'Return a callable for the list environments method over gRPC.\n\n List environments.\n\n Returns:\n Callable[[~.ListEnvironmentsRequest],\n Awaitable[~.ListEnvironmentsResponse]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('list_environments' not in self._stubs):
self._stubs['list_environments'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/ListEnvironments', request_serializer=environments.ListEnvironmentsRequest.serialize, response_deserializer=environments.ListEnvironmentsResponse.deserialize)
return self._stubs['list_environments']
|
@property
def list_environments(self) -> Callable[([environments.ListEnvironmentsRequest], Awaitable[environments.ListEnvironmentsResponse])]:
'Return a callable for the list environments method over gRPC.\n\n List environments.\n\n Returns:\n Callable[[~.ListEnvironmentsRequest],\n Awaitable[~.ListEnvironmentsResponse]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('list_environments' not in self._stubs):
self._stubs['list_environments'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/ListEnvironments', request_serializer=environments.ListEnvironmentsRequest.serialize, response_deserializer=environments.ListEnvironmentsResponse.deserialize)
return self._stubs['list_environments']<|docstring|>Return a callable for the list environments method over gRPC.
List environments.
Returns:
Callable[[~.ListEnvironmentsRequest],
Awaitable[~.ListEnvironmentsResponse]]:
A function that, when called, will call the underlying RPC
on the server.<|endoftext|>
|
c4c6d154279140e3e8ae91525269fc9ae7e327a070e77baf3cc73818e202c405
|
@property
def update_environment(self) -> Callable[([environments.UpdateEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the update environment method over gRPC.\n\n Update an environment.\n\n Returns:\n Callable[[~.UpdateEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('update_environment' not in self._stubs):
self._stubs['update_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/UpdateEnvironment', request_serializer=environments.UpdateEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['update_environment']
|
Return a callable for the update environment method over gRPC.
Update an environment.
Returns:
Callable[[~.UpdateEnvironmentRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
update_environment
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def update_environment(self) -> Callable[([environments.UpdateEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the update environment method over gRPC.\n\n Update an environment.\n\n Returns:\n Callable[[~.UpdateEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('update_environment' not in self._stubs):
self._stubs['update_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/UpdateEnvironment', request_serializer=environments.UpdateEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['update_environment']
|
@property
def update_environment(self) -> Callable[([environments.UpdateEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the update environment method over gRPC.\n\n Update an environment.\n\n Returns:\n Callable[[~.UpdateEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('update_environment' not in self._stubs):
self._stubs['update_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/UpdateEnvironment', request_serializer=environments.UpdateEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['update_environment']<|docstring|>Return a callable for the update environment method over gRPC.
Update an environment.
Returns:
Callable[[~.UpdateEnvironmentRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.<|endoftext|>
|
a1250e3038466b24092d704667ecd777d2f3f10badc554608772862040c2a7b3
|
@property
def delete_environment(self) -> Callable[([environments.DeleteEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the delete environment method over gRPC.\n\n Delete an environment.\n\n Returns:\n Callable[[~.DeleteEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('delete_environment' not in self._stubs):
self._stubs['delete_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/DeleteEnvironment', request_serializer=environments.DeleteEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['delete_environment']
|
Return a callable for the delete environment method over gRPC.
Delete an environment.
Returns:
Callable[[~.DeleteEnvironmentRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
delete_environment
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def delete_environment(self) -> Callable[([environments.DeleteEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the delete environment method over gRPC.\n\n Delete an environment.\n\n Returns:\n Callable[[~.DeleteEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('delete_environment' not in self._stubs):
self._stubs['delete_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/DeleteEnvironment', request_serializer=environments.DeleteEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['delete_environment']
|
@property
def delete_environment(self) -> Callable[([environments.DeleteEnvironmentRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the delete environment method over gRPC.\n\n Delete an environment.\n\n Returns:\n Callable[[~.DeleteEnvironmentRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('delete_environment' not in self._stubs):
self._stubs['delete_environment'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/DeleteEnvironment', request_serializer=environments.DeleteEnvironmentRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['delete_environment']<|docstring|>Return a callable for the delete environment method over gRPC.
Delete an environment.
Returns:
Callable[[~.DeleteEnvironmentRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.<|endoftext|>
|
9674e4b62e87f8c9bce79c5463428a7511706d2280a902548dab11a32ec38d08
|
@property
def restart_web_server(self) -> Callable[([environments.RestartWebServerRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the restart web server method over gRPC.\n\n Restart Airflow web server.\n\n Returns:\n Callable[[~.RestartWebServerRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('restart_web_server' not in self._stubs):
self._stubs['restart_web_server'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/RestartWebServer', request_serializer=environments.RestartWebServerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['restart_web_server']
|
Return a callable for the restart web server method over gRPC.
Restart Airflow web server.
Returns:
Callable[[~.RestartWebServerRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
restart_web_server
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def restart_web_server(self) -> Callable[([environments.RestartWebServerRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the restart web server method over gRPC.\n\n Restart Airflow web server.\n\n Returns:\n Callable[[~.RestartWebServerRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('restart_web_server' not in self._stubs):
self._stubs['restart_web_server'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/RestartWebServer', request_serializer=environments.RestartWebServerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['restart_web_server']
|
@property
def restart_web_server(self) -> Callable[([environments.RestartWebServerRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the restart web server method over gRPC.\n\n Restart Airflow web server.\n\n Returns:\n Callable[[~.RestartWebServerRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('restart_web_server' not in self._stubs):
self._stubs['restart_web_server'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/RestartWebServer', request_serializer=environments.RestartWebServerRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['restart_web_server']<|docstring|>Return a callable for the restart web server method over gRPC.
Restart Airflow web server.
Returns:
Callable[[~.RestartWebServerRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.<|endoftext|>
|
a7a8ef7ebe8d2341b3cf36531829bc412a0b8e19d5680a4ced37134c589d3002
|
@property
def check_upgrade(self) -> Callable[([environments.CheckUpgradeRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the check upgrade method over gRPC.\n\n Check if an upgrade operation on the environment will\n succeed.\n In case of problems detailed info can be found in the\n returned Operation.\n\n Returns:\n Callable[[~.CheckUpgradeRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('check_upgrade' not in self._stubs):
self._stubs['check_upgrade'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/CheckUpgrade', request_serializer=environments.CheckUpgradeRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['check_upgrade']
|
Return a callable for the check upgrade method over gRPC.
Check if an upgrade operation on the environment will
succeed.
In case of problems detailed info can be found in the
returned Operation.
Returns:
Callable[[~.CheckUpgradeRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.
|
google/cloud/orchestration/airflow/service/v1beta1/airflow-service-v1beta1-py/google/cloud/orchestration/airflow/service_v1beta1/services/environments/transports/grpc_asyncio.py
|
check_upgrade
|
googleapis/googleapis-gen
| 7 |
python
|
@property
def check_upgrade(self) -> Callable[([environments.CheckUpgradeRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the check upgrade method over gRPC.\n\n Check if an upgrade operation on the environment will\n succeed.\n In case of problems detailed info can be found in the\n returned Operation.\n\n Returns:\n Callable[[~.CheckUpgradeRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('check_upgrade' not in self._stubs):
self._stubs['check_upgrade'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/CheckUpgrade', request_serializer=environments.CheckUpgradeRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['check_upgrade']
|
@property
def check_upgrade(self) -> Callable[([environments.CheckUpgradeRequest], Awaitable[operations_pb2.Operation])]:
'Return a callable for the check upgrade method over gRPC.\n\n Check if an upgrade operation on the environment will\n succeed.\n In case of problems detailed info can be found in the\n returned Operation.\n\n Returns:\n Callable[[~.CheckUpgradeRequest],\n Awaitable[~.Operation]]:\n A function that, when called, will call the underlying RPC\n on the server.\n '
if ('check_upgrade' not in self._stubs):
self._stubs['check_upgrade'] = self.grpc_channel.unary_unary('/google.cloud.orchestration.airflow.service.v1beta1.Environments/CheckUpgrade', request_serializer=environments.CheckUpgradeRequest.serialize, response_deserializer=operations_pb2.Operation.FromString)
return self._stubs['check_upgrade']<|docstring|>Return a callable for the check upgrade method over gRPC.
Check if an upgrade operation on the environment will
succeed.
In case of problems detailed info can be found in the
returned Operation.
Returns:
Callable[[~.CheckUpgradeRequest],
Awaitable[~.Operation]]:
A function that, when called, will call the underlying RPC
on the server.<|endoftext|>
|
747dfbf26dc0f55a8f88df962a2df92dd3a9c87492431a49aeb58f9869c64ffb
|
def sort(self, xs: List[C], inplace: bool=False) -> List[C]:
'\n Generic, non-mutating sort method which allows for inplace modification of the inplace option.\n\n :param xs: The array to be sorted\n :param inplace: if set to True, we make modification inplace.\n :return: The sorted result, by default leaving xs unchanged.\n '
self.get_helper().set_n(len(xs))
result = (xs if inplace else xs[:])
self._sort(result, 0, len(result))
return result
|
Generic, non-mutating sort method which allows for inplace modification of the inplace option.
:param xs: The array to be sorted
:param inplace: if set to True, we make modification inplace.
:return: The sorted result, by default leaving xs unchanged.
|
Python/src/sort/simple/sort.py
|
sort
|
dancincloud/info6205
| 3 |
python
|
def sort(self, xs: List[C], inplace: bool=False) -> List[C]:
'\n Generic, non-mutating sort method which allows for inplace modification of the inplace option.\n\n :param xs: The array to be sorted\n :param inplace: if set to True, we make modification inplace.\n :return: The sorted result, by default leaving xs unchanged.\n '
self.get_helper().set_n(len(xs))
result = (xs if inplace else xs[:])
self._sort(result, 0, len(result))
return result
|
def sort(self, xs: List[C], inplace: bool=False) -> List[C]:
'\n Generic, non-mutating sort method which allows for inplace modification of the inplace option.\n\n :param xs: The array to be sorted\n :param inplace: if set to True, we make modification inplace.\n :return: The sorted result, by default leaving xs unchanged.\n '
self.get_helper().set_n(len(xs))
result = (xs if inplace else xs[:])
self._sort(result, 0, len(result))
return result<|docstring|>Generic, non-mutating sort method which allows for inplace modification of the inplace option.
:param xs: The array to be sorted
:param inplace: if set to True, we make modification inplace.
:return: The sorted result, by default leaving xs unchanged.<|endoftext|>
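A hedged sketch of a concrete subclass, assuming the Sort base class above is importable; Helper is only referenced in these records, so _StubHelper below is a made-up stand-in exposing the set_n() call that sort() makes.

class InsertionSort(Sort):
    def __init__(self, helper):
        self._helper = helper

    def get_helper(self):
        return self._helper

    def _sort(self, xs, _from, _to):
        # Straight insertion sort on xs[_from:_to], mutating xs in place.
        for i in range(_from + 1, _to):
            j = i
            while j > _from and xs[j] < xs[j - 1]:
                xs[j], xs[j - 1] = xs[j - 1], xs[j]
                j -= 1

class _StubHelper:
    def set_n(self, n):          # minimal stand-in for Helper.set_n
        self.n = n

sorter = InsertionSort(_StubHelper())
print(sorter.sort([3, 1, 2]))    # [1, 2, 3]; the input list itself is left unchanged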
|
eece40d9a7c0bfde013e916bcef4b16cb1f0b799fa956299aaff2ce6c6b05711
|
@abstractmethod
def _sort(self, xs: List[C], _from: int, _to: int) -> None:
'\n Generic, mutating sort method which operates on a sub-array.\n\n :param xs: Sort the array xs from "from" to "to".\n :param _from: the index of the first element to sort\n :param _to: the index of the first element not to sort\n '
pass
|
Generic, mutating sort method which operates on a sub-array.
:param xs: Sort the array xs from "from" to "to".
:param _from: the index of the first element to sort
:param _to: the index of the first element not to sort
|
Python/src/sort/simple/sort.py
|
_sort
|
dancincloud/info6205
| 3 |
python
|
@abstractmethod
def _sort(self, xs: List[C], _from: int, _to: int) -> None:
'\n Generic, mutating sort method which operates on a sub-array.\n\n :param xs: Sort the array xs from "from" to "to".\n :param _from: the index of the first element to sort\n :param _to: the index of the first element not to sort\n '
pass
|
@abstractmethod
def _sort(self, xs: List[C], _from: int, _to: int) -> None:
'\n Generic, mutating sort method which operates on a sub-array.\n\n :param xs: Sort the array xs from "from" to "to".\n :param _from: the index of the first element to sort\n :param _to: the index of the first element not to sort\n '
pass<|docstring|>Generic, mutating sort method which operates on a sub-array.
:param xs: Sort the array xs from "from" to "to".
:param _from: the index of the first element to sort
:param _to: the index of the first element not to sort<|endoftext|>
|
1f68624aef7751493aa89af5d61847c6519a6a99d0fce53a2561b8b6a6d757de
|
@abstractmethod
def get_helper(self) -> Helper[C]:
'\n Get the Helper associated with this Sort.\n\n :return: the Helper\n '
pass
|
Get the Helper associated with this Sort.
:return: the Helper
|
Python/src/sort/simple/sort.py
|
get_helper
|
dancincloud/info6205
| 3 |
python
|
@abstractmethod
def get_helper(self) -> Helper[C]:
'\n Get the Helper associated with this Sort.\n\n :return: the Helper\n '
pass
|
@abstractmethod
def get_helper(self) -> Helper[C]:
'\n Get the Helper associated with this Sort.\n\n :return: the Helper\n '
pass<|docstring|>Get the Helper associated with this Sort.
:return: the Helper<|endoftext|>
|
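Taken together, the three sort.py records above spell out the template: the public sort() registers the input size with the Helper, optionally copies the list, and delegates to _sort() over the half-open range [_from, _to), which concrete subclasses supply. A minimal concrete subclass could look like the sketch below; the class name Sort, the Helper type and set_n() come from the records above, while the constructor wiring is an illustrative assumption, not part of the repository.

from typing import List

class InsertionSort(Sort):
    def __init__(self, helper: Helper):
        # Assumption: the concrete sort simply holds on to the Helper it is given.
        self._helper = helper

    def get_helper(self) -> Helper:
        return self._helper

    def _sort(self, xs: List, _from: int, _to: int) -> None:
        # Insertion sort over the half-open range [_from, _to), mutating xs in place.
        for i in range(_from + 1, _to):
            j = i
            while j > _from and xs[j] < xs[j - 1]:
                xs[j], xs[j - 1] = xs[j - 1], xs[j]
                j -= 1

Calling InsertionSort(helper).sort([3, 1, 2]) would then return [1, 2, 3] while leaving the input list unchanged, since inplace defaults to False.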
c631403a158ec69ad1e948941f11185a77b3d4cbc4f4957e8d9d26bc53cf9cbf
|
def url_for_version(self, version):
'Handle ParaView version-based custom URLs.'
if (version < Version('5.1.0')):
return self._urlfmt_gz.format(version.up_to(2), version, '-source')
elif (version < Version('5.6.0')):
return self._urlfmt_gz.format(version.up_to(2), version, '')
else:
return self._urlfmt_xz.format(version.up_to(2), version, '')
|
Handle ParaView version-based custom URLs.
|
var/spack/repos/builtin/packages/catalyst/package.py
|
url_for_version
|
jdeaton/spack
| 2 |
python
|
def url_for_version(self, version):
if (version < Version('5.1.0')):
return self._urlfmt_gz.format(version.up_to(2), version, '-source')
elif (version < Version('5.6.0')):
        return self._urlfmt_gz.format(version.up_to(2), version, '')
    else:
        return self._urlfmt_xz.format(version.up_to(2), version, '')
|
def url_for_version(self, version):
if (version < Version('5.1.0')):
return self._urlfmt_gz.format(version.up_to(2), version, '-source')
elif (version < Version('5.6.0')):
        return self._urlfmt_gz.format(version.up_to(2), version, '')
    else:
        return self._urlfmt_xz.format(version.up_to(2), version, '')<|docstring|>Handle ParaView version-based custom URLs.<|endoftext|>
|
37566c27ced018738da63a48704310549174b09f539778c303cf96470b3e7489
|
def do_stage(self, mirror_only=False):
'Unpacks and expands the fetched tarball.\n Then, generate the catalyst source files.'
super(Catalyst, self).do_stage(mirror_only)
paraview_dir = os.path.join(self.stage.path, ('ParaView-v' + str(self.version)))
catalyst_script = os.path.join(paraview_dir, 'Catalyst', 'catalyze.py')
catalyst_source_dir = os.path.abspath(self.root_cmakelists_dir)
command = ['python', catalyst_script, '-r', paraview_dir]
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Base')
command.append('-i')
command.append(catalyst_edition)
if ('+python' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Enable-Python')
command.append('-i')
command.append(catalyst_edition)
if ('+essentials' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Essentials')
command.append('-i')
command.append(catalyst_edition)
if ('+extras' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Extras')
command.append('-i')
command.append(catalyst_edition)
if ('+rendering' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Rendering-Base')
command.append('-i')
command.append(catalyst_edition)
command.append('-o')
command.append(catalyst_source_dir)
if (not os.path.isdir(catalyst_source_dir)):
os.mkdir(catalyst_source_dir)
subprocess.check_call(command)
tty.msg(('Generated catalyst source in %s' % self.stage.path))
else:
tty.msg(('Already generated %s in %s' % (self.name, self.stage.path)))
|
Unpacks and expands the fetched tarball.
Then, generate the catalyst source files.
|
var/spack/repos/builtin/packages/catalyst/package.py
|
do_stage
|
jdeaton/spack
| 2 |
python
|
def do_stage(self, mirror_only=False):
'Unpacks and expands the fetched tarball.\n Then, generate the catalyst source files.'
super(Catalyst, self).do_stage(mirror_only)
paraview_dir = os.path.join(self.stage.path, ('ParaView-v' + str(self.version)))
catalyst_script = os.path.join(paraview_dir, 'Catalyst', 'catalyze.py')
catalyst_source_dir = os.path.abspath(self.root_cmakelists_dir)
command = ['python', catalyst_script, '-r', paraview_dir]
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Base')
command.append('-i')
command.append(catalyst_edition)
if ('+python' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Enable-Python')
command.append('-i')
command.append(catalyst_edition)
if ('+essentials' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Essentials')
command.append('-i')
command.append(catalyst_edition)
if ('+extras' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Extras')
command.append('-i')
command.append(catalyst_edition)
if ('+rendering' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Rendering-Base')
command.append('-i')
command.append(catalyst_edition)
command.append('-o')
command.append(catalyst_source_dir)
if (not os.path.isdir(catalyst_source_dir)):
os.mkdir(catalyst_source_dir)
subprocess.check_call(command)
tty.msg(('Generated catalyst source in %s' % self.stage.path))
else:
tty.msg(('Already generated %s in %s' % (self.name, self.stage.path)))
|
def do_stage(self, mirror_only=False):
'Unpacks and expands the fetched tarball.\n Then, generate the catalyst source files.'
super(Catalyst, self).do_stage(mirror_only)
paraview_dir = os.path.join(self.stage.path, ('ParaView-v' + str(self.version)))
catalyst_script = os.path.join(paraview_dir, 'Catalyst', 'catalyze.py')
catalyst_source_dir = os.path.abspath(self.root_cmakelists_dir)
command = ['python', catalyst_script, '-r', paraview_dir]
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Base')
command.append('-i')
command.append(catalyst_edition)
if ('+python' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Enable-Python')
command.append('-i')
command.append(catalyst_edition)
if ('+essentials' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Essentials')
command.append('-i')
command.append(catalyst_edition)
if ('+extras' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Extras')
command.append('-i')
command.append(catalyst_edition)
if ('+rendering' in self.spec):
catalyst_edition = os.path.join(paraview_dir, 'Catalyst', 'Editions', 'Rendering-Base')
command.append('-i')
command.append(catalyst_edition)
command.append('-o')
command.append(catalyst_source_dir)
if (not os.path.isdir(catalyst_source_dir)):
os.mkdir(catalyst_source_dir)
subprocess.check_call(command)
tty.msg(('Generated catalyst source in %s' % self.stage.path))
else:
tty.msg(('Already generated %s in %s' % (self.name, self.stage.path)))<|docstring|>Unpacks and expands the fetched tarball.
Then, generate the catalyst source files.<|endoftext|>
|
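The do_stage() record above ultimately reduces to a single catalyze.py invocation against the unpacked ParaView tree. For a hypothetical spec with only +python enabled, the assembled command would look roughly like the sketch below; the version number and stage path are illustrative assumptions, not values taken from the record.

import os
import subprocess

stage_path = '/tmp/spack-stage-catalyst'                    # hypothetical stage directory
paraview_dir = os.path.join(stage_path, 'ParaView-v5.5.2')  # assumed ParaView/Catalyst version
editions = os.path.join(paraview_dir, 'Catalyst', 'Editions')
command = ['python', os.path.join(paraview_dir, 'Catalyst', 'catalyze.py'),
           '-r', paraview_dir,
           '-i', os.path.join(editions, 'Base'),
           '-i', os.path.join(editions, 'Enable-Python'),
           '-o', os.path.join(stage_path, 'Catalyst-v5.5.2')]
subprocess.check_call(command)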
8a7fe5a39b55ae15e407f2f161531a1d1b7b5570226ac768bc034171616f43cf
|
@property
def root_cmakelists_dir(self):
'The relative path to the directory containing CMakeLists.txt\n\n This path is relative to the root of the extracted tarball,\n not to the ``build_directory``. Defaults to the current directory.\n\n :return: directory containing CMakeLists.txt\n '
return os.path.join(self.stage.path, ('Catalyst-v' + str(self.version)))
|
The relative path to the directory containing CMakeLists.txt
This path is relative to the root of the extracted tarball,
not to the ``build_directory``. Defaults to the current directory.
:return: directory containing CMakeLists.txt
|
var/spack/repos/builtin/packages/catalyst/package.py
|
root_cmakelists_dir
|
jdeaton/spack
| 2 |
python
|
@property
def root_cmakelists_dir(self):
'The relative path to the directory containing CMakeLists.txt\n\n This path is relative to the root of the extracted tarball,\n not to the ``build_directory``. Defaults to the current directory.\n\n :return: directory containing CMakeLists.txt\n '
return os.path.join(self.stage.path, ('Catalyst-v' + str(self.version)))
|
@property
def root_cmakelists_dir(self):
'The relative path to the directory containing CMakeLists.txt\n\n This path is relative to the root of the extracted tarball,\n not to the ``build_directory``. Defaults to the current directory.\n\n :return: directory containing CMakeLists.txt\n '
return os.path.join(self.stage.path, ('Catalyst-v' + str(self.version)))<|docstring|>The relative path to the directory containing CMakeLists.txt
This path is relative to the root of the extracted tarball,
not to the ``build_directory``. Defaults to the current directory.
:return: directory containing CMakeLists.txt<|endoftext|>
|
e1118e3d145493e4689b70c50a842ab05cb8366bb74ffc1de83eed375185dd4e
|
@property
def build_directory(self):
'Returns the directory to use when building the package\n\n :return: directory where to build the package\n '
return join_path(os.path.abspath(self.root_cmakelists_dir), 'spack-build')
|
Returns the directory to use when building the package
:return: directory where to build the package
|
var/spack/repos/builtin/packages/catalyst/package.py
|
build_directory
|
jdeaton/spack
| 2 |
python
|
@property
def build_directory(self):
'Returns the directory to use when building the package\n\n :return: directory where to build the package\n '
return join_path(os.path.abspath(self.root_cmakelists_dir), 'spack-build')
|
@property
def build_directory(self):
'Returns the directory to use when building the package\n\n :return: directory where to build the package\n '
return join_path(os.path.abspath(self.root_cmakelists_dir), 'spack-build')<|docstring|>Returns the directory to use when building the package
:return: directory where to build the package<|endoftext|>
|
1aec423d98fcd8e53955475d26f1ab08224896dcfafb7e5099c8d856bd3620ca
|
def cmake_args(self):
'Populate cmake arguments for Catalyst.'
cmake_args = [('-DPARAVIEW_GIT_DESCRIBE=v%s' % str(self.version))]
return cmake_args
|
Populate cmake arguments for Catalyst.
|
var/spack/repos/builtin/packages/catalyst/package.py
|
cmake_args
|
jdeaton/spack
| 2 |
python
|
def cmake_args(self):
cmake_args = [('-DPARAVIEW_GIT_DESCRIBE=v%s' % str(self.version))]
return cmake_args
|
def cmake_args(self):
cmake_args = [('-DPARAVIEW_GIT_DESCRIBE=v%s' % str(self.version))]
return cmake_args<|docstring|>Populate cmake arguments for Catalyst.<|endoftext|>
|
0e99e738d81f68157409409896e2c761affda865bdf3414eea8d4e6379496b4d
|
def cmake(self, spec, prefix):
'Runs ``cmake`` in the build directory through the cmake.sh script'
cmake_script_path = os.path.join(os.path.abspath(self.root_cmakelists_dir), 'cmake.sh')
with working_dir(self.build_directory, create=True):
subprocess.check_call((([cmake_script_path, os.path.abspath(self.root_cmakelists_dir)] + self.cmake_args()) + self.std_cmake_args))
|
Runs ``cmake`` in the build directory through the cmake.sh script
|
var/spack/repos/builtin/packages/catalyst/package.py
|
cmake
|
jdeaton/spack
| 2 |
python
|
def cmake(self, spec, prefix):
cmake_script_path = os.path.join(os.path.abspath(self.root_cmakelists_dir), 'cmake.sh')
with working_dir(self.build_directory, create=True):
subprocess.check_call((([cmake_script_path, os.path.abspath(self.root_cmakelists_dir)] + self.cmake_args()) + self.std_cmake_args))
|
def cmake(self, spec, prefix):
cmake_script_path = os.path.join(os.path.abspath(self.root_cmakelists_dir), 'cmake.sh')
with working_dir(self.build_directory, create=True):
subprocess.check_call((([cmake_script_path, os.path.abspath(self.root_cmakelists_dir)] + self.cmake_args()) + self.std_cmake_args))<|docstring|>Runs ``cmake`` in the build directory through the cmake.sh script<|endoftext|>
|
82a34a37c7668040177f172b0f3a760913706ab2152891322aa500773bb3510e
|
@pytest.mark.online
def test_bad_keyid(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
'Test docker-sign can handle invalid keyids.'
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', 'invalidkeyid', str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input='invalidpassword\n')
assert result.exception
assert ('Integrity check passed.' in caplog.text)
assert ('Failed to create signature!' in caplog.text)
|
Test docker-sign can handle invalid keyids.
|
tests/test_script_docker_sign_registry.py
|
test_bad_keyid
|
crashvb/docker-sign-verify
| 4 |
python
|
@pytest.mark.online
def test_bad_keyid(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', 'invalidkeyid', str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input='invalidpassword\n')
assert result.exception
assert ('Integrity check passed.' in caplog.text)
assert ('Failed to create signature!' in caplog.text)
|
@pytest.mark.online
def test_bad_keyid(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', 'invalidkeyid', str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input='invalidpassword\n')
assert result.exception
assert ('Integrity check passed.' in caplog.text)
assert ('Failed to create signature!' in caplog.text)<|docstring|>Test docker-sign can handle invalid keyids.<|endoftext|>
|
002e291635def7baa3f57ea58a624d727eb38079c2938deddefe3140013d9ac8
|
def test_empty_args(clirunner):
'Test docker-sign CLI can be invoked.'
result = clirunner.invoke(cli, ['registry'], catch_exceptions=False)
assert ('Usage:' in result.stdout)
assert (result.exit_code != 0)
|
Test docker-sign CLI can be invoked.
|
tests/test_script_docker_sign_registry.py
|
test_empty_args
|
crashvb/docker-sign-verify
| 4 |
python
|
def test_empty_args(clirunner):
result = clirunner.invoke(cli, ['registry'], catch_exceptions=False)
assert ('Usage:' in result.stdout)
assert (result.exit_code != 0)
|
def test_empty_args(clirunner):
result = clirunner.invoke(cli, ['registry'], catch_exceptions=False)
assert ('Usage:' in result.stdout)
assert (result.exit_code != 0)<|docstring|>Test docker-sign CLI can be invoked.<|endoftext|>
|
0646fb4ae142bdad128bf66da30a83d279cf3f197afcaa90019000abc4e00b99
|
@pytest.mark.online
def test_forced_digest_value(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
'Test docker-sign can handle a forced digest value.'
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('It is not possible to store a signed image to a predetermined digest' in caplog.text)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
destination.digest = None
assert (str(destination) in caplog.text)
|
Test docker-sign can handle a forced digest value.
|
tests/test_script_docker_sign_registry.py
|
test_forced_digest_value
|
crashvb/docker-sign-verify
| 4 |
python
|
@pytest.mark.online
def test_forced_digest_value(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('It is not possible to store a signed image to a predetermined digest' in caplog.text)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
destination.digest = None
assert (str(destination) in caplog.text)
|
@pytest.mark.online
def test_forced_digest_value(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('It is not possible to store a signed image to a predetermined digest' in caplog.text)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
destination.digest = None
assert (str(destination) in caplog.text)<|docstring|>Test docker-sign can handle a forced digest value.<|endoftext|>
|
784f280184f7e3e010b52879efc4e5680bee94691c67c4a89930039447e134c8
|
@pytest.mark.online
def test_no_signatures_endorse(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
'Test docker-sign can endorse images without existing signatures.'
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['--signature-type', 'endorse', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)
|
Test docker-sign can endorse images without existing signatures.
|
tests/test_script_docker_sign_registry.py
|
test_no_signatures_endorse
|
crashvb/docker-sign-verify
| 4 |
python
|
@pytest.mark.online
def test_no_signatures_endorse(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['--signature-type', 'endorse', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)
|
@pytest.mark.online
def test_no_signatures_endorse(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['--signature-type', 'endorse', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)<|docstring|>Test docker-sign can endorse images without existing signatures.<|endoftext|>
|
04c764b036fff5b14cee43f2eece3128776d06bc645adc5af4127e04164af2b2
|
@pytest.mark.online
def test_no_signatures_sign(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
'Test docker-sign can sign images without existing signatures.'
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['--signature-type', 'sign', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)
|
Test docker-sign can sign images without existing signatures.
|
tests/test_script_docker_sign_registry.py
|
test_no_signatures_sign
|
crashvb/docker-sign-verify
| 4 |
python
|
@pytest.mark.online
def test_no_signatures_sign(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['--signature-type', 'sign', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)
|
@pytest.mark.online
def test_no_signatures_sign(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['--signature-type', 'sign', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)<|docstring|>Test docker-sign can sign images without existing signatures.<|endoftext|>
|
606dd4bde0a008a2e167eeee05c63c6bdd7e25acf2350db9aa2c366cdafe0aba
|
@pytest.mark.online
def test_no_signatures_sign_implicit(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
'Test docker-sign can sign (implicit) images without existing signatures.'
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)
|
Test docker-sign can sign (implicit) images without existing signatures.
|
tests/test_script_docker_sign_registry.py
|
test_no_signatures_sign_implicit
|
crashvb/docker-sign-verify
| 4 |
python
|
@pytest.mark.online
def test_no_signatures_sign_implicit(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)
|
@pytest.mark.online
def test_no_signatures_sign_implicit(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)<|docstring|>Test docker-sign can sign (implicit) images without existing signatures.<|endoftext|>
|
575aa8176041d2838ad20cedc88d9619ffe822b55f154afd3b6329c417e6d7c3
|
@pytest.mark.online
def test_no_signatures_resign(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
'Test docker-sign can resign images without existing signatures.'
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['--signature-type', 'resign', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)
|
Test docker-sign can resign images without existing signatures.
|
tests/test_script_docker_sign_registry.py
|
test_no_signatures_resign
|
crashvb/docker-sign-verify
| 4 |
python
|
@pytest.mark.online
def test_no_signatures_resign(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['--signature-type', 'resign', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)
|
@pytest.mark.online
def test_no_signatures_resign(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
source = known_good_image['image_name']
destination = source.clone()
destination.digest = None
destination.tag += __name__
with ca_trust_store(docker_registry_secure.cacerts), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['--signature-type', 'resign', 'registry', '--keyid', gpgsigner.keyid, str(source), str(destination)], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert (not result.exception)
assert ('Integrity check passed.' in caplog.text)
assert ('Created new image' in caplog.text)
assert (str(destination) in caplog.text)<|docstring|>Test docker-sign can resign images without existing signatures.<|endoftext|>
|
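The signing tests above all drive the same Click entry point with different --signature-type values. Stripped of the pytest fixtures, a single invocation reduces to roughly the sketch below; the key id, image references, datastore path and passphrase are placeholders, and cli stands for the docker-sign command object the tests import (its import path is not shown in these records).

from click.testing import CliRunner

runner = CliRunner()
result = runner.invoke(
    cli,  # docker-sign Click entry point, as imported by the tests above
    args=['--signature-type', 'sign', 'registry',
          '--keyid', '0123456789ABCDEF',             # hypothetical GPG key id
          'registry.example.com/busybox:latest',     # hypothetical source image
          'registry.example.com/busybox:signed'],    # hypothetical destination image
    env={'DSV_GPG_DATASTORE': '/home/user/.gnupg'},  # hypothetical GPG datastore
    input='my-passphrase\n',
)
assert not result.exception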
584df8104dd7a6ce185a341cc212aa6337a3c904e5acd7e37d206d66ffb929d1
|
@pytest.mark.online
@pytest.mark.skip('TODO: Figure out why the hybrid CA trust store is not working.')
def test_unauthorized_destination(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
'Test docker-sign can handle incorrect credentials.'
caplog.clear()
caplog.set_level(logging.DEBUG)
with hybrid_trust_store(docker_registry_secure) as path, ca_trust_store(path), registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(known_good_image['image_name']), f'{Indices.DOCKERHUB}/dummy:dummy'], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert result.exception
assert ('Integrity check passed.' in caplog.text)
assert ('401' in caplog.text)
assert ('Unauthorized' in caplog.text)
|
Test docker-sign can handle incorrect credentials.
|
tests/test_script_docker_sign_registry.py
|
test_unauthorized_destination
|
crashvb/docker-sign-verify
| 4 |
python
|
@pytest.mark.online
@pytest.mark.skip('TODO: Figure out why the hybrid CA trust store is not working.')
def test_unauthorized_destination(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
with hybrid_trust_store(docker_registry_secure) as path, ca_trust_store(path), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(known_good_image['image_name']), f'{Indices.DOCKERHUB}/dummy:dummy'], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert result.exception
assert ('Integrity check passed.' in caplog.text)
assert ('401' in caplog.text)
assert ('Unauthorized' in caplog.text)
|
@pytest.mark.online
@pytest.mark.skip('TODO: Figure out why the hybrid CA trust store is not working.')
def test_unauthorized_destination(caplog: LogCaptureFixture, clirunner, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
with hybrid_trust_store(docker_registry_secure) as path, ca_trust_store(path), registry_credentials(docker_registry_secure):
        result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, str(known_good_image['image_name']), f'{Indices.DOCKERHUB}/dummy:dummy'], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input=f'''{gpgsigner.passphrase}
''')
assert result.exception
assert ('Integrity check passed.' in caplog.text)
assert ('401' in caplog.text)
assert ('Unauthorized' in caplog.text)<|docstring|>Test docker-sign can handle incorrect credentials.<|endoftext|>
|
54be5c9d9adc6851c3fd849ffbc8f92465b4cabf869e8e48bad762bcb2d2f522
|
@pytest.mark.online
def test_unauthorized_source(clirunner, caplog: LogCaptureFixture, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
'Test docker-sign can handle incorrect credentials.'
caplog.clear()
caplog.set_level(logging.DEBUG)
with registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, f'{Indices.DOCKERHUB}/dummy:dummy', str(known_good_image['image_name'])], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input='\n')
assert result.exception
assert ('401' in caplog.text)
assert ('Unauthorized' in caplog.text)
|
Test docker-sign can handle incorrect credentials.
|
tests/test_script_docker_sign_registry.py
|
test_unauthorized_source
|
crashvb/docker-sign-verify
| 4 |
python
|
@pytest.mark.online
def test_unauthorized_source(clirunner, caplog: LogCaptureFixture, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
with registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, f'{Indices.DOCKERHUB}/dummy:dummy', str(known_good_image['image_name'])], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input='\n')
assert result.exception
assert ('401' in caplog.text)
assert ('Unauthorized' in caplog.text)
|
@pytest.mark.online
def test_unauthorized_source(clirunner, caplog: LogCaptureFixture, docker_registry_secure: DockerRegistrySecure, gpgsigner: GPGSigner, known_good_image: TypingKnownGoodImage):
caplog.clear()
caplog.set_level(logging.DEBUG)
with registry_credentials(docker_registry_secure):
result = clirunner.invoke(cli, args=['registry', '--keyid', gpgsigner.keyid, f'{Indices.DOCKERHUB}/dummy:dummy', str(known_good_image['image_name'])], env={'DSV_GPG_DATASTORE': str(gpgsigner.homedir)}, input='\n')
assert result.exception
assert ('401' in caplog.text)
assert ('Unauthorized' in caplog.text)<|docstring|>Test docker-sign can handle incorrect credentials.<|endoftext|>
|
dd5a453c08a1a8d39ed6f23bca0126ada5ee506cbc2a670eaf0709087eab97e7
|
def sillouette_score(X_transform, range_start=2, range_end=11):
'\n\tMetrica usada para calcular o coeficiente medio da silhueta de todas as amostras.\n\tO melhor valor é 1 e o pior valor é -1. Valores próximos a 0 indicam clusters sobrepostos. \n\tValores negativos geralmente indicam que uma amostra foi atribuída ao cluster errado, \n\tpois um cluster diferente é mais semelhante.\n\n\t----------\n\tparameters:\n\t\tX_transform: Dados transformado em versao numerica\n\t\trange_start: intervalo inicial de quantas vezes o kmeans irá rodar\n\t\trange_end: intervalo final de quantas vezes o kmeans irá rodar\n\t'
for i in range(range_start, range_end):
cluster = KMeans(n_clusters=i)
preds = cluster.fit_predict(X_transform)
score = silhouette_score(X_transform, preds)
print(((('Silhueta para ' + str(i)) + ' clusters : ') + str(score)))
|
Metric used to compute the mean silhouette coefficient of all samples.
The best value is 1 and the worst value is -1. Values close to 0 indicate overlapping clusters.
Negative values generally indicate that a sample has been assigned to the wrong cluster,
since a different cluster is more similar.
----------
parameters:
X_transform: data transformed into numeric form
range_start: starting value of the range of times kmeans will be run
range_end: ending value of the range of times kmeans will be run
|
projeto/utils/metrics.py
|
sillouette_score
|
IgoPereiraBarros/PIBIC-and-TCC
| 6 |
python
|
def sillouette_score(X_transform, range_start=2, range_end=11):
'\n\tMetrica usada para calcular o coeficiente medio da silhueta de todas as amostras.\n\tO melhor valor é 1 e o pior valor é -1. Valores próximos a 0 indicam clusters sobrepostos. \n\tValores negativos geralmente indicam que uma amostra foi atribuída ao cluster errado, \n\tpois um cluster diferente é mais semelhante.\n\n\t----------\n\tparameters:\n\t\tX_transform: Dados transformado em versao numerica\n\t\trange_start: intervalo inicial de quantas vezes o kmeans irá rodar\n\t\trange_end: intervalo final de quantas vezes o kmeans irá rodar\n\t'
for i in range(range_start, range_end):
cluster = KMeans(n_clusters=i)
preds = cluster.fit_predict(X_transform)
score = silhouette_score(X_transform, preds)
print(((('Silhueta para ' + str(i)) + ' clusters : ') + str(score)))
|
def sillouette_score(X_transform, range_start=2, range_end=11):
'\n\tMetrica usada para calcular o coeficiente medio da silhueta de todas as amostras.\n\tO melhor valor é 1 e o pior valor é -1. Valores próximos a 0 indicam clusters sobrepostos. \n\tValores negativos geralmente indicam que uma amostra foi atribuída ao cluster errado, \n\tpois um cluster diferente é mais semelhante.\n\n\t----------\n\tparameters:\n\t\tX_transform: Dados transformado em versao numerica\n\t\trange_start: intervalo inicial de quantas vezes o kmeans irá rodar\n\t\trange_end: intervalo final de quantas vezes o kmeans irá rodar\n\t'
for i in range(range_start, range_end):
cluster = KMeans(n_clusters=i)
preds = cluster.fit_predict(X_transform)
score = silhouette_score(X_transform, preds)
        print(((('Silhueta para ' + str(i)) + ' clusters : ') + str(score)))<|docstring|>Metric used to compute the mean silhouette coefficient of all samples.
The best value is 1 and the worst value is -1. Values close to 0 indicate overlapping clusters.
Negative values generally indicate that a sample has been assigned to the wrong cluster,
since a different cluster is more similar.
----------
parameters:
X_transform: data transformed into numeric form
range_start: starting value of the range of times kmeans will be run
range_end: ending value of the range of times kmeans will be run<|endoftext|>
|
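As a quick check of what the sillouette_score helper above prints, the same loop run on an obviously two-cluster dataset should peak close to 1 at two clusters; the data below is synthetic and purely illustrative.

import numpy as np
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score

# Two well-separated Gaussian blobs, so n_clusters=2 should score near 1.
rng = np.random.default_rng(0)
X_transform = np.vstack([rng.normal(0.0, 0.2, (50, 2)),
                         rng.normal(5.0, 0.2, (50, 2))])

for i in range(2, 6):
    preds = KMeans(n_clusters=i, n_init=10).fit_predict(X_transform)
    print(i, silhouette_score(X_transform, preds))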
409c42f484d0296c196be7c699df8b8fcc88cd1554ba7447dcc2d97f0e87b7aa
|
@forbidden_view_config(path_info='/api/', renderer='json')
@notfound_view_config(path_info='/api/', renderer='json')
def api_notfound(request):
'Handle a request for an unknown/forbidden resource within the API.'
request.response.status_code = 404
message = _("Either the resource you requested doesn't exist, or you are not currently authorized to see it.")
return {'status': 'failure', 'reason': message}
|
Handle a request for an unknown/forbidden resource within the API.
|
h/views/api_exceptions.py
|
api_notfound
|
discodavey/h
| 2 |
python
|
@forbidden_view_config(path_info='/api/', renderer='json')
@notfound_view_config(path_info='/api/', renderer='json')
def api_notfound(request):
request.response.status_code = 404
message = _("Either the resource you requested doesn't exist, or you are not currently authorized to see it.")
return {'status': 'failure', 'reason': message}
|
@forbidden_view_config(path_info='/api/', renderer='json')
@notfound_view_config(path_info='/api/', renderer='json')
def api_notfound(request):
request.response.status_code = 404
message = _("Either the resource you requested doesn't exist, or you are not currently authorized to see it.")
return {'status': 'failure', 'reason': message}<|docstring|>Handle a request for an unknown/forbidden resource within the API.<|endoftext|>
|
125e27954d64cbdc4126f3219ac873761519503d12558ba23b897498205bb51f
|
@json_view(context=APIError)
def api_error(context, request):
'Handle an expected/deliberately thrown API exception.'
request.response.status_code = context.status_code
return {'status': 'failure', 'reason': context.message}
|
Handle an expected/deliberately thrown API exception.
|
h/views/api_exceptions.py
|
api_error
|
discodavey/h
| 2 |
python
|
@json_view(context=APIError)
def api_error(context, request):
request.response.status_code = context.status_code
return {'status': 'failure', 'reason': context.message}
|
@json_view(context=APIError)
def api_error(context, request):
request.response.status_code = context.status_code
return {'status': 'failure', 'reason': context.message}<|docstring|>Handle an expected/deliberately thrown API exception.<|endoftext|>
|
4b352560448f60c5a5ed6ddb9dbb6d29bd92c9cc5e6b04f1759397cf84783a3c
|
@json_view(context=Exception)
def json_error(request):
'Handle an unexpected exception where the request asked for JSON.'
handle_exception(request)
message = _("Uh-oh, something went wrong! We're very sorry, our application wasn't able to load this page. The team has been notified and we'll fix it shortly. If the problem persists or you'd like more information please email [email protected] with the subject 'Internal Server Error'.")
return {'status': 'failure', 'reason': message}
|
Handle an unexpected exception where the request asked for JSON.
|
h/views/api_exceptions.py
|
json_error
|
discodavey/h
| 2 |
python
|
@json_view(context=Exception)
def json_error(request):
handle_exception(request)
message = _("Uh-oh, something went wrong! We're very sorry, our application wasn't able to load this page. The team has been notified and we'll fix it shortly. If the problem persists or you'd like more information please email [email protected] with the subject 'Internal Server Error'.")
return {'status': 'failure', 'reason': message}
|
@json_view(context=Exception)
def json_error(request):
handle_exception(request)
message = _("Uh-oh, something went wrong! We're very sorry, our application wasn't able to load this page. The team has been notified and we'll fix it shortly. If the problem persists or you'd like more information please email [email protected] with the subject 'Internal Server Error'.")
return {'status': 'failure', 'reason': message}<|docstring|>Handle an unexpected exception where the request asked for JSON.<|endoftext|>
|
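The three h view records above give every API failure the same JSON envelope, {'status': 'failure', 'reason': ...}, with the status code set per case. A client-side sketch of consuming that envelope follows; the deployment URL and the use of requests are illustrative assumptions, not part of h.

import requests

# Hypothetical deployment; any unknown or forbidden /api/ path returns the envelope below with HTTP 404.
resp = requests.get('https://example.com/api/no-such-resource')
if resp.status_code == 404:
    body = resp.json()
    assert body['status'] == 'failure'
    print(body['reason'])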
954bcd9bfc2cf6dac98328f513af226bffcfc701542f4f43abc72c79bf86674d
|
def test_organizer_permissions_newsmodel(self):
'Check default permissions of organizer for NewsModel'
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_newsmodel'))
|
Check default permissions of organizer for NewsModel
|
RNAPuzzles/rnapuzzles/testfiles/test_organizerpermission.py
|
test_organizer_permissions_newsmodel
|
whinyadventure/RNA-Puzzles
| 0 |
python
|
def test_organizer_permissions_newsmodel(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_newsmodel'))
|
def test_organizer_permissions_newsmodel(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_newsmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_newsmodel'))<|docstring|>Check default permissions of organizer for NewsModel<|endoftext|>
|
9c9bbc78e734e979e8ee9e062d038cdf5b9c9f9c9998fa1c2bd925648ca2efef
|
def test_organizer_permissions_puzzleinfo(self):
'Check default permissions of organizer for PuzzleInfo'
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_puzzleinfo'))
self.assertTrue(self.organizer.has_perm('rnapuzzles.add_puzzleinfo'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_puzzleinfo'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_puzzleinfo'))
|
Check default permissions of organizer for PuzzleInfo
|
RNAPuzzles/rnapuzzles/testfiles/test_organizerpermission.py
|
test_organizer_permissions_puzzleinfo
|
whinyadventure/RNA-Puzzles
| 0 |
python
|
def test_organizer_permissions_puzzleinfo(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_puzzleinfo'))
self.assertTrue(self.organizer.has_perm('rnapuzzles.add_puzzleinfo'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_puzzleinfo'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_puzzleinfo'))
|
def test_organizer_permissions_puzzleinfo(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_puzzleinfo'))
self.assertTrue(self.organizer.has_perm('rnapuzzles.add_puzzleinfo'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_puzzleinfo'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_puzzleinfo'))<|docstring|>Check default permissions of organizer for PuzzleInfo<|endoftext|>
|
aadadf82d5f5d9c0a46f22b295739eb57c8146c9051389af5e97e174e4c1d083
|
def test_organizer_permissions_challenge(self):
'Check default permissions of organizer for PuzzleInfo'
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_challenge'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_challenge'))
|
Check default permissions of organizer for PuzzleInfo
|
RNAPuzzles/rnapuzzles/testfiles/test_organizerpermission.py
|
test_organizer_permissions_challenge
|
whinyadventure/RNA-Puzzles
| 0 |
python
|
def test_organizer_permissions_challenge(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_challenge'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_challenge'))
|
def test_organizer_permissions_challenge(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_challenge'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_challenge'))<|docstring|>Check default permissions of organizer for PuzzleInfo<|endoftext|>
|
925556706eeb7cebe41a875f582c835c3ab5b43f495c47cb9436474e95dac5c9
|
def test_organizer_permissions_group(self):
'Check default permissions of organizer for Group'
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_group'))
|
Check default permissions of organizer for Group
|
RNAPuzzles/rnapuzzles/testfiles/test_organizerpermission.py
|
test_organizer_permissions_group
|
whinyadventure/RNA-Puzzles
| 0 |
python
|
def test_organizer_permissions_group(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_group'))
|
def test_organizer_permissions_group(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_group'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_group'))<|docstring|>Check default permissions of organizer for Group<|endoftext|>
|
e53da157075a17196e91ee1b268f5444c65869725e6e6c024ee3b27193156085
|
def test_organizer_permissions_resources(self):
'Check default permissions of organizer for Resources'
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_resourcesmodel'))
|
Check default permissions of organizer for Resources
|
RNAPuzzles/rnapuzzles/testfiles/test_organizerpermission.py
|
test_organizer_permissions_resources
|
whinyadventure/RNA-Puzzles
| 0 |
python
|
def test_organizer_permissions_resources(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_resourcesmodel'))
|
def test_organizer_permissions_resources(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_resourcesmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_resourcesmodel'))<|docstring|>Check default permissions of organizer for Resources<|endoftext|>
|
c9e069f6b8a43559884e47f3d765e0266c0ede893cca429a405c75e97303645d
|
def test_organizer_permissions_faq(self):
'Check default permissions of organizer for Faq'
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_faqmodel'))
|
Check default permissions of organizer for Faq
|
RNAPuzzles/rnapuzzles/testfiles/test_organizerpermission.py
|
test_organizer_permissions_faq
|
whinyadventure/RNA-Puzzles
| 0 |
python
|
def test_organizer_permissions_faq(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_faqmodel'))
|
def test_organizer_permissions_faq(self):
self.assertFalse(self.organizer.has_perm('rnapuzzles.view_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.add_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.delete_faqmodel'))
self.assertFalse(self.organizer.has_perm('rnapuzzles.change_faqmodel'))<|docstring|>Check default permissions of organizer for Faq<|endoftext|>
|
95f4654a0d0035c34a0cb0ef2920a0114cf056a8397f68d9d41048477a6e6079
|
def defringeflat(flat_file, wbin=10, start_col=10, end_col=980, clip1=0, diagnostic=True, save_to_path=None, filename=None):
'\n\tThis function is to remove the fringe pattern using\n\tthe method described in Rojo and Harrington (2006).\n\n\tUse a fifth order polynomial to remove the continuum.\n\n\tParameters\n\t----------\n\tflat_file \t\t: \tfits\n\t\t\t\t\t\toriginal flat file\n\n\tOptional Parameters\n\t-------------------\n\twbin \t\t\t:\tint\n\t\t\t\t\t\tthe bin width to calculate each \n\t\t\t\t\t\tenhance row\n\t\t\t\t\t\tDefault is 32\n\n\tstart_col \t\t: \tint\n\t\t\t\t\t\tstarting column number for the\n\t\t\t\t\t\twavelet analysis\n\t\t\t\t\t\tDefault is 10\n\n\tend_col \t\t: \tint\n\t\t\t\t\t\tending column number for the\n\t\t\t\t\t\twavelet analysis\n\t\t\t\t\t\tDefault is 980\n\n\tdiagnostic \t\t: \tboolean\n\t\t\t\t\t\toutput the diagnostic plots\n\t\t\t\t\t\tDefault is True\n\n\tReturns\n\t-------\n\tdefringe file \t: \tfits\n\t\t\t\t\t\tdefringed flat file\n\n\t'
data = fits.open(flat_file, ignore_missing_end=True)
data_length = len(data[0].data)
date = Time(data[0].header['DATE-OBS'], scale='utc')
jd = date.jd
if (jd >= 2458401.5):
data[0].data = np.rot90(data[0].data, k=3)
(hist, bins) = np.histogram(data[0].data.flatten(), bins=int(np.sqrt(len(data[0].data.flatten()))))
bins = bins[0:(- 1)]
index1 = np.where(((bins > np.percentile(data[0].data.flatten(), 10)) & (bins < np.percentile(data[0].data.flatten(), 30))))
try:
lowval = bins[index1][np.where((hist[index1] == np.min(hist[index1])))]
if (len(lowval) >= 2):
lowval = np.min(lowval)
except:
lowval = 0
flat = data
if (diagnostic is True):
save_to_image_path = (save_to_path + '/images/')
if (not os.path.exists(save_to_image_path)):
os.makedirs(save_to_image_path)
fig = plt.figure(figsize=(8, 8))
fig.suptitle('original flat', fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[6, 1])
ax0 = plt.subplot(gs[0])
norm = ImageNormalize(flat[0].data, interval=ZScaleInterval())
ax0.imshow(flat[0].data, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax0.set_ylabel('Row number')
ax1 = plt.subplot(gs[1], sharex=ax0)
        ax1.plot(flat[0].data[60, :], 'k-', alpha=0.5, label='60th row profile')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.savefig((save_to_image_path + 'defringeflat_{}_0_original_flat.png'.format(filename)), bbox_inches='tight')
plt.close()
defringeflat_img = data
defringe_data = np.array(defringeflat_img[0].data, dtype=float)
for k in np.arange(0, (data_length - wbin), wbin):
'\n\t\t# Use the data to figure out the values to mask through the image (low counts/order edges)\n\t\thist, bins = np.histogram(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), \n\t\t\t bins=int(np.sqrt(len(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten()))))\n\t\tbins = bins[0:-1]\n\t\tindex1 = np.where( (bins > np.percentile(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), 10)) & \n\t\t\t (bins < np.percentile(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), 30)) )\n\t\tlowval = bins[index1][np.where(hist[index1] == np.min(hist[index1]))]\n\t\t\n\t\t#print(lowval, len(lowval))\n\t\tif len(lowval) >= 2: lowval = np.min(lowval)\n\t\t'
        mask = np.zeros(flat[0].data[k:((k + wbin) + 1), 0:(data_length - clip1)].shape)
        baddata = np.where((flat[0].data[k:((k + wbin) + 1), 0:(data_length - clip1)] <= lowval))
        mask[baddata] = 1
        flat_patch = np.array(flat[0].data[k:((k + wbin) + 1), 0:(data_length - clip1)])
flat_patch_median = np.ma.median(flat_patch, axis=0)
smoothed = sp.ndimage.uniform_filter1d(flat_patch_median, 30)
splinefit = sp.interpolate.interp1d(np.arange(len(smoothed)), smoothed, kind='cubic')
cont_fit = splinefit(np.arange(0, (data_length - clip1)))
enhance_row = (flat_patch_median - cont_fit)
dt = 0.1
wa = WaveletAnalysis(enhance_row[start_col:end_col], dt=dt)
power = wa.wavelet_power
        scales = wa.scales
t = wa.time
rx = wa.reconstruction()
        reconstruct_image = np.zeros(defringe_data[k:((k + wbin) + 1), 0:(data_length - clip1)].shape)
for i in range((wbin + 1)):
for j in np.arange(start_col, end_col):
reconstruct_image[(i, j)] = rx[(j - start_col)]
        defringe_data[k:((k + wbin) + 1), 0:(data_length - clip1)] -= reconstruct_image[0:(data_length - clip1)]
        defringe_data[k:((k + wbin) + 1), 0:(data_length - clip1)][baddata] = flat[0].data[k:((k + wbin) + 1), 0:(data_length - clip1)][baddata]
if (diagnostic is True):
print('Generating diagnostic plots')
fig = plt.figure(figsize=(10, 6))
fig.suptitle('middle cut at row {}'.format((k + (wbin // 2))), fontsize=12)
ax1 = fig.add_subplot(2, 1, 1)
norm = ImageNormalize(flat_patch, interval=ZScaleInterval())
ax1.imshow(flat_patch, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax1.set_ylabel('Row number')
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
            ax2.plot(flat_patch[(wbin // 2), :], 'k-', alpha=0.5)
ax2.set_ylabel('Amp (DN)')
ax2.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_flat_start_row_{}_middle_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
fig = plt.figure(figsize=(10, 6))
fig.suptitle('continuum fit row {}-{}'.format(k, (k + wbin)), fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
ax0 = plt.subplot(gs[0])
ax0.plot(flat_patch_median, 'k-', alpha=0.5, label='mean average patch')
ax0.plot(cont_fit, 'r-', alpha=0.5, label='continuum fit')
ax0.set_ylabel('Amp (DN)')
plt.legend()
ax1 = plt.subplot(gs[1])
ax1.plot((flat_patch_median - cont_fit), 'k-', alpha=0.5, label='residual')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_continuum_fit.png'.format(filename, k)), bbox_inches='tight')
plt.close()
try:
fig = plt.figure(figsize=(10, 6))
fig.suptitle('reconstruct fringe comparison row {}-{}'.format(k, (k + wbin)), fontsize=10)
ax1 = fig.add_subplot(3, 1, 1)
ax1.set_title('enhance_row start row')
ax1.plot(enhance_row, 'k-', alpha=0.5, label='enhance_row start row {}'.format(k))
ax1.set_ylabel('Amp (DN)')
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.set_title('reconstructed fringe pattern')
ax2.plot(rx, 'k-', alpha=0.5, label='reconstructed fringe pattern')
ax2.set_ylabel('Amp (DN)')
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.set_title('residual')
ax3.plot((enhance_row[start_col:end_col] - rx), 'k-', alpha=0.5, label='residual')
ax3.set_ylabel('Amp (DN)')
ax3.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_reconstruct_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
except RuntimeError:
print('CANNOT GENERATE THE PLOT defringeflat\t\t\t\t\t_{}_start_row_{}_reconstruct_profile.png'.format(filename, k))
pass
fig = plt.figure(figsize=(10, 6))
fig.suptitle('reconstructed image row {}-{}'.format(k, (k + wbin)), fontsize=12)
ax1 = fig.add_subplot(3, 1, 1)
ax1.set_title('raw flat image')
norm = ImageNormalize(flat_patch, interval=ZScaleInterval())
ax1.imshow(flat_patch, cmap='gray', norm=norm, origin='lower', label='raw flat image', aspect='auto')
ax1.set_ylabel('Row number')
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.set_title('reconstructed fringe image')
norm = ImageNormalize(reconstruct_image, interval=ZScaleInterval())
ax2.imshow(reconstruct_image, cmap='gray', norm=norm, origin='lower', label='reconstructed fringe image', aspect='auto')
ax2.set_ylabel('Row number')
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.set_title('residual')
norm = ImageNormalize((flat_patch - reconstruct_image), interval=ZScaleInterval())
ax3.imshow((flat_patch - reconstruct_image), norm=norm, origin='lower', cmap='gray', label='residual', aspect='auto')
ax3.set_ylabel('Row number')
ax3.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_reconstruct_image.png'.format(filename, k)), bbox_inches='tight')
plt.close()
fig = plt.figure(figsize=(10, 6))
fig.suptitle('middle row comparison row {}-{}'.format(k, (k + wbin)), fontsize=12)
ax1 = fig.add_subplot(3, 1, 1)
ax1.plot(flat_patch[wbin // 2, :], 'k-', alpha=0.5, label='original flat row {}'.format(k + wbin / 2))
ax1.set_ylabel('Amp (DN)')
plt.legend()
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.plot(flat_patch[wbin // 2, :] - reconstruct_image[wbin // 2, :], 'k-', alpha=0.5, label='defringed flat row {}'.format(k + wbin / 2))
ax2.set_ylabel('Amp (DN)')
plt.legend()
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.plot(reconstruct_image[wbin // 2, :], 'k-', alpha=0.5, label='difference')
ax3.set_ylabel('Amp (DN)')
ax3.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_defringe_middle_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
if (diagnostic is True):
fig = plt.figure(figsize=(8, 8))
fig.suptitle('defringed flat', fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[6, 1])
ax0 = plt.subplot(gs[0])
norm = ImageNormalize(defringe_data, interval=ZScaleInterval())
ax0.imshow(defringe_data, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax0.set_ylabel('Row number')
ax1 = plt.subplot(gs[1], sharex=ax0)
ax1.plot(defringe_data[60, :], 'k-', alpha=0.5, label='60th row profile')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_0_defringe_flat.png'.format(filename)), bbox_inches='tight')
plt.close()
if (jd >= 2458401.5):
defringe_data = np.rot90(defringe_data, k=1)
hdu = fits.PrimaryHDU(data=defringe_data)
hdu.header = flat[0].header
return hdu
|
This function is to remove the fringe pattern using
the method described in Rojo and Harrington (2006).
Use a fifth order polynomial to remove the continuum.
Parameters
----------
flat_file : fits
original flat file
Optional Parameters
-------------------
wbin : int
the bin width to calculate each
enhance row
Default is 32
start_col : int
starting column number for the
wavelet analysis
Default is 10
end_col : int
ending column number for the
wavelet analysis
Default is 980
diagnostic : boolean
output the diagnostic plots
Default is True
Returns
-------
defringe file : fits
defringed flat file
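A minimal usage sketch (not taken from the source repository; the import path follows smart/utils/defringeflat.py and the file names are illustrative assumptions):

# Hypothetical call: defringe a raw flat frame and write the result out.
# Module path and file names are assumptions for illustration only.
from smart.utils.defringeflat import defringeflat

defringed_hdu = defringeflat('flat_0001.fits', wbin=10, start_col=10, end_col=980,
                             diagnostic=False)
defringed_hdu.writeto('flat_0001_defringe.fits', overwrite=True)

With diagnostic=True, save_to_path and filename must also be given so the diagnostic PNGs can be written.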
|
smart/utils/defringeflat.py
|
defringeflat
|
chihchunhsu/smart
| 10 |
python
|
def defringeflat(flat_file, wbin=10, start_col=10, end_col=980, clip1=0, diagnostic=True, save_to_path=None, filename=None):
'\n\tThis function is to remove the fringe pattern using\n\tthe method described in Rojo and Harrington (2006).\n\n\tUse a fifth order polynomial to remove the continuum.\n\n\tParameters\n\t----------\n\tflat_file \t\t: \tfits\n\t\t\t\t\t\toriginal flat file\n\n\tOptional Parameters\n\t-------------------\n\twbin \t\t\t:\tint\n\t\t\t\t\t\tthe bin width to calculate each \n\t\t\t\t\t\tenhance row\n\t\t\t\t\t\tDefault is 32\n\n\tstart_col \t\t: \tint\n\t\t\t\t\t\tstarting column number for the\n\t\t\t\t\t\twavelet analysis\n\t\t\t\t\t\tDefault is 10\n\n\tend_col \t\t: \tint\n\t\t\t\t\t\tending column number for the\n\t\t\t\t\t\twavelet analysis\n\t\t\t\t\t\tDefault is 980\n\n\tdiagnostic \t\t: \tboolean\n\t\t\t\t\t\toutput the diagnostic plots\n\t\t\t\t\t\tDefault is True\n\n\tReturns\n\t-------\n\tdefringe file \t: \tfits\n\t\t\t\t\t\tdefringed flat file\n\n\t'
data = fits.open(flat_file, ignore_missing_end=True)
data_length = len(data[0].data)
date = Time(data[0].header['DATE-OBS'], scale='utc')
jd = date.jd
if (jd >= 2458401.5):
data[0].data = np.rot90(data[0].data, k=3)
(hist, bins) = np.histogram(data[0].data.flatten(), bins=int(np.sqrt(len(data[0].data.flatten()))))
bins = bins[0:(- 1)]
index1 = np.where(((bins > np.percentile(data[0].data.flatten(), 10)) & (bins < np.percentile(data[0].data.flatten(), 30))))
try:
lowval = bins[index1][np.where((hist[index1] == np.min(hist[index1])))]
if (len(lowval) >= 2):
lowval = np.min(lowval)
except:
lowval = 0
flat = data
if (diagnostic is True):
save_to_image_path = (save_to_path + '/images/')
if (not os.path.exists(save_to_image_path)):
os.makedirs(save_to_image_path)
fig = plt.figure(figsize=(8, 8))
fig.suptitle('original flat', fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[6, 1])
ax0 = plt.subplot(gs[0])
norm = ImageNormalize(flat[0].data, interval=ZScaleInterval())
ax0.imshow(flat[0].data, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax0.set_ylabel('Row number')
ax1 = plt.subplot(gs[1], sharex=ax0)
ax1.plot(flat[0].data[60, :], 'k-', alpha=0.5, label='60th row profile')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.savefig((save_to_image_path + 'defringeflat_{}_0_original_flat.png'.format(filename)), bbox_inches='tight')
plt.close()
defringeflat_img = data
defringe_data = np.array(defringeflat_img[0].data, dtype=float)
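# Step through the image in bins of wbin rows; each bin gets a continuum fit and a wavelet-based fringe model that is subtracted below.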
for k in np.arange(0, (data_length - wbin), wbin):
'\n\t\t# Use the data to figure out the values to mask through the image (low counts/order edges)\n\t\thist, bins = np.histogram(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), \n\t\t\t bins=int(np.sqrt(len(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten()))))\n\t\tbins = bins[0:-1]\n\t\tindex1 = np.where( (bins > np.percentile(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), 10)) & \n\t\t\t (bins < np.percentile(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), 30)) )\n\t\tlowval = bins[index1][np.where(hist[index1] == np.min(hist[index1]))]\n\t\t\n\t\t#print(lowval, len(lowval))\n\t\tif len(lowval) >= 2: lowval = np.min(lowval)\n\t\t'
mask = np.zeros(flat[0].data[k:k + wbin + 1, 0:data_length - clip1].shape)
baddata = np.where(flat[0].data[k:k + wbin + 1, 0:data_length - clip1] <= lowval)
mask[baddata] = 1
flat_patch = np.array(flat[0].data[k:k + wbin + 1, 0:data_length - clip1])
flat_patch_median = np.ma.median(flat_patch, axis=0)
smoothed = sp.ndimage.uniform_filter1d(flat_patch_median, 30)
splinefit = sp.interpolate.interp1d(np.arange(len(smoothed)), smoothed, kind='cubic')
cont_fit = splinefit(np.arange(0, (data_length - clip1)))
enhance_row = (flat_patch_median - cont_fit)
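# Wavelet analysis of the continuum-subtracted median profile; the reconstructed signal approximates the periodic fringe pattern (Rojo & Harrington 2006).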
dt = 0.1
wa = WaveletAnalysis(enhance_row[start_col:end_col], dt=dt)
power = wa.wavelet_power
scales = wa.scales
t = wa.time
rx = wa.reconstruction()
reconstruct_image = np.zeros(defringe_data[k:k + wbin + 1, 0:data_length - clip1].shape)
for i in range((wbin + 1)):
for j in np.arange(start_col, end_col):
reconstruct_image[i, j] = rx[j - start_col]
defringe_data[k:k + wbin + 1, 0:data_length - clip1] -= reconstruct_image[0:data_length - clip1]
defringe_data[k:k + wbin + 1, 0:data_length - clip1][baddata] = flat[0].data[k:k + wbin + 1, 0:data_length - clip1][baddata]
if (diagnostic is True):
print('Generating diagnostic plots')
fig = plt.figure(figsize=(10, 6))
fig.suptitle('middle cut at row {}'.format((k + (wbin // 2))), fontsize=12)
ax1 = fig.add_subplot(2, 1, 1)
norm = ImageNormalize(flat_patch, interval=ZScaleInterval())
ax1.imshow(flat_patch, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax1.set_ylabel('Row number')
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
ax2.plot(flat_patch[wbin // 2, :], 'k-', alpha=0.5)
ax2.set_ylabel('Amp (DN)')
ax2.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_flat_start_row_{}_middle_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
fig = plt.figure(figsize=(10, 6))
fig.suptitle('continuum fit row {}-{}'.format(k, (k + wbin)), fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
ax0 = plt.subplot(gs[0])
ax0.plot(flat_patch_median, 'k-', alpha=0.5, label='mean average patch')
ax0.plot(cont_fit, 'r-', alpha=0.5, label='continuum fit')
ax0.set_ylabel('Amp (DN)')
plt.legend()
ax1 = plt.subplot(gs[1])
ax1.plot((flat_patch_median - cont_fit), 'k-', alpha=0.5, label='residual')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_continuum_fit.png'.format(filename, k)), bbox_inches='tight')
plt.close()
try:
fig = plt.figure(figsize=(10, 6))
fig.suptitle('reconstruct fringe comparison row {}-{}'.format(k, (k + wbin)), fontsize=10)
ax1 = fig.add_subplot(3, 1, 1)
ax1.set_title('enhance_row start row')
ax1.plot(enhance_row, 'k-', alpha=0.5, label='enhance_row start row {}'.format(k))
ax1.set_ylabel('Amp (DN)')
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.set_title('reconstructed fringe pattern')
ax2.plot(rx, 'k-', alpha=0.5, label='reconstructed fringe pattern')
ax2.set_ylabel('Amp (DN)')
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.set_title('residual')
ax3.plot((enhance_row[start_col:end_col] - rx), 'k-', alpha=0.5, label='residual')
ax3.set_ylabel('Amp (DN)')
ax3.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_reconstruct_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
except RuntimeError:
print('CANNOT GENERATE THE PLOT defringeflat\t\t\t\t\t_{}_start_row_{}_reconstruct_profile.png'.format(filename, k))
pass
fig = plt.figure(figsize=(10, 6))
fig.suptitle('reconstructed image row {}-{}'.format(k, (k + wbin)), fontsize=12)
ax1 = fig.add_subplot(3, 1, 1)
ax1.set_title('raw flat image')
norm = ImageNormalize(flat_patch, interval=ZScaleInterval())
ax1.imshow(flat_patch, cmap='gray', norm=norm, origin='lower', label='raw flat image', aspect='auto')
ax1.set_ylabel('Row number')
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.set_title('reconstructed fringe image')
norm = ImageNormalize(reconstruct_image, interval=ZScaleInterval())
ax2.imshow(reconstruct_image, cmap='gray', norm=norm, origin='lower', label='reconstructed fringe image', aspect='auto')
ax2.set_ylabel('Row number')
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.set_title('residual')
norm = ImageNormalize((flat_patch - reconstruct_image), interval=ZScaleInterval())
ax3.imshow((flat_patch - reconstruct_image), norm=norm, origin='lower', cmap='gray', label='residual', aspect='auto')
ax3.set_ylabel('Row number')
ax3.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_reconstruct_image.png'.format(filename, k)), bbox_inches='tight')
plt.close()
fig = plt.figure(figsize=(10, 6))
fig.suptitle('middle row comparison row {}-{}'.format(k, (k + wbin)), fontsize=12)
ax1 = fig.add_subplot(3, 1, 1)
ax1.plot(flat_patch[wbin // 2, :], 'k-', alpha=0.5, label='original flat row {}'.format(k + wbin / 2))
ax1.set_ylabel('Amp (DN)')
plt.legend()
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.plot(flat_patch[wbin // 2, :] - reconstruct_image[wbin // 2, :], 'k-', alpha=0.5, label='defringed flat row {}'.format(k + wbin / 2))
ax2.set_ylabel('Amp (DN)')
plt.legend()
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.plot(reconstruct_image[wbin // 2, :], 'k-', alpha=0.5, label='difference')
ax3.set_ylabel('Amp (DN)')
ax3.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_defringe_middle_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
if (diagnostic is True):
fig = plt.figure(figsize=(8, 8))
fig.suptitle('defringed flat', fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[6, 1])
ax0 = plt.subplot(gs[0])
norm = ImageNormalize(defringe_data, interval=ZScaleInterval())
ax0.imshow(defringe_data, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax0.set_ylabel('Row number')
ax1 = plt.subplot(gs[1], sharex=ax0)
ax1.plot(defringe_data[60, :], 'k-', alpha=0.5, label='60th row profile')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_0_defringe_flat.png'.format(filename)), bbox_inches='tight')
plt.close()
if (jd >= 2458401.5):
defringe_data = np.rot90(defringe_data, k=1)
hdu = fits.PrimaryHDU(data=defringe_data)
hdu.header = flat[0].header
return hdu
|
def defringeflat(flat_file, wbin=10, start_col=10, end_col=980, clip1=0, diagnostic=True, save_to_path=None, filename=None):
'\n\tThis function is to remove the fringe pattern using\n\tthe method described in Rojo and Harrington (2006).\n\n\tUse a fifth order polynomial to remove the continuum.\n\n\tParameters\n\t----------\n\tflat_file \t\t: \tfits\n\t\t\t\t\t\toriginal flat file\n\n\tOptional Parameters\n\t-------------------\n\twbin \t\t\t:\tint\n\t\t\t\t\t\tthe bin width to calculate each \n\t\t\t\t\t\tenhance row\n\t\t\t\t\t\tDefault is 32\n\n\tstart_col \t\t: \tint\n\t\t\t\t\t\tstarting column number for the\n\t\t\t\t\t\twavelet analysis\n\t\t\t\t\t\tDefault is 10\n\n\tend_col \t\t: \tint\n\t\t\t\t\t\tending column number for the\n\t\t\t\t\t\twavelet analysis\n\t\t\t\t\t\tDefault is 980\n\n\tdiagnostic \t\t: \tboolean\n\t\t\t\t\t\toutput the diagnostic plots\n\t\t\t\t\t\tDefault is True\n\n\tReturns\n\t-------\n\tdefringe file \t: \tfits\n\t\t\t\t\t\tdefringed flat file\n\n\t'
data = fits.open(flat_file, ignore_missing_end=True)
data_length = len(data[0].data)
date = Time(data[0].header['DATE-OBS'], scale='utc')
jd = date.jd
if (jd >= 2458401.5):
data[0].data = np.rot90(data[0].data, k=3)
(hist, bins) = np.histogram(data[0].data.flatten(), bins=int(np.sqrt(len(data[0].data.flatten()))))
bins = bins[0:(- 1)]
index1 = np.where(((bins > np.percentile(data[0].data.flatten(), 10)) & (bins < np.percentile(data[0].data.flatten(), 30))))
try:
lowval = bins[index1][np.where((hist[index1] == np.min(hist[index1])))]
if (len(lowval) >= 2):
lowval = np.min(lowval)
except:
lowval = 0
flat = data
if (diagnostic is True):
save_to_image_path = (save_to_path + '/images/')
if (not os.path.exists(save_to_image_path)):
os.makedirs(save_to_image_path)
fig = plt.figure(figsize=(8, 8))
fig.suptitle('original flat', fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[6, 1])
ax0 = plt.subplot(gs[0])
norm = ImageNormalize(flat[0].data, interval=ZScaleInterval())
ax0.imshow(flat[0].data, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax0.set_ylabel('Row number')
ax1 = plt.subplot(gs[1], sharex=ax0)
ax1.plot(flat[0].data[60, :], 'k-', alpha=0.5, label='60th row profile')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.savefig((save_to_image_path + 'defringeflat_{}_0_original_flat.png'.format(filename)), bbox_inches='tight')
plt.close()
defringeflat_img = data
defringe_data = np.array(defringeflat_img[0].data, dtype=float)
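# Step through the image in bins of wbin rows; each bin gets a continuum fit and a wavelet-based fringe model that is subtracted below.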
for k in np.arange(0, (data_length - wbin), wbin):
'\n\t\t# Use the data to figure out the values to mask through the image (low counts/order edges)\n\t\thist, bins = np.histogram(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), \n\t\t\t bins=int(np.sqrt(len(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten()))))\n\t\tbins = bins[0:-1]\n\t\tindex1 = np.where( (bins > np.percentile(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), 10)) & \n\t\t\t (bins < np.percentile(flat[0].data[k:k+wbin+1, 0:data_length-clip1].flatten(), 30)) )\n\t\tlowval = bins[index1][np.where(hist[index1] == np.min(hist[index1]))]\n\t\t\n\t\t#print(lowval, len(lowval))\n\t\tif len(lowval) >= 2: lowval = np.min(lowval)\n\t\t'
mask = np.zeros(flat[0].data[k:k + wbin + 1, 0:data_length - clip1].shape)
baddata = np.where(flat[0].data[k:k + wbin + 1, 0:data_length - clip1] <= lowval)
mask[baddata] = 1
flat_patch = np.array(flat[0].data[k:k + wbin + 1, 0:data_length - clip1])
flat_patch_median = np.ma.median(flat_patch, axis=0)
smoothed = sp.ndimage.uniform_filter1d(flat_patch_median, 30)
splinefit = sp.interpolate.interp1d(np.arange(len(smoothed)), smoothed, kind='cubic')
cont_fit = splinefit(np.arange(0, (data_length - clip1)))
enhance_row = (flat_patch_median - cont_fit)
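# Wavelet analysis of the continuum-subtracted median profile; the reconstructed signal approximates the periodic fringe pattern (Rojo & Harrington 2006).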
dt = 0.1
wa = WaveletAnalysis(enhance_row[start_col:end_col], dt=dt)
power = wa.wavelet_power
scales = wa.scales
t = wa.time
rx = wa.reconstruction()
reconstruct_image = np.zeros(defringe_data[k:k + wbin + 1, 0:data_length - clip1].shape)
for i in range((wbin + 1)):
for j in np.arange(start_col, end_col):
reconstruct_image[i, j] = rx[j - start_col]
defringe_data[k:k + wbin + 1, 0:data_length - clip1] -= reconstruct_image[0:data_length - clip1]
defringe_data[k:k + wbin + 1, 0:data_length - clip1][baddata] = flat[0].data[k:k + wbin + 1, 0:data_length - clip1][baddata]
if (diagnostic is True):
print('Generating diagnostic plots')
fig = plt.figure(figsize=(10, 6))
fig.suptitle('middle cut at row {}'.format((k + (wbin // 2))), fontsize=12)
ax1 = fig.add_subplot(2, 1, 1)
norm = ImageNormalize(flat_patch, interval=ZScaleInterval())
ax1.imshow(flat_patch, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax1.set_ylabel('Row number')
ax2 = fig.add_subplot(2, 1, 2, sharex=ax1)
ax2.plot(flat_patch[wbin // 2, :], 'k-', alpha=0.5)
ax2.set_ylabel('Amp (DN)')
ax2.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_flat_start_row_{}_middle_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
fig = plt.figure(figsize=(10, 6))
fig.suptitle('continuum fit row {}-{}'.format(k, (k + wbin)), fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[3, 1])
ax0 = plt.subplot(gs[0])
ax0.plot(flat_patch_median, 'k-', alpha=0.5, label='mean average patch')
ax0.plot(cont_fit, 'r-', alpha=0.5, label='continuum fit')
ax0.set_ylabel('Amp (DN)')
plt.legend()
ax1 = plt.subplot(gs[1])
ax1.plot((flat_patch_median - cont_fit), 'k-', alpha=0.5, label='residual')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_continuum_fit.png'.format(filename, k)), bbox_inches='tight')
plt.close()
try:
fig = plt.figure(figsize=(10, 6))
fig.suptitle('reconstruct fringe comparison row {}-{}'.format(k, (k + wbin)), fontsize=10)
ax1 = fig.add_subplot(3, 1, 1)
ax1.set_title('enhance_row start row')
ax1.plot(enhance_row, 'k-', alpha=0.5, label='enhance_row start row {}'.format(k))
ax1.set_ylabel('Amp (DN)')
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.set_title('reconstructed fringe pattern')
ax2.plot(rx, 'k-', alpha=0.5, label='reconstructed fringe pattern')
ax2.set_ylabel('Amp (DN)')
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.set_title('residual')
ax3.plot((enhance_row[start_col:end_col] - rx), 'k-', alpha=0.5, label='residual')
ax3.set_ylabel('Amp (DN)')
ax3.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_reconstruct_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
except RuntimeError:
print('CANNOT GENERATE THE PLOT defringeflat\t\t\t\t\t_{}_start_row_{}_reconstruct_profile.png'.format(filename, k))
pass
fig = plt.figure(figsize=(10, 6))
fig.suptitle('reconstructed image row {}-{}'.format(k, (k + wbin)), fontsize=12)
ax1 = fig.add_subplot(3, 1, 1)
ax1.set_title('raw flat image')
norm = ImageNormalize(flat_patch, interval=ZScaleInterval())
ax1.imshow(flat_patch, cmap='gray', norm=norm, origin='lower', label='raw flat image', aspect='auto')
ax1.set_ylabel('Row number')
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.set_title('reconstructed fringe image')
norm = ImageNormalize(reconstruct_image, interval=ZScaleInterval())
ax2.imshow(reconstruct_image, cmap='gray', norm=norm, origin='lower', label='reconstructed fringe image', aspect='auto')
ax2.set_ylabel('Row number')
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.set_title('residual')
norm = ImageNormalize((flat_patch - reconstruct_image), interval=ZScaleInterval())
ax3.imshow((flat_patch - reconstruct_image), norm=norm, origin='lower', cmap='gray', label='residual', aspect='auto')
ax3.set_ylabel('Row number')
ax3.set_xlabel('Column number')
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_reconstruct_image.png'.format(filename, k)), bbox_inches='tight')
plt.close()
fig = plt.figure(figsize=(10, 6))
fig.suptitle('middle row comparison row {}-{}'.format(k, (k + wbin)), fontsize=12)
ax1 = fig.add_subplot(3, 1, 1)
ax1.plot(flat_patch[wbin // 2, :], 'k-', alpha=0.5, label='original flat row {}'.format(k + wbin / 2))
ax1.set_ylabel('Amp (DN)')
plt.legend()
ax2 = fig.add_subplot(3, 1, 2, sharex=ax1)
ax2.plot(flat_patch[wbin // 2, :] - reconstruct_image[wbin // 2, :], 'k-', alpha=0.5, label='defringed flat row {}'.format(k + wbin / 2))
ax2.set_ylabel('Amp (DN)')
plt.legend()
ax3 = fig.add_subplot(3, 1, 3, sharex=ax1)
ax3.plot(reconstruct_image[wbin // 2, :], 'k-', alpha=0.5, label='difference')
ax3.set_ylabel('Amp (DN)')
ax3.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_start_row_{}_defringe_middle_profile.png'.format(filename, k)), bbox_inches='tight')
plt.close()
if (diagnostic is True):
fig = plt.figure(figsize=(8, 8))
fig.suptitle('defringed flat', fontsize=12)
gs = gridspec.GridSpec(2, 1, height_ratios=[6, 1])
ax0 = plt.subplot(gs[0])
norm = ImageNormalize(defringe_data, interval=ZScaleInterval())
ax0.imshow(defringe_data, cmap='gray', norm=norm, origin='lower', aspect='auto')
ax0.set_ylabel('Row number')
ax1 = plt.subplot(gs[1], sharex=ax0)
ax1.plot(defringe_data[60, :], 'k-', alpha=0.5, label='60th row profile')
ax1.set_ylabel('Amp (DN)')
ax1.set_xlabel('Column number')
plt.legend()
plt.tight_layout()
plt.subplots_adjust(top=0.85, hspace=0.5)
plt.savefig((save_to_image_path + 'defringeflat_{}_0_defringe_flat.png'.format(filename)), bbox_inches='tight')
plt.close()
if (jd >= 2458401.5):
defringe_data = np.rot90(defringe_data, k=1)
hdu = fits.PrimaryHDU(data=defringe_data)
hdu.header = flat[0].header
return hdu<|docstring|>This function is to remove the fringe pattern using
the method described in Rojo and Harrington (2006).
Use a fifth order polynomial to remove the continuum.
Parameters
----------
flat_file : fits
original flat file
Optional Parameters
-------------------
wbin : int
the bin width to calculate each
enhance row
Default is 32
start_col : int
starting column number for the
wavelet analysis
Default is 10
end_col : int
ending column number for the
wavelet analysis
Default is 980
diagnostic : boolean
output the diagnostic plots
Default is True
Returns
-------
defringe file : fits
defringed flat file<|endoftext|>
|