metadata | text
---|---
{
"source": "joyguey/pythonpractice",
"score": 4
} |
#### File: joyguey/pythonpractice/storygenerator.py
```python
import re
import time
#introductions
print ("Hello... there!")
print ("What should I call you?")
name = input()
print ("Hello "+ name +", I want to create a story but I need your help.")
print ("Would you like to help me?")
def yes_no_evaluator(answer):
    while answer not in ("yes", "no"):
        print("Type yes or no, dumdum")
        raw_answer = input("Enter yes or no to continue: ")
        answer = raw_answer.lower().replace(" ", "")
    if answer == "no":
        print("Fine. Goodbye.")
        raise SystemExit
answer = input().lower().replace(" ", "")
yes_no_evaluator(answer)
#madlib stories
arcade = '''When I go to the arcade with my <plural noun-1> there are lots of games to play. I spend lots of time there with my friends. In the game X-Men you can be different <plural noun>. The point of the game is to <verb> every robot. You also need to save people. Then you can go to the next level. In the game Star Wars you are Luke Skywalker and you try to destroy every <noun>. In a car racing/motorcycle racing game you need to beat every computerized vehicle that you are <-ing verb> against. There are a whole lot of other cool games. When you play some games you win <plural noun> for certain scores. Once you're done you can cash in your tickets to get a big <noun>. You can save your <plural noun> for another time. When I went to this arcade I didn't believe how much fun it would be. '''
#I found this solution online; it uses the re library
def madlibs(arcade):
fields = sorted(set( re.findall('<[^>]+>', arcade) ))
values = input('\nInput a comma-separated list of words to replace the following items'
'\n %s: ' % ','.join(fields)).split(',')
story = arcade
for f,v in zip(fields, values):
story = story.replace(f, v)
time.sleep(3)
print('\nHere is the story: \n\n' + story)
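# Illustrative note (not part of the original script): the regex above pulls
# out every <...> placeholder, e.g.
#   re.findall('<[^>]+>', 'a <noun> and a <verb>')  ->  ['<noun>', '<verb>']
# zip() then pairs each placeholder with the corresponding user-supplied word.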
#hardcode method
zoo = "Today I went to the zoo. I saw a(n) (adjective) (animal) jumping up and down in its tree. He (verb, past tense) (adverb) through the large tunnel that led to its (adjective1) (noun). I got some peanuts and passed them through the cage to a gigantic gray (animal2) towering above my head. Feeding that animal made me hungry. I went to get (number) scoop(s) of ice cream. It filled my stomach. Afterwards I had to (verb) (adverb1) to catch our bus."
def madlib(zoo):
adj = str(input("Enter an adjective:"))
animal = str(input("Enter an animal:"))
verbpt = str(input("Enter a verb in past-tense:"))
adverb = str(input ("Enter an adverb:"))
adj1 = str(input("Enter another adjective:"))
noun = str(input("Enter another noun:"))
animal2 = str(input("Enter another animal:"))
num = str(input ("Enter a number:"))
verb = str(input ("Enter a verb:"))
adverb1 = str(input("Enter another adverb:"))
zoo = zoo.replace ("(adjective)", adj)
zoo = zoo.replace ("(animal)", animal)
zoo = zoo.replace ("(verb, past tense)", verbpt)
zoo = zoo.replace ("(adverb)", adverb)
zoo = zoo.replace ("(adjective1)", adj1)
zoo = zoo.replace ("(noun)", noun)
zoo = zoo.replace ("(animal2)", animal2)
zoo = zoo.replace ("(number)", num)
zoo = zoo.replace ("(verb)", verb)
zoo = zoo.replace ("(adverb1)", adverb1)
time.sleep (3)
print("Here is the story: "+ zoo)
#hardcode version 2
disney = "Last month, I went to Disney World with (friend's name). We traveled for (hours) hours by (vehicle). Finally, we arrived and it was very (adjective). There were (adjective1) people (-ing verb) everywhere. There were also people dressed up in (animal) costumes. I wish it had been more (adjective2), but we (past tense verb) anyway. We also went on a ride called Magic (noun). (friend's name) nearly fell off the ride! Later, we went to the hotel and (past tense verb1). Next year, I want to go to (place), where we can (verb)."
def mdlb(disney):
fn = str(input("Enter a friend's name:"))
hr = str(input("Enter a number:"))
vehicle = str(input("Enter a type of vehicle:"))
adj = str(input ("Enter an adjective:"))
adj1 = str(input("Enter another adjective:"))
ing = str(input("Enter a verb ending in -ing:"))
animal = str(input("Enter an animal:"))
adj2 = str(input ("Enter another adjective:"))
pstverb = str(input ("Enter a past tense verb:"))
noun = str(input("Enter a noun:"))
pstverb1 = str(input ("Enter another past tense verb:"))
place = str(input("Enter a place:"))
verb = str(input("Enter a verb:"))
keywords = ["(friend's name)","(hours)","(vehicle)","(adjective)","(adjective1)","(-ing verb)","(animal)","(adjective2)","(past tense verb)","(noun)","(past tense verb1)","(place)","(verb)"]
replacements = [fn,hr,vehicle,adj,adj1,ing,animal,adj2,pstverb,noun,pstverb1,place,verb]
for key,repl in zip(keywords,replacements):
disney = disney.replace(key, repl)
time.sleep(3)
print("Here is the story:"+disney)
#hardcode version 2 continued
park = "Today, my fabulous camp group went to a an amusement park. It was a fun park with lots of cool (plural noun) and enjoyable play structures. When we got there, my kind counselor shouted loudly, 'Everybody off the (noun).' We all pushed out in a terrible hurry. My counselor handed out yellow tickets, and we scurried in. I was so excited! I couldn't figure out what exciting thing to do first. I saw a scary roller coaster I really liked so, I (adverb) ran over to get in the long line that had about (number) people in it. When I finally got on the roller coaster I was (past tense verb). In fact I was so nervous my two knees were knocking together. This was the (-est adjective) ride I had ever been on! In about two minutes I heard the crank and grinding of the gears. That’s when the ride began! When I got to the bottom, I was a little (past tense verb1) but I was proud of myself. The rest of the day went (adverb1). It was a(n) (adjective1) day at the fun park. "
def mdlib(park):
pn = str(input("Enter a plural noun:"))
noun = str(input("Enter a noun:"))
adverb = str(input("Enter an adverb:"))
num = str(input ("Enter a number:"))
pstverb = str(input ("Enter a past tense verb:"))
adj = str(input("Enter an adjective ending in -est:"))
pstverb1 = str(input("Enter another past tense verb:"))
adverb1 = str(input("Enter another adverb:"))
adj1 = str(input ("Enter another adjective:"))
keywords = ["(plural noun)","(noun)","(adverb)","(number)","(past tense verb)","(-est adjective)","(past tense verb1)","(adverb1)","(adjective1)"]
replacements = [pn,noun,adverb,num,pstverb,adj,pstverb1,adverb1,adj1]
for key,repl in zip(keywords,replacements):
park = park.replace(key, repl)
time.sleep(3)
print("Here is the story:"+park)
#continued prompt location selection
while True:
place = input("Where would you like to visit after the pandemic is over? You can type 'arcade', 'zoo', 'park', or 'disney' or 'q' to quit")
if place == "arcade":
madlibs(arcade)
elif place == "zoo":
madlib(zoo)
elif place == "disney":
mdlb(disney)
elif place == "park":
mdlib(park)
elif place == "q":
break
else:
print ("Sorry " +name+ " you can only pick the options listed. I'm too tired to generate more.")
continue
``` |
{
"source": "joy-highfidelity/sumologic-collectd-plugin",
"score": 2
} |
#### File: sumologic-collectd-plugin/sumologic_collectd_metrics/metrics_config.py
```python
from . metrics_util import validate_non_empty, validate_string_type, validate_positive, \
validate_non_negative, validate_field
class ConfigOptions(object):
"""
Config options
"""
types_db = 'TypesDB'
url = 'URL'
# Http header options
dimension_tags = 'Dimensions'
meta_tags = 'Metadata'
source_name = 'SourceName'
host_name = 'SourceHost'
source_category = 'SourceCategory'
# Metrics Batching options
max_batch_size = 'MaxBatchSize'
max_batch_interval = 'MaxBatchInterval'
# Http post request frequency option
http_post_interval = 'HttpPostInterval'
# Http retry options
retry_initial_delay = 'RetryInitialDelay'
retry_max_attempts = 'RetryMaxAttempts'
retry_max_delay = 'RetryMaxDelay'
retry_backoff = 'RetryBackOff'
retry_jitter_min = 'RetryJitterMin'
retry_jitter_max = 'RetryJitterMax'
# Memory option
max_requests_to_buffer = 'MaxRequestsToBuffer'
# Content encoding option
content_encoding = 'ContentEncoding'
# Static option, not configurable yet. Default is application/vnd.sumologic.carbon2
content_type = 'ContentType'
shutdown_max_wait = "ShutdownMaxWait" # seconds
class MetricsConfig:
"""
Configuration for sumologic collectd plugin
"""
_content_encoding_set = frozenset(['deflate', 'gzip', 'none'])
def __init__(self, collectd):
"""
Init MetricsConfig with default config
"""
self.collectd = collectd
self.conf = self.default_config()
self.types = {}
collectd.info('Initialized MetricsConfig with default config %s' % self.conf)
@staticmethod
def default_config():
return {
ConfigOptions.http_post_interval: 0.1,
ConfigOptions.max_batch_size: 5000,
ConfigOptions.max_batch_interval: 1,
ConfigOptions.retry_initial_delay: 0,
ConfigOptions.retry_max_attempts: 10,
ConfigOptions.retry_max_delay: 100,
ConfigOptions.retry_backoff: 2,
ConfigOptions.retry_jitter_min: 0,
ConfigOptions.retry_jitter_max: 10,
ConfigOptions.max_requests_to_buffer: 1000,
ConfigOptions.content_encoding: 'deflate',
ConfigOptions.content_type: 'application/vnd.sumologic.carbon2',
ConfigOptions.shutdown_max_wait: 5
}
def parse_config(self, config):
"""
Parse the python plugin configurations in collectd.conf
"""
try:
for child in config.children:
if child.key == ConfigOptions.types_db:
for v in child.values:
self._parse_types(v)
elif child.key == ConfigOptions.url:
url = child.values[0]
self.conf[child.key] = url
validate_non_empty(url, child.key)
elif child.key in [ConfigOptions.dimension_tags, ConfigOptions.meta_tags]:
self._parse_tags(child)
elif child.key in [ConfigOptions.source_name, ConfigOptions.host_name,
ConfigOptions.source_category]:
s = child.values[0]
validate_non_empty(s, child.key)
validate_string_type(s, child.key, 'Value', 'Key')
self.conf[child.key] = s
elif child.key == ConfigOptions.http_post_interval:
f = float(child.values[0])
validate_positive(f, child.key)
self.conf[child.key] = f
elif child.key in [ConfigOptions.max_batch_size, ConfigOptions.max_batch_interval,
ConfigOptions.retry_max_attempts, ConfigOptions.retry_max_delay,
ConfigOptions.retry_backoff,
ConfigOptions.max_requests_to_buffer]:
i = int(child.values[0])
validate_positive(i, child.key)
self.conf[child.key] = i
elif child.key in [ConfigOptions.retry_initial_delay,
ConfigOptions.retry_jitter_min, ConfigOptions.retry_jitter_max]:
i = int(child.values[0])
validate_non_negative(i, child.key)
self.conf[child.key] = i
elif child.key == ConfigOptions.content_encoding:
s = child.values[0]
validate_non_empty(s, child.key)
validate_string_type(s, child.key, 'Value', 'Key')
content_encoding = s.lower()
if content_encoding not in self._content_encoding_set:
raise Exception('Unknown ContentEncoding %s specified. ContentEncoding '
'must be deflate, gzip, or none' % s)
self.conf[child.key] = content_encoding
else:
self.collectd.warning('Unknown configuration %s, ignored.' % child.key)
except Exception as e:
self.collectd.error('Failed to parse configurations due to %s' % str(e))
raise e
if ConfigOptions.url not in self.conf:
raise Exception('Specify %s in collectd.conf.' % ConfigOptions.url)
if not self.types:
raise Exception('Specify %s in collectd.conf.' % ConfigOptions.types_db)
http_post_interval = self.conf[ConfigOptions.http_post_interval]
max_batch_interval = self.conf[ConfigOptions.max_batch_interval]
if http_post_interval > max_batch_interval:
raise Exception('Specify HttpPostInterval %f as float between 0 and '
'MaxBatchInterval %d' %(http_post_interval, max_batch_interval))
retry_jitter_min = self.conf[ConfigOptions.retry_jitter_min]
retry_jitter_max = self.conf[ConfigOptions.retry_jitter_max]
if retry_jitter_min > retry_jitter_max:
raise Exception('Specify RetryJitterMin %d to be less or equal to RetryJitterMax %d' %
(retry_jitter_min, retry_jitter_max))
self.collectd.info('Updated MetricsConfig %s with config file %s ' % (self.conf, config))
# parse types.db file
    def _parse_types(self, db):
        try:
            with open(db, 'r') as f:
                for line in f:
                    fields = line.split()
                    if len(fields) < 2:
                        continue
                    type_name = fields[0]
                    if type_name[0] == '#':
                        continue
                    v = []
                    for ds in fields[1:]:
                        ds = ds.rstrip(',')
                        ds_fields = ds.split(':')
                        if len(ds_fields) != 4:
                            self.collectd.warning('Cannot parse data source %s on type %s'
                                                  % (ds, type_name))
                            continue
                        v.append(ds_fields)
                    self.types[type_name] = v
            self.collectd.info('Parsed types %s with types_db file %s ' % (self.types, db))
        except Exception as e:
            self.collectd.error('Parse types %s failed with %s' % (db, str(e)))
            raise e
# parse dimension_tags/meta_tags specified in collectd.conf
def _parse_tags(self, child):
if len(child.values) % 2 != 0:
raise Exception('Missing tags key/value in options %s.' % str(child.values))
for v in child.values:
validate_field(v, child.key, 'Value', 'Key')
        self.conf[child.key] = list(zip(*(iter(child.values),) * 2))  # materialize so it can be logged and reused
self.collectd.info('Parsed %s tags %s' % (child.key, self.conf[child.key]))
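        # Illustrative note (not in the original): zip(*(iter(values),) * 2)
        # pairs consecutive items, e.g.
        #   list(zip(*(iter(['k1', 'v1', 'k2', 'v2']),) * 2))
        #   -> [('k1', 'v1'), ('k2', 'v2')]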
``` |
{
"source": "joyhuang-web/flaskbb",
"score": 3
} |
#### File: unit/utils/test_permissions.py
```python
from flaskbb.utils.permissions import *
def test_moderator_permissions_in_forum(
forum, moderator_user, topic, topic_moderator):
"""Test the moderator permissions in a forum where the user is a
moderator.
"""
assert moderator_user in forum.moderators
assert can_post_reply(moderator_user, forum)
assert can_post_topic(moderator_user, forum)
assert can_edit_post(moderator_user, topic.user_id, forum)
assert can_moderate(moderator_user, forum)
assert can_delete_post(moderator_user, topic.user_id, forum)
assert can_delete_topic(moderator_user, topic.user_id, forum)
def test_moderator_permissions_without_forum(
forum, moderator_user, topic, topic_moderator):
"""Test the moderator permissions in a forum where the user is not a
moderator.
"""
forum.moderators.remove(moderator_user)
    assert moderator_user not in forum.moderators
assert not can_moderate(moderator_user, forum)
assert can_post_reply(moderator_user, forum)
assert can_post_topic(moderator_user, forum)
assert not can_edit_post(moderator_user, topic.user_id, forum)
assert not can_delete_post(moderator_user, topic.user_id, forum)
assert not can_delete_topic(moderator_user, topic.user_id, forum)
# Test with own topic
assert can_delete_post(moderator_user, topic_moderator.user_id, forum)
assert can_delete_topic(moderator_user, topic_moderator.user_id, forum)
assert can_edit_post(moderator_user, topic_moderator.user_id, forum)
# Test moderator permissions
assert can_edit_user(moderator_user)
assert can_ban_user(moderator_user)
def test_normal_permissions(forum, user, topic):
"""Test the permissions for a normal user."""
assert not can_moderate(user, forum)
assert can_post_reply(user, forum)
assert can_post_topic(user, forum)
assert can_edit_post(user, topic.user_id, forum)
assert not can_delete_post(user, topic.user_id, forum)
assert not can_delete_topic(user, topic.user_id, forum)
assert not can_edit_user(user)
assert not can_ban_user(user)
def test_admin_permissions(forum, admin_user, topic):
"""Test the permissions for a admin user."""
assert can_moderate(admin_user, forum)
assert can_post_reply(admin_user, forum)
assert can_post_topic(admin_user, forum)
assert can_edit_post(admin_user, topic.user_id, forum)
assert can_delete_post(admin_user, topic.user_id, forum)
assert can_delete_topic(admin_user, topic.user_id, forum)
assert can_edit_user(admin_user)
assert can_ban_user(admin_user)
def test_super_moderator_permissions(forum, super_moderator_user, topic):
"""Test the permissions for a super moderator user."""
assert can_moderate(super_moderator_user, forum)
assert can_post_reply(super_moderator_user, forum)
assert can_post_topic(super_moderator_user, forum)
assert can_edit_post(super_moderator_user, topic.user_id, forum)
assert can_delete_post(super_moderator_user, topic.user_id, forum)
assert can_delete_topic(super_moderator_user, topic.user_id, forum)
assert can_edit_user(super_moderator_user)
assert can_ban_user(super_moderator_user)
def test_can_moderate_without_permission(moderator_user):
"""Test can moderate for a moderator_user without a permission."""
    assert can_moderate(moderator_user) is False
``` |
{
"source": "JoyHuYY1412/CADR-FixMatch",
"score": 2
} |
#### File: cta/lib/train.py
```python
import numpy as np
from absl import flags
from fully_supervised.lib.train import ClassifyFullySupervised
from libml import data
from libml.augment import AugmentPoolCTA
from libml.ctaugment import CTAugment
from libml.train import ClassifySemi
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_integer('adepth', 2, 'Augmentation depth.')
flags.DEFINE_float('adecay', 0.99, 'Augmentation decay.')
flags.DEFINE_float('ath', 0.80, 'Augmentation threshold.')
class CTAClassifySemi(ClassifySemi):
"""Semi-supervised classification."""
AUGMENTER_CLASS = CTAugment
AUGMENT_POOL_CLASS = AugmentPoolCTA
@classmethod
def cta_name(cls):
return '%s_depth%d_th%.2f_decay%.3f' % (cls.AUGMENTER_CLASS.__name__,
FLAGS.adepth, FLAGS.ath, FLAGS.adecay)
def __init__(self, train_dir: str, dataset: data.DataSets, nclass: int, **kwargs):
ClassifySemi.__init__(self, train_dir, dataset, nclass, **kwargs)
self.augmenter = self.AUGMENTER_CLASS(FLAGS.adepth, FLAGS.ath, FLAGS.adecay)
def gen_labeled_fn(self, data_iterator):
def wrap():
batch = self.session.run(data_iterator)
batch['cta'] = self.augmenter
batch['probe'] = True
return batch
return self.AUGMENT_POOL_CLASS(wrap)
def gen_unlabeled_fn(self, data_iterator):
def wrap():
batch = self.session.run(data_iterator)
batch['cta'] = self.augmenter
batch['probe'] = False
return batch
return self.AUGMENT_POOL_CLASS(wrap)
def train_step(self, train_session, gen_labeled, gen_unlabeled):
x, y = gen_labeled(), gen_unlabeled()
v = train_session.run([self.ops.classify_op, self.ops.train_op, self.ops.update_step, self.ops.update_p],
feed_dict={self.ops.y: y['image'],
self.ops.x: x['probe'],
self.ops.xt: x['image'],
self.ops.label: x['label'],
self.ops.p_cls_ema: self.tmp_p})
        self.tmp_p = v[-1]
# self.p_cls_ema = tf.convert_to_tensor(v[-1])
self.tmp.step = v[-2]
lx = v[0]
for p in range(lx.shape[0]):
error = lx[p]
error[x['label'][p]] -= 1
error = np.abs(error).sum()
self.augmenter.update_rates(x['policy'][p], 1 - 0.5 * error)
def eval_stats(self, batch=None, feed_extra=None, classify_op=None, verbose=True):
"""Evaluate model on train, valid and test."""
batch = batch or FLAGS.batch
classify_op = self.ops.classify_op if classify_op is None else classify_op
accuracies = []
for subset in ('train_labeled', 'valid', 'test'):
images, labels = self.tmp.cache[subset]
if subset == 'test':
num_class = self.dataset.nclass
classwise_num = np.zeros(num_class)
classwise_correct = np.zeros(num_class)
for lab_i in labels:
classwise_num[lab_i] = classwise_num[lab_i] + 1
assert np.sum(classwise_num) == labels.shape[0]
predicted = []
for x in range(0, images.shape[0], batch):
p = self.session.run(
classify_op,
feed_dict={
self.ops.x: images[x:x + batch],
**(feed_extra or {})
})
predicted.append(p)
predicted = np.concatenate(predicted, axis=0)
predicted_label = predicted.argmax(1)
# if subset == 'valid':
# # for boxplot only
# predicted_confidence = predicted.max(1)
# a = np.array(range(len(predicted)))
# b = labels
# true_confidence = predicted[a, b]
# vaild_info = dict()
# vaild_info['labels'] = labels.tolist()
# vaild_info['confidence'] = predicted_confidence.tolist()
# vaild_info['true_confidence'] = true_confidence.tolist()
# vaild_info['predict'] = predicted_label.tolist()
# import pickle
# a_file = open("/gruntdata2/xinting/project/tf_fixmatch/experiments/fixmatch/cifar10_LT_50.d.d.d.1@50-50000/CTAugment_depth2_th0.80_decay0.990/data_0_1.pkl", "wb")
# pickle.dump(vaild_info, a_file)
# a_file.close()
# import pdb; pdb.set_trace()
del predicted
if subset == 'test':
for image_i in range(images.shape[0]):
if predicted_label[image_i] == labels[image_i]:
classwise_correct[predicted_label[image_i]] += 1
accuracies.append((predicted_label == labels).mean() * 100)
if subset == 'test':
                # calculate the geometric mean
classwise_acc = (classwise_correct / classwise_num)
GM = 1
for i in range(num_class):
if classwise_acc[i] == 0:
                        # To avoid zeroing the product, floor the per-class term at 1/(100 * num_class)
GM *= (1/(100 * num_class)) ** (1/num_class)
else:
GM *= (classwise_acc[i]) ** (1/num_class)
accuracies.append(GM * 100)
# accuracy per class
accuracies.extend(classwise_acc * 100)
if verbose:
self.train_print('kimg %-5d accuracy train/valid/test/GM %.2f %.2f %.2f %.2f' %
tuple([self.tmp.step >> 10] + accuracies[:4]))
# self.train_print(self.augmenter.stats())
return np.array(accuracies, 'f')
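# Sketch of the metric computed above (assuming C classes with per-class
# accuracies a_i):
#   GM = (prod_i a_i)^(1/C)
# with zero accuracies floored at 1/(100*C) so the product does not collapse to 0.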
class CTAClassifyFullySupervised(ClassifyFullySupervised, CTAClassifySemi):
"""Fully-supervised classification."""
def train_step(self, train_session, gen_labeled):
x = gen_labeled()
v = train_session.run([self.ops.classify_op, self.ops.train_op, self.ops.update_step],
feed_dict={self.ops.x: x['probe'],
self.ops.xt: x['image'],
self.ops.label: x['label']})
self.tmp.step = v[-1]
lx = v[0]
for p in range(lx.shape[0]):
error = lx[p]
error[x['label'][p]] -= 1
error = np.abs(error).sum()
self.augmenter.update_rates(x['policy'][p], 1 - 0.5 * error)
``` |
{
"source": "JoyHuYY1412/maskrcnn_base",
"score": 2
} |
#### File: JoyHuYY1412/maskrcnn_base/get_finetune_model.py
```python
import torch
from torch import nn
import sys, getopt
def main(argv):
opts, args = getopt.getopt(argv,"hi:o:",["ifile=","ofile="])
for opt, arg in opts:
if opt in ("-i", "--ifile"):
model_path = arg
elif opt in ("-o", "--ofile"):
trimmed_model_path = arg
state_dict = torch.load(model_path, map_location="cpu")
model = state_dict['model']
cls_weight_new = torch.Tensor(271+160*4, 1024)
nn.init.normal_(cls_weight_new, std=0.01)
cls_weight_new[:271+160*3] = model['module.roi_heads.box.predictor.cls_score.weight']
model['module.roi_heads.box.predictor.cls_score.weight'] = cls_weight_new
reg_weight_new = torch.Tensor((271+160*4)*4, 1024)
nn.init.normal_(reg_weight_new, std=0.001)
reg_weight_new[:(271+160*3)*4] = model['module.roi_heads.box.predictor.bbox_pred.weight']
model['module.roi_heads.box.predictor.bbox_pred.weight'] = reg_weight_new
reg_bias_new = torch.Tensor((271+160*4)*4)
nn.init.constant_(reg_bias_new, 0)
reg_bias_new[:(271+160*3)*4] = model['module.roi_heads.box.predictor.bbox_pred.bias']
model['module.roi_heads.box.predictor.bbox_pred.bias'] = reg_bias_new
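    # Note (inferred from the sizes above, not stated in the original): the
    # checkpoint appears to cover 271 base classes plus three groups of 160
    # novel classes; rows for those 271+160*3 classes are copied over, while
    # the rows for the fourth group of 160 keep their fresh initialization.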
print("Also deleting optimizer, scheduler, and iteration entries")
del state_dict['optimizer']
del state_dict['scheduler']
del state_dict['iteration']
torch.save(state_dict, trimmed_model_path)
print(f'saved to: {trimmed_model_path}')
if __name__ == "__main__":
main(sys.argv[1:])
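# Usage (illustrative file names): python get_finetune_model.py -i checkpoint.pth -o trimmed.pth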
```
#### File: evaluation/lvis/__init__.py
```python
from .lvis_eval import do_lvis_evaluation
def lvis_evaluation(
dataset,
predictions,
output_folder,
box_only,
iou_types,
iteration,
# gt_path,
**_
):
return do_lvis_evaluation(
dataset=dataset,
gt_path="datasets/lvis/lvis_v0.5_val.json",
# gt_path="datasets/lvis/lvis_trainval_1230/lvis_v0.5_val_top270.json",
predictions=predictions,
box_only=box_only,
output_folder=output_folder,
iou_types=iou_types,
iteration = iteration,
)
``` |
{
"source": "joyider/OSP",
"score": 2
} |
#### File: OSP/classes/Stream.py
```python
from .shared import db
from .settings import settings
class Stream(db.Model):
__tablename__ = "Stream"
id = db.Column(db.Integer, primary_key=True)
linkedChannel = db.Column(db.Integer,db.ForeignKey('Channel.id'))
streamKey = db.Column(db.String(255))
streamName = db.Column(db.String(255))
topic = db.Column(db.Integer)
currentViewers = db.Column(db.Integer)
totalViewers = db.Column(db.Integer)
upvotes = db.relationship('streamUpvotes', backref='stream', cascade="all, delete-orphan", lazy="joined")
def __init__(self, streamKey, streamName, linkedChannel, topic):
self.streamKey = streamKey
self.streamName = streamName
self.linkedChannel = linkedChannel
self.currentViewers = 0
self.totalViewers = 0
self.topic = topic
self.channelMuted = False
def __repr__(self):
return '<id %r>' % self.id
def get_upvotes(self):
return len(self.upvotes)
def add_viewer(self):
self.currentViewers = self.currentViewers + 1
db.session.commit()
def remove_viewer(self):
self.currentViewers = self.currentViewers - 1
db.session.commit()
def serialize(self):
sysSettings = settings.query.first()
streamURL = ''
if sysSettings.adaptiveStreaming is True:
streamURL = '/streams/' + self.channel.channelLoc + '.m3u8'
elif self.channel.record is True:
streamURL = '/live-rec/' + self.channel.channelLoc + '/index.m3u8'
else:
streamURL = '/live/' + self.channel.channelLoc + '/index.m3u8'
return {
'id': self.id,
'channelID': self.linkedChannel,
'channelEndpointID': self.channel.channelLoc,
'owningUser': self.channel.owningUser,
'streamPage': '/view/' + self.channel.channelLoc + '/',
'streamURL': streamURL,
'streamName': self.streamName,
'thumbnail': '/stream-thumb/' + self.channel.channelLoc + '.png',
'topic': self.topic,
'currentViewers': self.currentViewers,
            'totalViewers': self.totalViewers,
'upvotes': self.get_upvotes()
}
``` |
{
"source": "joyider/pob",
"score": 2
} |
#### File: pob/script/backup.py
```python
import os
from sys import exit
import subprocess
import time
from ConfigParser import SafeConfigParser
import smtplib
import mimetypes
import email.mime.application
import email.mime.multipart
import email.mime.text
conf_file = '../config.ini'
currentdate = time.strftime("%Y-%m-%d_%H%M%S")
bkplog = os.path.realpath('../logs/backup_' + currentdate + '.log')
logfile = os.path.realpath('../logs/rman_' + currentdate + '.log')
print('Log files for current session can be found at: ' + bkplog + ' and ' + logfile)
def getNodeText(node):
nodelist = node.childNodes
result = []
for node in nodelist:
if node.nodeType == node.TEXT_NODE:
result.append(node.data)
return ''.join(result)
def backup_db(log):
sid = config.get('db', 'sid')
cmdfile = config.get('db', 'pob_script')
log.write('Oracle backup settings: - SID: ' + sid + ' - cmdFile: ' + cmdfile + '\n')
rmanCMD = 'rman cmdfile="' + cmdfile + '" log="' + logfile + '" target /'
os.putenv('NLS_DATE_FORMAT', 'DD-MM-YYYY HH24:MI:SS')
os.putenv('ORACLE_SID', sid)
output = subprocess.check_output(rmanCMD, shell=True)
log.write(output)
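# For reference (illustrative paths): the rman command assembled in backup_db
# looks like:
#   rman cmdfile="/path/backup.rman" log="/path/rman_<date>.log" target /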
def send_mail(conf, log, rmanLog, date):
fromaddr = conf.get('smtp', 'from')
toaddr = conf.get('smtp', 'to')
ccaddr = conf.get('smtp', 'cc')
server = conf.get('smtp', 'server')
port = conf.get('smtp', 'port')
useSSL = conf.get('smtp', 'ssl')
username = conf.get('smtp', 'user')
passwd = conf.get('smtp', 'password')
    msg = email.mime.multipart.MIMEMultipart()
msg['Subject'] = 'RMAN log ' + date
msg['From'] = fromaddr
msg['To'] = toaddr
msg['Cc'] = ccaddr
    body = email.mime.text.MIMEText('The log files are attached.')
msg.attach(body)
filename = os.path.basename(log)
with open(log, 'rb') as f:
att = email.mime.application.MIMEApplication(f.read(), _subtype="txt")
att.add_header('Content-Disposition', 'attachment; filename=%s' % filename)
msg.attach(att)
filename = os.path.basename(rmanLog)
with open(rmanLog, 'rb') as f:
att = email.mime.application.MIMEApplication(f.read(), _subtype="txt")
att.add_header('Content-Disposition', 'attachment; filename=%s' % filename)
msg.attach(att)
if (len(server) == 0 or len(port) == 0):
return
server = smtplib.SMTP(server + ':' + port)
if useSSL.lower() == 'true':
server.starttls()
if (len(username) > 0 and len(passwd) > 0):
server.login(username, passwd)
rcpt = ccaddr.split(",") + [toaddr]
server.sendmail(fromaddr, rcpt, msg.as_string())
server.quit()
# end
if __name__ == "__main__":
with open(bkplog, 'w') as log:
if not os.path.exists(conf_file):
log.write('The config file (' + conf_file + ') does not exist...\n')
log.write('Backup process was abandoned.\n')
exit(0)
config = SafeConfigParser()
config.read(conf_file)
backup_db(log)
send_mail(config, bkplog, logfile, currentdate)
``` |
{
"source": "joyider/poraenv",
"score": 2
} |
#### File: joyider/poraenv/poraenv.py
```python
from subprocess import Popen, PIPE
from re import split
from sys import stdout
from datetime import *
import pprint
import glob
import re
import platform
import os
import socket
import getpass
def singleton(class_):
instances = {}
def getinstance(*args, **kwargs):
if class_ not in instances:
instances[class_] = class_(*args, **kwargs)
return instances[class_]
return getinstance
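# Illustrative note (not in the original): with this decorator, repeated
# instantiation returns the same object, e.g.
#   @singleton
#   class C(object): pass
#   C() is C()  # -> True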
class Database(object):
def __init__(self, sid, ohome):
#super(Database, self).__init__()
self.sid = sid
self.ohome = ohome
#print 'Setting DB thingy', sid, ohome
def __repr__(self):
return 'Database(Oracle_SID=%s, Oracle_HOME=%s)' % (self.sid, self.ohome)
def __str__(self):
return 'Database(Oracle_SID=%s, Oracle_HOME=%s)' % (self.sid, self.ohome)
def getsid(self):
return self.sid
def getohome(self):
return self.ohome
def setenv(self):
os.putenv('ORACLE_SID', self.sid)
os.putenv('ORACLE_HOME', self.ohome)
os.putenv('PATH', os.environ['PATH'] + ':/usr/bin:/usr/sbin:' + self.ohome + '/bin:/opt/csw/bin')
#print self.sid
#print('Setting ORACLE_HOME {}').format(self.ohome)
    def getenv(self):
        if os.environ.get('ORACLE_SID') and os.environ.get('ORACLE_HOME'):
            return os.environ['ORACLE_SID'], os.environ['ORACLE_HOME']
        else:
            return None
def getDB(self):
return Database
@singleton
class Logging(object):
def __init__(self, dir,
ldate=datetime.today().strftime("%Y%m%d_%H%M")):
self.logdir = dir
self.envlogdir = dir + 'env_' + '_' + ldate + '.log'
#print self.envlogdir
@property
def logDir(self):
return self.logdir
@logDir.setter
def logDir(self, newdir):
self.logdir = newdir
@property
def envlogDir(self):
return self.envlogdir
@envlogDir.setter
def envlogDir(self, newdir):
self.envlogdir = newdir
def getsystem():
return platform.system().lower()
def parseoratab(fname):
config = {}
with open(fname, "r") as f:
for line in f.readlines():
li = line.lstrip().lower()
if li and not li.startswith("#") and not li.startswith("+") and not li.startswith("-"):
linesplit = li.split(':')
initfiles = glob.glob(linesplit[1]+'/dbs/init*.ora')
for initfile in initfiles:
if linesplit[0].lower() in initfile.lower():
sid = re.search(linesplit[1]+'/dbs/init(.*).ora', initfile.lower())
config[sid.group(1)] = [linesplit[1]]
return config
def getoratab(ostype):
if 'sunos' in ostype and os.path.exists('/var/opt/oracle/oratab'):
#print ostype
return '/var/opt/oracle/oratab'
elif 'linux' in ostype and os.path.exists('/etc/oratab'):
return '/etc/oratab'
else:
return 'none'
#raise ValueError('oratab not found')
if __name__ == "__main__":
databases = []
    mysystem = getsystem()
#print mysystem
hostname = socket.gethostname()
#print(hostname)
yearweek = datetime.today().strftime("%Y%U")
logdate = datetime.today().strftime("%Y%m%d_%H%M")
#print logdate
    log = Logging(os.path.join(os.path.dirname(os.path.abspath(__file__)), "log", yearweek) + "/")
oratab = getoratab(mysystem)
installed_db_lst = parseoratab(oratab)
for key, value in installed_db_lst.iteritems():
databases.append(Database(key.upper(), value[0]))
#pp.pprint(databases)
while True:
os.system('clear')
for idx in range(0, len(databases)):
print ' ', idx, ' ', databases[idx]
selection = raw_input('Please select: ')
        if int(selection) >= len(databases) or int(selection) < 0:
print ('Unknown input')
continue
else:
databases[int(selection)].setenv()
print 'Oracle DB is now set as {}'.format(databases[int(selection)].getsid())
bash = 'bash --rcfile <(echo "typeset +x PS1="POraEnv::%s{$ORACLE_SID}@%s:\\$ "") -i' % (getpass.getuser(), hostname)
os.system(bash)
break
``` |
{
"source": "joyinsky/coringa",
"score": 2
} |
#### File: coringa/common/models.py
```python
from django.db import models
from django.conf import settings
import uuid
class WithTimeStampModel(models.Model):
created_date = models.DateTimeField(auto_now_add=True)
modified_date = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
class WithUsernameModel(models.Model):
id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False)
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name="%(class)ss", on_delete=models.PROTECT)
class Meta:
abstract = True
unique_together = (('user', 'name'),)
index_together = (('user', 'name'),)
class CommonModel(WithUsernameModel, WithTimeStampModel):
class Meta(WithUsernameModel.Meta, WithTimeStampModel.Meta):
abstract = True
def __str__(self):
return self.name
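# Note (assumption): concrete subclasses are expected to define a `name` field,
# since both the unique_together constraint and __str__ above reference it.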
```
#### File: coringa/ledgers/serializers.py
```python
from rest_framework import serializers
from ledgers.models import Ledger, Account, Payee, Transaction
class AccountSerializer(serializers.ModelSerializer):
class Meta:
model = Account
fields = ('id', 'name', 'balance', 'ledger')
read_only_fields = ('balance', 'ledger')
class PayeeSerializer(serializers.ModelSerializer):
class Meta:
model = Payee
fields = ('id', 'name', 'ledger')
read_only_fields = ('ledger',)
def create(self, validated_data):
name = validated_data.pop('name')
user = validated_data.pop('user')
instance, _ = Payee.objects.get_or_create(name=name, user=user, defaults=validated_data)
return instance
def validate_name(self, name):
return name.strip()
class TransactionSerializer(serializers.ModelSerializer):
payee = PayeeSerializer()
def get_fields(self):
fields = super(TransactionSerializer, self).get_fields()
ledger = self.context['view'].kwargs.get('ledger_pk')
if not ledger:
if self.instance:
ledger = self.instance.ledger.pk
fields['payee'].queryset = Payee.objects.filter(user=self.context['view'].request.user)\
.filter(ledger_id=ledger)
fields['account'].queryset = Account.objects.filter(user=self.context['view'].request.user)\
.filter(ledger_id=ledger)
return fields
def create(self, validated_data):
payee_serializer = validated_data.pop('payee')
payee_name = payee_serializer.pop('name')
ledger = self.context['view'].kwargs.get('ledger_pk')
if not ledger:
if self.instance:
ledger = self.instance.ledger.pk
payee, _ = Payee.objects.get_or_create(name=payee_name, ledger_id=ledger, defaults=payee_serializer)
validated_data['payee'] = payee
instance = Transaction.objects.create(**validated_data)
return instance
class Meta:
model = Transaction
fields = ('id', 'date', 'account', 'memo', 'amount', 'payee', 'user', 'ledger')
read_only_fields = ('user', 'ledger')
class LedgerSerializer(serializers.ModelSerializer):
class Meta:
model = Ledger
fields = ('id', 'name', 'balance')
read_only_fields = ('balance',)
class LedgerDetailSerializer(LedgerSerializer):
accounts = AccountSerializer(many=True)
class Meta(LedgerSerializer.Meta):
fields = ('id', 'name', 'accounts', 'balance')
```
#### File: tests/api/account.py
```python
from django.urls import reverse
from django.forms.models import model_to_dict
from nose.tools import ok_, eq_
from rest_framework.test import APITestCase
from faker import Faker
from users.tests.factories import UserFactory
from ..factories import LedgerFactory, AccountFactory
from ledgers.models import Account
fake = Faker()
class TestAccountAPI(APITestCase):
"""
Tests the /accounts endpoint.
"""
def setUp(self):
self.user = UserFactory.build()
self.user.save()
self.ledger = LedgerFactory.build()
self.ledger.user = self.user
self.ledger.save()
self.client.credentials(HTTP_AUTHORIZATION='Token {}'.format(self.user.auth_token))
self.url = reverse('api:accounts-list', kwargs={'ledger_pk': self.ledger.pk})
self.account_data = model_to_dict(AccountFactory.build())
def test_post_request_with_no_data_fails(self):
response = self.client.post(self.url, {})
eq_(response.status_code, 400)
def test_create_account(self):
response = self.client.post(self.url, self.account_data)
eq_(response.status_code, 201)
class TestAccountDetailAPI(APITestCase):
def setUp(self):
self.user = UserFactory.build()
self.user.save()
self.ledger = LedgerFactory.build()
self.ledger.user = self.user
self.ledger.save()
self.account = AccountFactory.build()
self.account.ledger = self.ledger
self.account.save()
self.url = reverse('api:account-detail', kwargs={'pk': self.account.pk})
self.client.credentials(HTTP_AUTHORIZATION='Token {}'.format(self.user.auth_token))
def test_get_request_returns_account(self):
response = self.client.get(self.url)
eq_(response.status_code, 200)
def test_put_request_updates_a_account(self):
new_name = fake.name()
payload = {'name': new_name}
response = self.client.put(self.url, payload)
eq_(response.status_code, 200)
account = Account.objects.get(pk=self.account.id)
eq_(account.name, new_name)
```
#### File: tests/models/account.py
```python
from django.test import TestCase
from django.utils import timezone
from ledgers.models import Ledger, Account, Payee, Transaction
from users.models import User
from decimal import Decimal
import random
import string
class AccountModelTest(TestCase):
def setUp(self):
ledger = Ledger()
user1 = User(username="test_user")
user1.save()
ledger.user = user1
ledger.name = 'Test Ledger'
ledger.save()
self.ledger = ledger
name = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(10))
self.account = Account.objects.create(ledger=self.ledger, name=name)
def test_balance(self):
self.assertEqual(self.account.balance, Decimal('0.00'))
t = Transaction()
t.ledger = self.ledger
t.payee, _ = Payee.objects.get_or_create(name='', ledger=self.ledger)
t.account = self.account
t.date = timezone.now()
t.amount = Decimal('1234.56')
t.save()
self.assertEqual(self.account.balance, Decimal('1234.56'))
def test_getbalance(self):
self.assertEqual(self.account.get_balance(), Decimal('0.00'))
t = Transaction()
t.ledger = self.ledger
t.payee, _ = Payee.objects.get_or_create(name='', ledger=self.ledger)
t.account = self.account
t.date = timezone.now()
t.amount = Decimal('1234.56')
t.save()
self.assertEqual(self.account.get_balance(), Decimal('1234.56'))
def test_unsaved(self):
a = Account()
with self.assertRaises(ValueError):
a.set_balance()
self.assertEqual(a.get_balance(), Decimal('0.00'))
def test_setbalance(self):
self.assertIsNone(self.account.set_balance())
self.assertEqual(self.account.balance, Decimal('0.00'))
```
#### File: tests/serializers/account.py
```python
from decimal import Decimal
from django.test import TestCase
from django.forms.models import model_to_dict
from nose.tools import eq_, ok_
from ...serializers import AccountSerializer
from ..factories import LedgerFactory, AccountFactory
from users.tests.factories import UserFactory
class AccountSerializerTest(TestCase):
def setUp(self):
user = UserFactory.build()
user.save()
self.user = user
ledger = LedgerFactory.build()
ledger.user = user
ledger.save()
self.ledger = ledger
self.account_data = model_to_dict(AccountFactory.build())
def test_serializer_with_empty_data(self):
serializer = AccountSerializer(data={})
eq_(serializer.is_valid(), False)
def test_serializer_with_valid_data(self):
serializer = AccountSerializer(data=self.account_data)
ok_(serializer.is_valid())
def test_save(self):
serializer = AccountSerializer(data=self.account_data)
ok_(serializer.is_valid())
account = serializer.save(ledger=self.ledger, user=self.user)
ok_(str(account))
ok_(str(account.name))
def test_balance(self):
serializer = AccountSerializer(data=self.account_data)
ok_(serializer.is_valid())
account = serializer.save(ledger=self.ledger, user=self.user)
eq_(account.balance, Decimal('0.00'))
account.set_balance()
eq_(account.balance, Decimal('0.00'))
eq_(account.get_balance(), Decimal('0.00'))
``` |
{
"source": "joyinsky/pergamum",
"score": 3
} |
#### File: bibloi/templatetags/pagination_range.py
```python
from django import template
register = template.Library()
@register.filter
def pagination_range(obj, current=1, limit=10):
"""
Used with pagination page_range object when you have a lot of pages
> obj = range(100)
> pagination_limit(obj, 1)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
> pagination_limit(obj, 6)
[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
> pagination_limit(obj, 7)
[2, 3, 4, 5, 6, 7, 8, 9, 10, 11]
> pagination_limit(obj, 9)
[4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
> pagination_limit(obj, 99)
[91, 92, 93, 94, 95, 96, 97, 98, 99, 100]
Use within a template with a paginator:
{% for page in obj_list.paginator.page_range|pagination_limit:obj_list.number %}
{{ page }}
{% endfor %}
"""
left = (limit / 2) + 1
right = limit / 2
total = len(obj)
if limit % 2 == 0:
right -= 1
if current < left:
return obj[:limit]
if current > total - right:
return obj[total - limit:]
return obj[current - int(left):current + int(right)]
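# Worked example (limit=10): left = 6 and right = 4 after the even-limit
# adjustment, so for current = 7 the slice is obj[1:11], i.e. pages 2..11,
# matching the docstring above.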
```
#### File: pergamum/bibloi/views.py
```python
from django.urls import reverse_lazy
from django.shortcuts import Http404
from django.utils.translation import ugettext as _
from vanilla import ListView, CreateView, DetailView, UpdateView, DeleteView, TemplateView
from .forms import ArticleForm, ArticleSearchForm
from .models import Article, Folder
from haystack.generic_views import SearchView
from haystack.query import SearchQuerySet
class ArticleList(ListView):
model = Article
paginate_by = 20
#class ArticleCreate(CreateView):
# model = Article
# form_class = ArticleForm
# success_url = reverse_lazy('bibloi:list')
class ArticleDetail(DetailView):
model = Article
def get_context_data(self, **kwargs):
context = super(ArticleDetail, self).get_context_data(**kwargs)
return context
#class ArticleUpdate(UpdateView):
# model = Article
# form_class = ArticleForm
# success_url = reverse_lazy('bibloi:list')
#class ArticleDelete(DeleteView):
# model = Article
# success_url = reverse_lazy('bibloi:list')
class ArticleSearch(SearchView):
template_name = 'search/search.html'
form_class = ArticleSearchForm
queryset = SearchQuerySet().order_by('-date')
paginate_by = 5
def get_context_data(self, **kwargs):
context = super(ArticleSearch, self).get_context_data(**kwargs)
return context
class FolderView(ListView):
model = Article
template_name = 'bibloi/folder_browse.html'
parent = None
def get_queryset(self):
path = self.kwargs.get('path', '')
folders = path.split('/')
for folder in folders:
try:
if not self.parent:
if folder:
self.parent = Folder.objects.get(name=folder)
else:
self.parent = self.parent.get_children().get(name=folder)
except Folder.DoesNotExist:
raise Http404(_('Folder does not exist'))
return self.model.objects.filter(folder=self.parent)
def get_context_data(self, **kwargs):
context = super(FolderView, self).get_context_data(**kwargs)
context['parent_folders'] = self.parent.parent_folders if self.parent else []
context['current_folder'] = self.parent
if self.parent:
context['folders'] = self.parent.get_children()
else:
context['folders'] = Folder.objects.filter(parent=self.parent)
return context
class TasksView(TemplateView):
template_name = 'tasks.html'
```
#### File: pergamum/scripts/procesardatos.py
```python
import os
import glob
import tablib
import datetime
import pymongo
from pymongo.errors import DocumentTooLarge
import pprint
from tika import parser
from copy import copy
from tqdm import tqdm
TYPES = {'txt', 'png', 'wmv', 'rtf', 'pps', 'jpg', 'wma', 'mdi', 'doc', 'pptx', 'pdf', 'xls', 'htm', 'docx', 'gif'}
TEXT_TYPES = ['txt', 'rtf', 'doc', 'pdf', 'htm', 'docx', 'pps', 'pptx','xls']
MEDIA_TYPES = ['png', 'wmv', 'jpg', 'gif', 'wma', 'mdi',]
data = tablib.Dataset(headers=[])
archivos = [file for file in glob.glob("articledb/**/*", recursive=True) if os.path.isfile(file)]
nombre_archivo = ["".join(archivo.split('/')[-1].split('.')[0:-1]) for archivo in archivos]
extensiones = [archivo.split('.')[-1].lower() for archivo in archivos]
carpetas = ["/".join(archivo.split('/')[1:-1]) for archivo in archivos]
created = [datetime.datetime.fromtimestamp(os.stat(archivo).st_mtime) for archivo in archivos]
data.append_col(nombre_archivo, header='filename')
data.append_col(extensiones, header='ext')
data.append_col(carpetas, header='folder')
data.append_col(archivos, header='path')
data.append_col(created, header='created')
data.headers = ['filename', 'ext', 'folder', 'path', 'created']
def write_to_csv(data):
with open("archivos.csv", "w") as db:
db.write(data.csv)
def save_in_mongo(data):
with pymongo.MongoClient('mongodb://localhost:27017/') as db:
archivo = db.archivo
archivo.drop_collection('articulos')
articulos = archivo.articulos
for articulo in tqdm(data.dict):
n = copy(articulo)
if articulo['ext'] in TEXT_TYPES:
try:
parsed = parser.from_file(n['path'])
n['content'] = parsed.get('content')
n['metadata'] = parsed.get('metadata')
except Exception as e:
n['exception'] = True
pprint.pprint(n)
pprint.pprint(e)
try:
articulos.insert(n, check_keys=False)
except DocumentTooLarge:
n['content'] = None
articulos.insert(n, check_keys=False)
def dump_to_csv():
with pymongo.MongoClient('mongodb://localhost:27017/') as db:
        articulos = db.archivo.articulos
with open("db.json", "w") as fdb:
from bson.json_util import dumps
fdb.write(dumps(articulos.find({}, {"_id": 1, "metadata.title": 1, "metadata.Creation-Date": 1,
"content": 1, "path": 1, "ext": 1, "folder": 1, "filename": 1,
"created": 1})))
if __name__ == "__main__":
# save_in_mongo(data)
with pymongo.MongoClient('mongodb://localhost:27017/') as db:
print(db.archivo.articulos.count())
``` |
{
"source": "joy-it/MultimediaCase-for-Raspberry-Pi",
"score": 2
} |
#### File: resources/de/settings_de.py
```python
import sys
sys.path.append('/storage/.kodi/addons/virtual.rpi-tools/lib')
sys.path.append('/storage/.kodi/addons/script.module.pyserial/lib')
import xbmc
import xbmcaddon
import xbmcgui
import subprocess
import time
import os
import serial
monitor = xbmc.Monitor()
Cancel = False
status_Fan = False
status_LearningMode = False
os.system("rm /storage/.kodi/temp/functions.txt && touch /storage/.kodi/temp/functions.txt")
flags = ["python /storage/.kodi/addons/script.module.MultimediaCase/lib/fan.py &\n"]
with open("/storage/.config/autostart.sh","r") as log, open("/storage/.kodi/temp/functions.txt","w") as file:
for line in log:
if not any(flag in line for flag in flags):
file.write(line)
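# The block above rewrites autostart.sh into functions.txt without any existing
# fan-control entry, so fanControl() below re-adds it only when enabled.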
os.system("touch /storage/.kodi/temp/temp.txt")
def fanControl():
global Cancel
global status_Fan
fan = xbmcgui.Dialog().select("Möchten Sie die Lüftersteuerung aktivieren?", ["Aktivieren", "Deaktivieren", "Mehr Informationen"])
if fan == -1:
Cancel = True
if fan == 1:
xbmcgui.Dialog().ok("Status der Lüftersteuerung","Die Lüftersteuerung ist deaktiviert.")
if fan == 0:
with open("/storage/.kodi/temp/functions.txt", "a") as log:
log.write("python /storage/.kodi/addons/script.module.MultimediaCase/lib/fan.py &\n")
xbmcgui.Dialog().ok("Status der Lüftersteuerung","Die Lüftersteuerung ist aktiviert.")
status_Fan = True
if fan == 2:
xbmcgui.Dialog().ok("Lüftersteuerung HILFE","Dieses Programm steuert den eingebauten Lüfter im Multimedia Case. Der Lüfter kühlt den Pi bis zu 50C herunter, wenn dieser eine Temperatur von 60C überschritten hat. Dies kann auch im dazugehörigen Code angepasst werden.")
fanControl()
def learningMode():
global status_LearningMode
global Cancel
if Cancel == False:
learning_Mode = xbmcgui.Dialog().select("Möchten Sie eine neue PowerOff-Taste einrichten?", ["Ja", "Nein","Mehr Informationen"])
if learning_Mode == -1:
Cancel = True
if learning_Mode == 1:
xbmcgui.Dialog().ok("Status des Learning Modes","Einstellung übersprungen.")
if learning_Mode == 0:
ser = serial.Serial(port='/dev/serial0', baudrate = 38400, parity=serial.PARITY_NONE, stopbits=serial.STOPBITS_ONE, bytesize=serial.EIGHTBITS, timeout=1)
ser.write(str.encode('\x0D'))
ser.write(str.encode('X04'))
ser.write(str.encode('\x0D'))
xbmcgui.Dialog().textviewer("Konfiguration der PowerOff-Taste", "Das Multimedia Case beginnt bunt zu blinken. Nun müssen Sie eine beliebige Taste dreimal betätigen, damit diese als PowerOff-Button gesetzt wird. Wenn ein Signal empfangen wird, leuchtet das Multimedia Case weiß auf. Wenn jedoch eine andere Taste betätigt wird, als die vorherigen, dann leuchtet das Gehäuse rot auf und es muss erneut dreimal eine beliebige Taste betätigt werden. Das Gehäuse wird grün aufleuchten, wenn erfolgreich ein neuer PowerOff-Button gesetzt wurde.\n Sie können mit ENTER fortfahren.")
status_LearningMode = True
if learning_Mode == 2:
xbmcgui.Dialog().ok("PowerOff-Button HILFE", "Mit diesem Programm können Sie eine Taste einer beliebigen Fernbedienung als An- und Austaste des Multimedia Cases konfigurieren. Weitere Tasten zur Steuerung des Systems können im Addon IR Control Configuration konfiguriert werden.")
learningMode()
while not monitor.abortRequested():
fanControl()
learningMode()
if Cancel == False:
if status_Fan == False and status_LearningMode == False:
xbmcgui.Dialog().ok("Funktionen des MultimediaCases", "Sie haben erfolgreich die Lüftersteuerung deaktiviert. Keine neue PowerOff-Taste wurde gesetzt.")
elif status_Fan == True and status_LearningMode == False:
xbmcgui.Dialog().ok("Funktionen des MultimediaCases", "Sie haben erfolgreich die Lüftersteuerung aktiviert. Keine neue PowerOff-Taste wurde gesetzt.")
elif status_Fan == False and status_LearningMode == True:
xbmcgui.Dialog().ok("Funktionen des MultimediaCases", "Sie haben erfolgreich die Lüftersteuerung deaktiviert. Eine neue PowerOff-Taste wurde gesetzt.")
elif status_Fan == True and status_LearningMode == True:
xbmcgui.Dialog().ok("Funktionen des MultimediaCases", "Sie haben erfolgreich die Lüftersteuerung aktiviert. Eine neue PowerOff-Taste wurde gesetzt.")
os.system("rm /storage/.config/autostart.sh && touch /storage/.config/autostart.sh")
os.system("cp /storage/.kodi/temp/functions.txt /storage/.config/autostart.sh")
os.system("rm /storage/.kodi/temp/functions.txt")
os.system("kodi-send --action='RunScript(\"/storage/.kodi/addons/service.autoexec/resources/de/set-settings_de.py\")'")
else:
os.system("rm /storage/.kodi/temp/functions.txt")
xbmcgui.Dialog().ok("Funktionen des MultimediaCases", "Abgebrochen! Konfiguration wurde unterbrochen!")
os.system("kodi-send --action='RunScript(\"/storage/.kodi/addons/service.autoexec/resources/de/set-settings_de.py\")'")
break
``` |
{
"source": "joyjeni/detr-fine",
"score": 2
} |
#### File: detr-fine/archived/main.py
```python
import os
import torch
from torch.utils.data import DataLoader, DistributedSampler
import math
import sys
import time
import datetime
from typing import Iterable
from pathlib import Path
import json
import random
import numpy as np
from dataset.evaluator import SmoothedValue, MetricLogger
from model.detr import build_model
from dataset.construction_dataset import build_dataset
from dataset.evaluator import collate_fn, evaluate, save_on_master
seed = 42
torch.manual_seed(seed)
np.random.seed(seed)
random.seed(seed)
torch.backends.cudnn.deterministic = True
torch.use_deterministic_algorithms(False) # missing some deterministic impl
device = torch.device("cuda:0")
class Args:
pass
args = Args()
# Postitional encoding
args.position_embedding = "sine"
# CNN Backbone
args.backbone = "resnet50"
args.dilation = None
# Hungarian matcher
args.set_cost_class = 1
args.set_cost_bbox = 5
args.set_cost_giou = 2
# Transformer
args.hidden_dim = 256
args.dropout = 0.1
args.nheads = 8
args.dim_feedforward = 2048
args.enc_layers = 6
args.dec_layers = 6
args.pre_norm = None
# DETR
args.num_queries = 100
args.aux_loss = True # calculate loss at eache decoder layer
args.masks = True
args.frozen_weights = None
args.bbox_loss_coef = 5
args.mask_loss_coef = 1
args.dice_loss_coef = 1
args.giou_loss_coef = 2
args.eos_coef = 0.1
# Dataset
args.dataset_file = "coco_panoptic" # construction
args.coco_path = "./data"
args.coco_panoptic_path = "./data"
# Training
args.lr = 1e-4
args.weight_decay = 1e-4
args.lr_backbone = 0 # 0 means frozen backbone
args.batch_size = 3
args.epochs = 2
args.lr_drop = 200
args.clip_max_norm = 0.1
args.output_dir = "out_dir"
args.eval = False
os.makedirs("out_dir/panoptic_eval", exist_ok=True)
# set if you plan to log on wandb
ENABLE_WANDB = True
# if set not train from scratch (detre pretrained on COCO)
used_artifact = None # "2_2_attentionfreeze_aux:latest"
# set if starting a new run
wandb_experiment_name = "2_2_1_transf_unfreeze_aux"
# set to None if starting a new run
run_id = None
if ENABLE_WANDB:
import wandb
if run_id is not None:
wandb.init(project="detr", id=run_id, resume="allow")
else:
wandb.init(project="detr", name=wandb_experiment_name)
wandb.config.position_embedding = args.position_embedding
wandb.config.backbone = args.backbone
wandb.config.dilation = args.dilation
wandb.config.set_cost_class = args.set_cost_class
wandb.config.set_cost_bbox = args.set_cost_bbox
wandb.config.set_cost_giou = args.set_cost_giou
wandb.config.hidden_dim = args.hidden_dim
wandb.config.dropout = args.dropout
wandb.config.nheads = args.nheads
wandb.config.dim_feedforward = args.dim_feedforward
wandb.config.enc_layers = args.enc_layers
wandb.config.dec_layers = args.dec_layers
wandb.config.pre_norm = args.pre_norm
wandb.config.num_queries = args.num_queries
wandb.config.aux_loss = args.aux_loss
wandb.config.masks = args.masks
wandb.config.frozen_weights = args.frozen_weights
wandb.config.bbox_loss_coef = args.bbox_loss_coef
wandb.config.mask_loss_coef = args.mask_loss_coef
wandb.config.dice_loss_coef = args.dice_loss_coef
wandb.config.giou_loss_coef = args.giou_loss_coef
wandb.config.eos_coef = args.eos_coef
wandb.config.lr = args.lr
wandb.config.weight_decay = args.weight_decay
wandb.config.lr_backbone = args.lr_backbone
wandb.config.batch_size = args.batch_size
wandb.config.epochs = args.epochs
wandb.config.lr_drop = args.lr_drop
wandb.config.clip_max_norm = args.clip_max_norm
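    # Equivalent, more compact alternative (sketch): since `args` is a plain
    # object, wandb.config.update(vars(args)) would record the same fields in
    # one call.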
def freeze_attn(model, args):
for i in range(args.dec_layers):
for param in model.detr.transformer.decoder.layers[i].self_attn.parameters():
param.requires_grad = False
for param in model.detr.transformer.decoder.layers[
i
].multihead_attn.parameters():
param.requires_grad = False
for i in range(args.enc_layers):
for param in model.detr.transformer.encoder.layers[i].self_attn.parameters():
param.requires_grad = False
def freeze_decoder(model, args):
for param in model.detr.transformer.decoder.parameters():
param.requires_grad = False
def freeze_first_layers(model, args):
for i in range(args.enc_layers // 2):
for param in model.detr.transformer.encoder.layers[i].parameters():
param.requires_grad = False
for i in range(args.dec_layers // 2):
for param in model.detr.transformer.decoder.layers[i].parameters():
param.requires_grad = False
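# Note: of the three freezing strategies above, only freeze_attn is applied in
# build_pretrained_model below; freeze_decoder and freeze_first_layers are
# alternatives presumably kept for experiments.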
def build_pretrained_model(args):
pre_trained = torch.hub.load(
"facebookresearch/detr",
"detr_resnet50_panoptic",
pretrained=True,
return_postprocessor=False,
num_classes=250,
)
model, criterion, postprocessors = build_model(args)
model.detr.backbone.load_state_dict(pre_trained.detr.backbone.state_dict())
model.detr.bbox_embed.load_state_dict(pre_trained.detr.bbox_embed.state_dict())
model.detr.query_embed.load_state_dict(pre_trained.detr.query_embed.state_dict())
model.detr.input_proj.load_state_dict(pre_trained.detr.input_proj.state_dict())
model.detr.transformer.load_state_dict(pre_trained.detr.transformer.state_dict())
model.bbox_attention.load_state_dict(pre_trained.bbox_attention.state_dict())
model.mask_head.load_state_dict(pre_trained.mask_head.state_dict())
freeze_attn(model, args)
return model, criterion, postprocessors
def train_one_epoch(
model: torch.nn.Module,
criterion: torch.nn.Module,
data_loader: Iterable,
optimizer: torch.optim.Optimizer,
device: torch.device,
epoch: int,
max_norm: float = 0,
):
model.train()
criterion.train()
metric_logger = MetricLogger(delimiter=" ")
metric_logger.add_meter("lr", SmoothedValue(window_size=1, fmt="{value:.6f}"))
metric_logger.add_meter(
"class_error", SmoothedValue(window_size=1, fmt="{value:.2f}")
)
header = "Epoch: [{}]".format(epoch)
print_freq = 10
for samples, targets in metric_logger.log_every(data_loader, print_freq, header):
samples = samples.to(device)
targets = [{k: v.to(device) for k, v in t.items()} for t in targets]
outputs = model(samples)
loss_dict = criterion(outputs, targets)
weight_dict = criterion.weight_dict
losses = sum(
loss_dict[k] * weight_dict[k] for k in loss_dict.keys() if k in weight_dict
)
        # no distributed training here, so no cross-GPU reduction is needed;
        # keep the loss dict as-is for logging purposes
        loss_dict_reduced = loss_dict
loss_dict_reduced_unscaled = {
f"{k}_unscaled": v for k, v in loss_dict_reduced.items()
}
loss_dict_reduced_scaled = {
k: v * weight_dict[k]
for k, v in loss_dict_reduced.items()
if k in weight_dict
}
losses_reduced_scaled = sum(loss_dict_reduced_scaled.values())
loss_value = losses_reduced_scaled.item()
if not math.isfinite(loss_value):
print("Loss is {}, stopping training".format(loss_value))
print(loss_dict_reduced)
sys.exit(1)
optimizer.zero_grad()
losses.backward()
if max_norm > 0:
torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm)
optimizer.step()
metric_logger.update(
loss=loss_value, **loss_dict_reduced_scaled, **loss_dict_reduced_unscaled
)
metric_logger.update(class_error=loss_dict_reduced["class_error"])
metric_logger.update(lr=optimizer.param_groups[0]["lr"])
if ENABLE_WANDB:
wandb.log(loss_dict_reduced)
wandb.log({"loss": loss_value})
metric_logger.synchronize_between_processes()
print("Averaged stats:", metric_logger)
return {k: meter.global_avg for k, meter in metric_logger.meters.items()}
def train():
if args.frozen_weights is not None:
assert args.masks, "Frozen training is meant for segmentation only"
model, criterion, postprocessors = build_pretrained_model(args)
model.to(device)
if ENABLE_WANDB:
wandb.watch(model)
model_without_ddp = model
n_parameters = sum(p.numel() for p in model.parameters() if p.requires_grad)
print("number of params:", n_parameters)
param_dicts = [
{
"params": [
p
for n, p in model_without_ddp.named_parameters()
if "backbone" not in n and p.requires_grad
]
},
{
"params": [
p
for n, p in model_without_ddp.named_parameters()
if "backbone" in n and p.requires_grad
],
"lr": args.lr_backbone,
},
]
optimizer = torch.optim.AdamW(
param_dicts, lr=args.lr, weight_decay=args.weight_decay
)
lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, args.lr_drop)
if ENABLE_WANDB and used_artifact is not None:
artifact = wandb.use_artifact(used_artifact)
artifact_dir = artifact.download()
checkpoint = torch.load(artifact_dir + "/checkpoint.pth")
model.load_state_dict(checkpoint["model"])
if run_id is not None:
optimizer.load_state_dict(checkpoint["optimizer"])
# lr_scheduler.load_state_dict(checkpoint['lr_scheduler'])
start_epoch = checkpoint["epoch"]
else:
start_epoch = 0
dataset_train = build_dataset(image_set="train", args=args)
dataset_val = build_dataset(image_set="val", args=args)
sampler_train = torch.utils.data.RandomSampler(dataset_train)
sampler_val = torch.utils.data.SequentialSampler(dataset_val)
batch_sampler_train = torch.utils.data.BatchSampler(
sampler_train, args.batch_size, drop_last=True
)
data_loader_train = DataLoader(
dataset_train,
batch_sampler=batch_sampler_train,
collate_fn=collate_fn,
num_workers=4,
)
data_loader_val = DataLoader(
dataset_val,
args.batch_size,
sampler=sampler_val,
drop_last=False,
collate_fn=collate_fn,
num_workers=4,
)
if args.frozen_weights is not None:
checkpoint = torch.load(args.frozen_weights, map_location="cpu")
model_without_ddp.detr.load_state_dict(checkpoint["model"])
output_dir = Path(args.output_dir)
if args.eval:
test_stats = evaluate(
model, criterion, postprocessors, data_loader_val, device, args.output_dir
)
print(test_stats)
return
print("Start training")
start_time = time.time()
for epoch in range(start_epoch + 1, args.epochs):
train_stats = train_one_epoch(
model,
criterion,
data_loader_train,
optimizer,
device,
epoch,
args.clip_max_norm,
)
lr_scheduler.step()
if args.output_dir:
checkpoint_path = output_dir / "checkpoint.pth"
save_on_master(
{
"model": model_without_ddp.state_dict(),
"optimizer": optimizer.state_dict(),
"lr_scheduler": lr_scheduler.state_dict(),
"epoch": epoch,
"args": args,
},
checkpoint_path,
)
if ENABLE_WANDB:
artifact = wandb.Artifact(wandb_experiment_name, type="model")
artifact.add_file(checkpoint_path)
wandb.log_artifact(artifact)
test_stats = evaluate(
model, criterion, postprocessors, data_loader_val, device, args.output_dir
)
log_stats = {
**{f"train_{k}": v for k, v in train_stats.items()},
**{f"test_{k}": v for k, v in test_stats.items()},
"epoch": epoch,
"n_parameters": n_parameters,
}
if ENABLE_WANDB:
wandb.log(test_stats)
if args.output_dir:
with (output_dir / "log.txt").open("a") as f:
f.write(json.dumps(log_stats) + "\n")
total_time = time.time() - start_time
total_time_str = str(datetime.timedelta(seconds=int(total_time)))
print("Training time {}".format(total_time_str))
```
#### File: detr-fine/dataprepration/overlay_custom_mask.py
```python
import numpy as np
import cv2
from math import floor
def get_overlayed_mask(image_size, annotations):
height, width = image_size
    # Create a single-channel (height, width) black image
    blank_image = np.zeros((height, width))
    # Optional visualization:
    # plt.imshow(blank_image)
    # plt.show()
# Create list of polygons to be drawn
# for i, annotation in enumerate(annotations):
polygons_list = []
# Add the polygon segmentation
for segmentation_points in annotations["segmentation"]:
segmentation_points = np.multiply(segmentation_points, 1).astype(int)
polygons_list.append(segmentation_points)
for x in polygons_list:
end = []
if len(x) % 2 != 0:
print(x)
for l in range(0, len(x), 2):
coords = [floor(x[l]), floor(x[l + 1])]
end.append(coords)
contours = np.array(end)
if end == []:
continue
cv2.fillPoly(blank_image, pts=[contours], color=(1, 1, 1))
    # Optional visualization of the final mask:
    # plt.imshow(blank_image)
    # plt.show()
return blank_image
```
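A minimal usage sketch for `get_overlayed_mask`; the annotation dict below is hypothetical but follows the COCO convention the function expects, where `"segmentation"` holds flat `[x1, y1, x2, y2, ...]` polygon lists:
```python
# Hedged example; the annotation values are made up for illustration.
annotations = {
    "segmentation": [[10, 10, 50, 10, 50, 40, 10, 40]]  # one rectangular polygon
}
mask = get_overlayed_mask((64, 64), annotations)
print(mask.shape, mask.sum())  # (64, 64); the filled polygon pixels are set to 1
```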
#### File: detr/datasets/construction_panoptic.py
```python
import json
from pathlib import Path
import numpy as np
import torch
from PIL import Image
from panopticapi.utils import rgb2id
# from util.box_ops import masks_to_boxes
from .construction import make_construction_transforms
import logging
def box_xywh_to_xyxy(x):
xs, ys, w, h = x.unbind(-1)
b = [xs, ys, (xs + w), (ys + h)]
return torch.stack(b, dim=-1)
def masks_to_boxes(segments):
boxes = []
labels = []
iscrowd = []
area = []
for ann in segments:
if len(ann["bbox"]) == 4:
boxes.append(ann["bbox"])
area.append(ann['area'])
else:
boxes.append([0, 0, 2, 2])
area.append(4)
labels.append(ann["category_id"])
iscrowd.append(ann['iscrowd'])
if len(boxes) == 0 and len(labels) == 0:
boxes.append([0, 0, 2, 2])
labels.append(1)
area.append(4)
iscrowd.append(0)
boxes = torch.tensor(boxes, dtype=torch.int64)
labels = torch.tensor(labels, dtype=torch.int64)
iscrowd = torch.tensor(iscrowd)
area = torch.tensor(area)
boxes = box_xywh_to_xyxy(boxes)
return boxes, labels, iscrowd, area
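# Editor's sketch of the expected input/output (values are illustrative):
#   segments = [{"bbox": [10, 20, 30, 40], "area": 1200,
#                "category_id": 3, "iscrowd": 0}]
#   boxes, labels, iscrowd, area = masks_to_boxes(segments)
#   boxes -> tensor([[10, 20, 40, 60]])  # xywh converted to xyxy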
class ConstructionPanoptic:
def __init__(self, img_folder, ann_folder, ann_file, transforms=None, return_masks=True):
with open(ann_file, "r") as f:
self.coco = json.load(f)
# sort 'images' field so that they are aligned with 'annotations'
# i.e., in alphabetical order
self.coco["images"] = sorted(self.coco["images"], key=lambda x: x["id"])
# sanity check
if "annotations" in self.coco:
for img, ann in zip(self.coco["images"], self.coco["annotations"]):
assert img["file_name"][:-4] == ann["file_name"][:-4]
self.img_folder = img_folder
self.ann_folder = ann_folder
self.ann_file = ann_file
self.transforms = transforms
self.return_masks = return_masks
def __getitem__(self, idx):
try:
ann_info = (
self.coco["annotations"][idx]
if "annotations" in self.coco
else self.coco["images"][idx]
)
img_path = Path(self.img_folder) / ann_info["file_name"].replace(".png", ".jpg")
ann_path = Path(self.ann_folder) / ann_info["file_name"]
img = Image.open(img_path).convert("RGB")
w, h = img.size
if "segments_info" in ann_info:
masks = np.asarray(Image.open(ann_path), dtype=np.uint32)
masks = rgb2id(masks)
ids = np.array([ann["id"] for ann in ann_info["segments_info"]])
masks = masks == ids[:, None, None]
masks = torch.as_tensor(masks, dtype=torch.uint8)
# labels = torch.tensor(
# [ann["category_id"] for ann in ann_info["segments_info"]],
# dtype=torch.int64,
# )
target = {}
target['image_id'] = torch.tensor([ann_info['image_id'] if "image_id" in ann_info else ann_info["id"]])
if self.return_masks:
target['masks'] = masks
boxes, labels, iscrowd, area = masks_to_boxes(ann_info["segments_info"])
target['labels'] = labels
# Instead of finding boxes, just take the one from json info available
# target["boxes"] = masks_to_boxes(ann_info["segments_info"])
target["boxes"] = boxes
target['size'] = torch.as_tensor([int(h), int(w)])
target['orig_size'] = torch.as_tensor([int(h), int(w)])
target['iscrowd'] = iscrowd
target['area'] = area
# if "segments_info" in ann_info:
# for name in ['iscrowd', 'area']:
# target[name] = torch.tensor([ann[name] for ann in ann_info['segments_info']])
if self.transforms is not None:
img, target = self.transforms(img, target)
return img, target
except Exception as e:
logging.error(ann_info)
raise e
def __len__(self):
return len(self.coco['images'])
def get_height_and_width(self, idx):
img_info = self.coco['images'][idx]
height = img_info['height']
width = img_info['width']
return height, width
def build(image_set, args):
root = Path(args.data_path)
assert (
root.exists()
), f"provided Panoptic path {root} does not exist"
mode = "panoptic"
PATHS = {
"train": ("images", f"{mode}", f"{mode}.json"),
"val": ("images", f"val_{mode}", f"val_{mode}.json"),
}
img_folder, ann_folder, ann_file = PATHS[image_set]
img_folder_path = root / img_folder
ann_folder_path = root / ann_folder
ann_file = root / ann_file
dataset = ConstructionPanoptic(
img_folder_path,
ann_folder_path,
ann_file,
transforms=make_construction_transforms(image_set),
return_masks=args.masks,
)
return dataset
``` |
{
"source": "joyjeni/-Learn-Artificial-Intelligence-with-TensorFlow",
"score": 3
} |
#### File: -Learn-Artificial-Intelligence-with-TensorFlow/section3/3_1_embeddings.py
```python
import os
import numpy as np
import argparse
import tensorflow as tf
from tensorflow.contrib.training import HParams
import dataset
import components
from util import newsgroups, tfrecords
tf.logging.set_verbosity(tf.logging.INFO)
parser = argparse.ArgumentParser()
parser.add_argument('--batch_size', type=int, default=64)
parser.add_argument('--data_dir', default='data')
parser.add_argument('--processed_data_dir', default='processed_data')
parser.add_argument('--model_dir', default='model_dir')
parser.add_argument('--predict_only', action='store_true', default=False)
parser.add_argument('-V', '--vocab_size', default=5000)
parser.add_argument('-T', '--max_seq_len', default=20)
parser.add_argument('-E', '--embed_size', default=100)
parser.add_argument('--hidden_size', default=256)
args = parser.parse_args()
def input_fn(data_dir, params):
text_batch, _ = dataset.Dataset(data_dir, params).make_batch(params.batch_size)
return text_batch, text_batch
def model_fn(features, labels, mode, params):
"""
Args:
features: Tensor with shape (batch_size, max_seq_len) with dtype int.
labels: same as features, since we are training an autoencoder.
params: tf.contrib.HParams object containing...
embed_size, vocab_size
"""
if mode == tf.estimator.ModeKeys.PREDICT:
features = features['x']
# Load and build the embedding layer, initialized with pre-trained
# GloVe embeddings.
embedded_features = components.glove_embed(
features,
embed_shape=(params.vocab_size, params.embed_size),
vocabulary=params.vocab)
hidden_layer = tf.layers.Dense(
params.hidden_size, activation=tf.tanh)(embedded_features)
logits = tf.layers.Dense(params.vocab_size)(hidden_layer)
if mode == tf.estimator.ModeKeys.PREDICT:
output_probs = tf.nn.softmax(logits)
preds = tf.argmax(output_probs, -1)
table = tf.contrib.lookup.index_to_string_table_from_tensor(params.vocab)
preds_words = table.lookup(preds)
return tf.estimator.EstimatorSpec(
mode, predictions=preds_words)
loss = tf.losses.sparse_softmax_cross_entropy(
labels=labels, logits=logits)
train_op = tf.contrib.layers.optimize_loss(
loss=loss, global_step=tf.train.get_or_create_global_step(),
learning_rate=0.01,
optimizer='Adam')
with tf.variable_scope('accuracy', values=[labels, logits]):
flattened_logits = tf.reshape(
logits, [params.batch_size * params.max_seq_len, -1])
flattened_labels = tf.reshape(
labels, [params.batch_size * params.max_seq_len,])
output_probs = tf.nn.softmax(flattened_logits)
num_correct = tf.to_float(tf.nn.in_top_k(
output_probs, tf.to_int64(flattened_labels), 1))
accuracy = tf.reduce_mean(num_correct)
tf.summary.scalar('accuracy', accuracy)
if mode == tf.estimator.ModeKeys.TRAIN:
logging_hook = tf.train.LoggingTensorHook({
'loss': loss,
'acc': accuracy
}, every_n_iter=100)
return tf.estimator.EstimatorSpec(
mode,
loss=loss,
train_op=train_op,
training_hooks=[logging_hook])
def main(_):
# tf.estimator will load/reuse anything found in its model_dir, so
# we make sure to clear its contents before every training run.
# For predictions, however, we of course want to load the previously
# trained model from disk.
if tf.gfile.Exists(args.model_dir) and not args.predict_only:
tf.gfile.DeleteRecursively(args.model_dir)
tf.gfile.MakeDirs(args.model_dir)
hparams = HParams(**vars(args))
# We will use the 20 newsgroups dataset to train our model.
# Note that we won't be using the labels, since our model is simply
# learning to reconstruct its inputs as its output.
train_file_path = os.path.join(hparams.data_dir, '20ng-train-all-terms.txt')
# Define the path to the file that we'll store our vocabulary in.
# This file will have the same number of lines as our vocab_size.
# Each line will contain a single word in our vocabulary, listed in
# order of decreasing frequency seen in our training data.
vocab_path = os.path.join(hparams.processed_data_dir, 'vocab.txt')
# Data preparation: getting vocabulary and saving tfrecords format.
if not tf.gfile.Exists(vocab_path):
print('Extracting vocab, labels, and tokenized texts from data.')
vocab, labels, texts = newsgroups.fit_and_extract(
train_file_path, hparams.vocab_size)
print('Saving vocabulary to {}.'.format(vocab_path))
with open(vocab_path, 'w+') as f:
f.write('\n'.join(vocab))
tfrecords_path = os.path.join(hparams.processed_data_dir, 'embed.tfrecords')
print('Saving tfrecords to {}.'.format(tfrecords_path))
tfrecords.save_tfrecords(
out_path=tfrecords_path,
labels=labels,
texts=texts,
vocab=vocab)
else:
print('Reading existing vocabulary from {}.'.format(vocab_path))
with open(vocab_path) as f:
vocab = [l.strip() for l in f.readlines()]
hparams.vocab = vocab
print('Creating autoencoder.')
autoencoder = tf.estimator.Estimator(
model_fn=model_fn,
model_dir=hparams.model_dir,
config=tf.estimator.RunConfig(log_step_count_steps=10000),
params=hparams)
if not args.predict_only:
print('Training autoencoder.')
autoencoder.train(
input_fn=lambda: input_fn(hparams.processed_data_dir, hparams),
steps=1000)
sample_sentences = [
'i like dogs',
'i am a test sentence',
'TensorFlow is a fun library to use']
pred_inputs = []
for sent in sample_sentences:
token_ids = [vocab.index(w)
for w in sent.split()[:args.max_seq_len]
if w in vocab]
# Pad if necessary.
if len(token_ids) < args.max_seq_len:
token_ids.extend([0] * (args.max_seq_len - len(token_ids)))
pred_inputs.append(token_ids)
pred_inp_fn = tf.estimator.inputs.numpy_input_fn(
x={'x': np.asarray(pred_inputs)}, shuffle=False)
predictions = autoencoder.predict(input_fn=pred_inp_fn)
print('Sample predictions:')
for i, prediction in enumerate(predictions):
clean_prediction = ' '.join([tok.decode() for tok in prediction if tok != b'_UNK'])
print('\nExpected:', sample_sentences[i], sep='\t')
print('Actual: ', clean_prediction, sep='\t')
if __name__ == '__main__':
tf.app.run()
```
#### File: -Learn-Artificial-Intelligence-with-TensorFlow/section3/3_4_classification_part_one.py
```python
import os
import argparse
from glob import glob
from pprint import pprint
import numpy as np
import tensorflow as tf
from tensorflow.contrib.training import HParams
tf.logging.set_verbosity(tf.logging.INFO)
import dataset
import components
from util import glove, newsgroups, tfrecords
parser = argparse.ArgumentParser(description="Train an RNN on the 20 Newsgroups dataset.")
parser.add_argument('--batch_size', type=int, default=64)
parser.add_argument('--data_dir', default='data')
parser.add_argument('--processed_data_dir', default='processed_data')
parser.add_argument('--model_dir', default='model_dir')
parser.add_argument('--predict_only', action='store_true', default=False)
parser.add_argument('-V', '--vocab_size', default=5000)
parser.add_argument('-T', '--max_seq_len', default=20)
parser.add_argument('-E', '--embed_size', default=100)
parser.add_argument('--hidden_size', default=256)
parser.add_argument('--num_iter', default=10, type=int)
parser.add_argument('--train_steps', default=1000, type=int)
parser.add_argument('--eval_steps', default=200, type=int)
parser.add_argument('--steps_per_print', default=200, type=int)
args = parser.parse_args()
def input_fn(hparams, mode):
with tf.variable_scope('input_fn'):
return dataset.Dataset(hparams.processed_data_dir, hparams).make_batch(mode)
def model_fn(features, labels, mode, params):
# 20 Newsgroups dataset has 20 unique labels.
num_classes = 20
# Load and build the embedding layer, initialized with
# pre-trained GloVe embeddings.
# Has shape (batch_size, max_seq_len, embed_size)
embedded_features = components.glove_embed(
features,
embed_shape=(params.vocab_size, params.embed_size),
vocabulary=params.vocab)
# Define LSTMCell with state size of 128.
cell = tf.nn.rnn_cell.LSTMCell(128)
# Use tf.nn.dynamic_rnn for efficient computation.
# It utilizes TensorFlow's tf.while_loop to repeatedly
# call cell(...) over the sequential embedded_features.
#
# Returns:
# the full output sequence as `outputs` tensor,
# which has shape (batch_size, max_seq_len, 128)
# the final LSTMStateTuple(c_final, h_final), where both
# c_final and h_final have shape (batch_size, 128)
outputs, state = tf.nn.dynamic_rnn(
cell=cell, inputs=embedded_features, dtype=tf.float32)
# We project the final output state to obtain
# the logits over each of our possible classes (labels).
logits = tf.layers.Dense(num_classes)(state.h)
# For PREDICT mode, compute predicted label for each example in batch.
if mode == tf.estimator.ModeKeys.PREDICT:
output_probs = tf.nn.softmax(logits)
preds = tf.argmax(output_probs, -1)
# Create table for converting prediction index -> label.
table = tf.contrib.lookup.index_to_string_table_from_tensor(params.vocab)
# Convert each prediction index to the corresponding label.
preds_words = table.lookup(preds)
return tf.estimator.EstimatorSpec(
mode, predictions=preds_words)
loss = tf.losses.sparse_softmax_cross_entropy(
labels=labels, logits=logits)
with tf.variable_scope('accuracy', values=[labels, logits]):
output_probs = tf.nn.softmax(logits)
num_correct = tf.to_float(tf.nn.in_top_k(
output_probs, tf.to_int64(labels), 1))
accuracy = tf.reduce_mean(num_correct)
if mode == tf.estimator.ModeKeys.EVAL:
preds = tf.argmax(output_probs, -1)
eval_metric_ops = {'acc': tf.metrics.accuracy(labels, preds)}
return tf.estimator.EstimatorSpec(
mode, loss=loss, eval_metric_ops=eval_metric_ops)
train_op = tf.contrib.layers.optimize_loss(
loss=loss, global_step=tf.train.get_or_create_global_step(),
learning_rate=1e-3,
optimizer='Adam')
if mode == tf.estimator.ModeKeys.TRAIN:
tf.summary.scalar('acc', accuracy)
logging_hook = tf.train.LoggingTensorHook({
'step': tf.train.get_global_step(),
'loss': loss,
'acc': accuracy
}, every_n_iter=params.steps_per_print)
return tf.estimator.EstimatorSpec(
mode,
loss=loss,
train_op=train_op,
training_hooks=[logging_hook])
def main():
# tf.estimator will load/reuse anything found in its model_dir, so
# we make sure to clear its contents before every training run.
# For predictions, however, we of course want to load the previously
# trained model from disk.
if tf.gfile.Exists(args.model_dir) and not args.predict_only:
tf.gfile.DeleteRecursively(args.model_dir)
tf.gfile.MakeDirs(args.model_dir)
tf.gfile.MakeDirs(args.processed_data_dir)
tf.gfile.Copy(os.path.join(args.data_dir, 'labels.txt'),
os.path.join(args.processed_data_dir, 'labels.txt'), overwrite=True)
hparams = HParams(**vars(args))
# Define the path to the file that we'll store our vocabulary in.
# This file will have the same number of lines as our vocab_size.
# Each line will contain a single word in our vocabulary, listed in
# order of decreasing frequency seen in our training data.
vocab_path = os.path.join(hparams.processed_data_dir, 'vocab.txt')
# Data preparation: getting vocabulary and saving tfrecords format.
if not tf.gfile.Exists(vocab_path):
for mode in ['train', 'test']:
data_file_path = os.path.join(
hparams.data_dir, '20ng-{}-all-terms.txt'.format(mode))
print('Extracting vocab, labels, and tokenized texts from data.')
if mode == 'train':
vocab, labels, texts = newsgroups.fit_and_extract(
data_file_path, hparams.vocab_size)
print('Saving vocabulary to {}.'.format(vocab_path))
with open(vocab_path, 'w+') as f:
f.write('\n'.join(vocab))
else:
_, labels, texts = newsgroups.fit_and_extract(
data_file_path, hparams.vocab_size)
tfrecords_path = os.path.join(
hparams.processed_data_dir, '20ng_simple_{}.tfrecords'.format(mode))
print('Saving tfrecords to {}.'.format(tfrecords_path))
tfrecords.save_tfrecords(
out_path=tfrecords_path,
labels=labels,
texts=texts,
vocab=vocab)
else:
print('Reading existing vocabulary from {}.'.format(vocab_path))
with open(vocab_path) as f:
vocab = [l.strip() for l in f.readlines()]
hparams.vocab = vocab
print('Creating classifier.')
classifier = tf.estimator.Estimator(
model_fn=model_fn,
model_dir=hparams.model_dir,
config=tf.estimator.RunConfig(
log_step_count_steps=10000,
),
params=hparams)
if not args.predict_only:
for i in range(hparams.num_iter):
classifier.train(
input_fn=lambda: input_fn(hparams, 'train'),
steps=hparams.train_steps)
classifier.evaluate(
input_fn=lambda: input_fn(hparams, 'test'),
steps=hparams.eval_steps)
if __name__ == '__main__':
main()
```
#### File: -Learn-Artificial-Intelligence-with-TensorFlow/section3/hooks.py
```python
import sys
import tensorflow as tf
class EarlyStoppingHook(tf.train.SessionRunHook):
"""Custom SessionRunHook that will terminate training when accuracy
is found above some threshold.
N.B. Relies on existense of an 'acc_metric' collection in the default
graph.
"""
def __init__(self, max_acc=0.99):
"""
Args:
acc: `Tensor` in the current graph that will contain the updated
accuracy value after each session run call.
max_acc: (float) largest permissible accuracy.
"""
self._acc_tensor = None
self._acc_op = None
self._max_acc = max_acc
def before_run(self, run_context):
if tf.get_collection('acc_metric'):
self._acc_tensor, self._acc_op = tf.get_collection('acc_metric')
return tf.train.SessionRunArgs([self._acc_tensor, self._acc_op])
else:
return tf.train.SessionRunArgs()
def after_run(self, run_context, run_values):
if not run_values.results:
return
if run_values.results[0] > self._max_acc:
tf.logging.info(
'Early stopping -- Accuracy {:.3f} above threshold '
'of {}.\n'.format(run_values.results[0], self._max_acc))
sys.exit()
class EmbeddingVisualizerHook(tf.train.SessionRunHook):
def __init__(self, embed_tensor):
super(EmbeddingVisualizerHook, self).__init__()
        self._embed_tensor = embed_tensor
        # (values, labels) accumulated across session.run calls; initialized
        # here so after_run can extend them without raising AttributeError.
        self._embeddings = ([], [])
def before_run(self, run_context):
return tf.train.SessionRunArgs(fetches=self._embed_tensor)
def after_run(self, run_context, run_values):
self._embeddings[0].extend(run_values[0][0])
self._embeddings[1].extend(run_values[0][1])
def get_embeddings(self):
return {
'values': self._embeddings[0],
'labels': self._embeddings[1]}
```
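A minimal wiring sketch for `EarlyStoppingHook`; the `classifier` estimator and `train_input_fn` below are placeholders, not from this repo, and the model_fn must first register its accuracy tensors under the `'acc_metric'` collection that `before_run` looks up:
```python
# Hedged sketch; `classifier` and `train_input_fn` are assumed to exist.
import tensorflow as tf
from hooks import EarlyStoppingHook

# Inside model_fn, register the metric the hook polls:
#   acc, acc_op = tf.metrics.accuracy(labels, preds)
#   tf.add_to_collection('acc_metric', acc)
#   tf.add_to_collection('acc_metric', acc_op)

classifier.train(
    input_fn=train_input_fn,
    steps=1000,
    hooks=[EarlyStoppingHook(max_acc=0.95)])
```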
#### File: -Learn-Artificial-Intelligence-with-TensorFlow/section3/snippets.py
```python
import tensorflow as tf
# ===============================================
# Previously was snippets.py of: 3_2_RNNs
# ===============================================
# i = input_gate, j = new_input, f = forget_gate, o = output_gate
# Get 4 copies of feeding [inputs, m_prev] through the "Sigma" diagram.
# Note that each copy has its own distinct set of weights.
lstm_matrix = self._linear1([inputs, m_prev])
i, j, f, o = tf.split(
value=lstm_matrix, num_or_size_splits=4, axis=1)
# Feed each of the gates through a sigmoid.
i = sigmoid(i)
f = sigmoid(f + self._forget_bias)
o = sigmoid(o)
c = f * c_prev + i * self._activation(j)
m = o * self._activation(c)
new_state = LSTMStateTuple(c, m)
return m, new_state
# ===============================================
# RNN illustration
# ===============================================
hidden_size = 32
def rnn_step(x, h_prev):
# Project inputs to each have dimension hidden_size.
combined_inputs = tf.layers.Dense(hidden_size)(tf.concat([x, h_prev], axis=1))
# Compute the next hidden state.
h = tf.tanh(combined_inputs)
return h
# ===============================================
# Bidirectional RNNs
# ===============================================
outputs_tuple, final_state_tuple = tf.nn.bidirectional_dynamic_rnn(
cell_fw=tf.nn.rnn_cell.LSTMCell(128),
cell_bw=tf.nn.rnn_cell.LSTMCell(128),
inputs=inputs,
dtype=tf.float32)
# Concatenate the forward and backward outputs.
# Shape: (batch_size, max_seq_len, 2 * state_size)
outputs = tf.concat(outputs_tuple, -1)
# ===============================================
# Stacked RNNs
# ===============================================
def lstm_cell():
return tf.nn.rnn_cell.LSTMCell(128)
cell = tf.nn.rnn_cell.MultiRNNCell([
lstm_cell() for _ in range(2)])
outputs, final_state = tf.nn.dynamic_rnn(cell, inputs, dtype=tf.float32)
```
#### File: section3/util/glove.py
```python
import os
import glob
import numpy as np
def get_glove_path(glove_dir, dim=25, prompt_if_multiple_found=True):
matches = glob.glob('{}/glove.*{}d.txt'.format(glove_dir.rstrip('/'), dim))
if len(matches) == 0:
raise FileNotFoundError('Could not find GloVe file for dimension {}.'.format(dim))
elif len(matches) == 1 or not prompt_if_multiple_found:
return matches[0]
else:
relative_matches = list(map(lambda m: m[m.index(glove_dir):], matches))
print('\nMultiple GloVe files found with dim={}. '
'Enter number of choice:\n{}'.format(
dim, '\n'.join(list(map(lambda i: str(i).replace(',', ':'),
enumerate(relative_matches))))))
choice = int(input('Number (default=0): ') or 0)
print('Using: {}\n'.format(os.path.basename(matches[choice])))
return matches[choice]
def get_glove(dim=25, vocab_size=None, lang=None, prompt_if_multiple_found=True):
"""Load glove word2vec dictionary with vector of size `dim`.
Args:
dim: (int) dimensionality of word vectors.
vocab_size: (int) Number of vectors to get. Default is to get
all of them in the provided file.
lang: (str) language to use, e.g. 'en_US'. Default is ignore language.
prompt_if_multiple_found: (bool) whether to prompt user if multiple
GloVe files are found with the specified `dim`. If False, choose
the first match.
"""
word2vec = {}
glove_path = get_glove_path('data/glove', dim, prompt_if_multiple_found)
if not os.path.exists(glove_path):
raise FileNotFoundError(
            'Could not find GloVe file: {}. Please go to {} and '
'download/unzip "glove.6B.zip" to the "glove" '
'directory.'.format(glove_path, 'https://nlp.stanford.edu/projects/glove/'))
with open(glove_path) as f:
for line in f:
word, vec = line.split(' ', 1)
try:
word2vec[word] = np.fromstring(vec, sep=' ')
except Exception:
print('word:', word)
print('vec:', vec)
raise ValueError
if vocab_size and len(word2vec) >= vocab_size:
break
return word2vec
```
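A short usage sketch, assuming `glove.6B.100d.txt` has been unzipped into `data/glove` as the error message above instructs:
```python
# Hedged example; requires data/glove/glove.6B.100d.txt on disk.
word2vec = get_glove(dim=100, vocab_size=5000, prompt_if_multiple_found=False)
print(len(word2vec))          # at most 5000 entries
print(word2vec['the'].shape)  # (100,)
```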
#### File: -Learn-Artificial-Intelligence-with-TensorFlow/section4/optimize_loss.py
```python
import tensorflow as tf
from tensorflow.python.ops import control_flow_ops
from pprint import pprint
OPTIMIZER_CLS_NAMES = {
"Adagrad": tf.train.AdagradOptimizer,
"Adam": tf.train.AdamOptimizer,
"Ftrl": tf.train.FtrlOptimizer,
"Momentum": tf.train.MomentumOptimizer,
"RMSProp": tf.train.RMSPropOptimizer,
"SGD": tf.train.GradientDescentOptimizer,
}
def _clip_gradients_by_norm(grads_and_vars, clip_gradients):
"""Clips gradients by global norm."""
gradients, variables = zip(*grads_and_vars)
clipped_gradients, _ = tf.clip_by_global_norm(gradients, clip_gradients)
return list(zip(clipped_gradients, variables))
def create_gradient_summaries(gradients):
for gradient, variable in gradients:
if isinstance(gradient, tf.IndexedSlices):
grad_values = gradient.values
else:
grad_values = gradient
if grad_values is not None:
var_name = variable.name.replace(':', '_')
tf.summary.scalar('gradient_norm/%s' % var_name,
tf.global_norm([grad_values]))
def create_adam_summaries(opt, learning_rate):
for slot in ['m', 'v']:
t_list = [opt.get_slot(v, slot) for v in tf.trainable_variables()
if v is not None]
slot_norm = tf.global_norm(t_list, name='{}_norm_op'.format(slot))
tf.summary.scalar('{}_norm'.format(slot), slot_norm)
if slot == 'v':
effective_lr = tf.divide(learning_rate, 1e-8 + tf.sqrt(slot_norm))
tf.summary.scalar('effective_lr', effective_lr)
def optimize_loss(loss, learning_rate, optimizer, clip_gradients=None):
"""Simplified version of tf.contrib.layers.optimize_loss, for
illustration purposes.
Args:
        loss: scalar `Tensor` to minimize.
        learning_rate: (float) initial value for the learning rate.
        optimizer: (str) one of the allowed optimizers in OPTIMIZER_CLS_NAMES.
clip_gradients: (float) if given, clip gradients such that their norm
is at most `clip_gradients` for any given variable.
Returns:
train_op: the training operation that computes gradients and updates weights.
"""
global_step = tf.train.get_global_step()
with tf.variable_scope('OptimizeLoss', values=[loss, global_step]):
update_ops = set(tf.get_collection(tf.GraphKeys.UPDATE_OPS))
loss = control_flow_ops.with_dependencies(list(update_ops), loss)
lr = tf.get_variable(
'learning_rate', (), trainable=False,
initializer=tf.constant_initializer(learning_rate))
opt = OPTIMIZER_CLS_NAMES[optimizer](learning_rate=lr)
# `gradients` is list of (gradient, variable) pairs, where
# `gradient` is the gradient for `variable`.
gradients = opt.compute_gradients(
loss, var_list=tf.trainable_variables())
if clip_gradients is not None:
gradients = _clip_gradients_by_norm(gradients, clip_gradients)
tf.summary.scalar('global_norm/clipped_gradient_norm',
tf.global_norm(list(zip(*gradients))[0]))
# Generate a scalar summary for each variable, giving its gradient norm.
create_gradient_summaries(gradients)
# Create gradient updates.
grad_updates = opt.apply_gradients(
gradients, global_step=global_step, name='train')
if optimizer == 'Adam':
create_adam_summaries(opt, learning_rate)
# Ensure the train_op computes grad_updates.
train_op = control_flow_ops.with_dependencies([grad_updates], loss)
return train_op
```
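A hedged sketch of calling this simplified `optimize_loss` in TF1 graph mode; the toy variable and loss below are stand-ins, and a global step must exist before the call since the function reads it with `tf.train.get_global_step()`:
```python
# Sketch only; real model wiring omitted.
import tensorflow as tf
from optimize_loss import optimize_loss

tf.train.get_or_create_global_step()
w = tf.get_variable('w', shape=[], initializer=tf.zeros_initializer())
loss = tf.square(w - 1.0)  # toy loss over one trainable variable
train_op = optimize_loss(loss, learning_rate=1e-3,
                         optimizer='Adam', clip_gradients=5.0)
```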
#### File: section5/client/client_util.py
```python
import os
import pickle
import threading
from lib import PATHS, util
from lib.data_tree import DataTree
from lib.preprocessing.read import get_dirpaths
from collections import OrderedDict
from operator import itemgetter
class ClientVectorizer:
def __init__(self, data_config):
self._pretrained_vectorizer_path = os.path.join(
DataTree.get_output_dir(data_config), 'vectorizer.pkl')
self._vectorizer = pickle.load(open(self._pretrained_vectorizer_path, 'rb'))
def doc_path_to_matrix(self, doc_path):
with open(doc_path, encoding='utf-8', errors='ignore') as f:
doc = f.read().strip().lower()
return self._vectorizer.docs_to_matrix(
docs=[doc], _print_info=os.path.basename(doc_path))
def __getattr__(self, item):
if hasattr(self._vectorizer, item):
return getattr(self._vectorizer, item)
else:
raise AttributeError('{} does not have attribute {}.'.format(
self.__class__.__name__, item))
class ClientPathTree:
def __init__(self, base_name):
self.config_name = base_name
self.base_dir = PATHS('models', 'servables')
self.current_dir = self.base_dir
self.prediction_paths = dict()
def update_prediction_path(self, path_name, category, probability):
if path_name not in self.prediction_paths:
self.prediction_paths[path_name] = []
self.prediction_paths[path_name].append((category, probability))
def is_servable_category(self, category):
return str(category) in self.categories
def draw(self, path_name):
def print_row(category, probability):
category_width = 30
probability_width = 5
print('{c:<{cw}} {p:<{pw}}'.format(
cw=category_width, pw=probability_width,
c=category, p=probability))
print()
print_row('Category', 'Probability')
print_row('-' * len('Category'), '-' * len('Probability'))
for category, probability in self.prediction_paths[path_name]:
print_row(category, '{:.3%}'.format(probability))
@property
def servable_dirs(self):
return [d for d in get_dirpaths(self.base_dir, nested=False)
if self.config_name in d]
@property
def categories(self):
dirnames = [os.path.basename(d) for d in self.servable_dirs]
return [name.rsplit('__', 1)[-1] for name in dirnames]
class ResultCounter(object):
"""Counter for the prediction results, in the case where we want asynchronous
requests. See model_client.py for example usage."""
def __init__(self, num_tests, concurrency, batch_size):
self._num_tests = num_tests
self._concurrency = concurrency
self._error = 0
self._done = 0
self._active = 0
self._condition = threading.Condition()
self.batch_size = batch_size
def inc_error(self):
with self._condition:
self._error += 1
def inc_done(self):
with self._condition:
self._done += 1
self._condition.notify()
def dec_active(self):
with self._condition:
self._active -= 1
self._condition.notify()
def get_error_rate(self):
with self._condition:
while self._done != self._num_tests:
self._condition.wait()
return self._error / (self.batch_size * float(self._num_tests))
def throttle(self):
with self._condition:
while self._active == self._concurrency:
self._condition.wait()
self._active += 1
```
#### File: section5/client/predict.py
```python
import sys
import numpy as np
import tensorflow as tf
from grpc.beta import implementations
from tensorflow_serving.apis import predict_pb2, model_pb2, prediction_service_pb2
import argparse
DESCRIPTION = """Make queries to a TensorFlow ModelServer."""
parser = argparse.ArgumentParser(
description=DESCRIPTION,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument(
'--server', default='localhost:9000',
help='PredictionService host:port')
def main():
args = parser.parse_args()
if not args.server:
print('please specify server host:port')
sys.exit()
# Get the server stub. This is how we interact with the server.
host, port = args.server.split(':')
stub = get_prediction_service_stub(host, int(port))
# Create test example to send as 'x' for prediction.
X = np.array(['i', 'like', 'dogs'])
# Reshape to indicate batch of size 1.
X = np.reshape(X, (1, 3))
    # Issue a predict request to the TensorFlow Model Server.
category = predict_category(stub, X)
print('Predicted category:', category)
def get_prediction_service_stub(host, port):
channel = implementations.insecure_channel(host, port)
stub = prediction_service_pb2.beta_create_PredictionService_stub(channel)
return stub
def predict_category(stub, X):
# Wrap X inside a valid PredictRequest.
predict_request = get_predict_request(X)
# Call TensorFlow model server's Predict API, which returns a PredictResponse.
predict_response = stub.Predict(predict_request, timeout=20.0)
# Extract the predicted category from the PredictResponse object.
prediction_category = get_predicted_category(predict_response)
return prediction_category
def get_predict_request(x):
model_spec = model_pb2.ModelSpec(name='default', signature_name='export_outputs')
request = predict_pb2.PredictRequest(model_spec=model_spec)
request.inputs['x'].CopyFrom(
tf.contrib.util.make_tensor_proto(x, shape=x.shape))
return request
def get_predicted_category(predict_response):
return predict_response.outputs['preds_words'].string_val[0].decode()
if __name__ == '__main__':
main()
``` |
{
"source": "joyjjjjz/algo",
"score": 4
} |
#### File: python/44_shortest_path/dijkstra.py
```python
import sys
import heapq
class Graph:
def __init__(self, vertex_count):
self.adj = [[] for _ in range(vertex_count)]
def add_edge(self, s, t, w):
edge = Edge(s, t, w)
self.adj[s].append(edge)
def __len__(self):
return len(self.adj)
# The class below supports the dijkstra implementation
class Vertex:
def __init__(self, v, dist):
        self.id = v  # vertex id
        self.dist = dist  # distance from the start vertex to this vertex
def __gt__(self, other):
return self.dist > other.dist
def __repr__(self):
return str((self.id, self.dist))
class Edge:
def __init__(self, source, target, weight):
self.s = source
self.t = target
self.w = weight
class VertexPriorityQueue:
def __init__(self):
self.vertices = []
def get(self):
return heapq.heappop(self.vertices)
def put(self, v):
self.vertices.append(v)
self.update_priority()
def empty(self):
return len(self.vertices) == 0
def update_priority(self):
heapq.heapify(self.vertices)
def __repr__(self):
return str(self.vertices)
def dijkstra(g, s, t):
size = len(g)
    pq = VertexPriorityQueue()  # vertex queue
    in_queue = [False] * size  # already-enqueued flags
    vertices = [  # vertices whose distance from s is kept up to date
        Vertex(v, sys.maxsize) for v in range(size)
    ]
    predecessor = [-1] * size  # predecessor of each vertex on the shortest path
vertices[s].dist = 0
pq.put(vertices[s])
in_queue[s] = True
while not pq.empty():
v = pq.get()
if v.id == t:
break
for edge in g.adj[v.id]:
if v.dist + edge.w < vertices[edge.t].dist:
                # After changing the priority of an element already in pq:
                # 1. If a put follows, heapify is triggered and the next get
                #    returns the highest-priority vertex.
                # 2. If no put follows, the next get may return a vertex that
                #    is not the highest priority, which is a bug.
                # To stay correct, re-heapify manually.
vertices[edge.t].dist = v.dist + edge.w
predecessor[edge.t] = v.id
                pq.update_priority()  # re-heapify
if not in_queue[edge.t]:
pq.put(vertices[edge.t])
in_queue[edge.t] = True
print_path(s, t, predecessor)
return vertices[t].dist
def print_path(s, t, p):
if t == s:
print(s)
else:
print_path(s, p[t], p)
        print(t)
if __name__ == '__main__':
g = Graph(6)
g.add_edge(0, 1, 10)
g.add_edge(0, 4, 15)
g.add_edge(1, 2, 15)
g.add_edge(1, 3, 2)
g.add_edge(2, 5, 5)
g.add_edge(3, 2, 1)
g.add_edge(3, 5, 12)
g.add_edge(4, 5, 10)
print(dijkstra(g, 0, 5))
    # The test case below exposes the problem of updating queue element priorities
# g = Graph(4)
# g.add_edge(0, 1, 18)
# g.add_edge(0, 2, 3)
# g.add_edge(2, 1, 1)
# g.add_edge(1, 3, 5)
# g.add_edge(2, 3, 8)
# g.add_edge(0, 3, 15)
# print(dijkstra(g, 0, 3))
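    # Editor's note (sketch): Python's heapq has no decrease-key operation,
    # which is why update_priority() re-heapifies after a dist is lowered.
    # A common O(log n) alternative is "lazy deletion": push a fresh
    # (dist, v) pair and skip stale entries on pop, e.g.:
    #   heapq.heappush(pq, (new_dist, v))
    #   ...
    #   d, v = heapq.heappop(pq)
    #   if d > dist[v]:
    #       continue  # stale entry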
``` |
{
"source": "joyjoie/blog",
"score": 2
} |
#### File: migrations/versions/944a46760717_.py
```python
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '944a46760717'
down_revision = '<KEY>'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('comments')
op.alter_column('users', 'profile_pic_path',
existing_type=sa.VARCHAR(),
    nullable=False)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.alter_column('users', 'profile_pic_path',
existing_type=sa.VARCHAR(),
nullable=True)
op.create_table('comments',
sa.Column('id', sa.INTEGER(), autoincrement=True, nullable=False),
sa.Column('body', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('author', sa.TEXT(), autoincrement=False, nullable=True),
sa.Column('user_id', sa.INTEGER(), autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], name='comments_user_id_fkey'),
sa.PrimaryKeyConstraint('id', name='comments_pkey')
)
# ### end Alembic commands ###
``` |
{
"source": "joykabir/py-copytree",
"score": 3
} |
#### File: py-copytree/src/copy_tree.py
```python
from __future__ import print_function
import fnmatch
import os
from os.path import isdir, join
import shutil
def copy_tree(src, dest, ignore=None, symlink=False, force=False):
"""
    An advanced, alternate implementation of copytree.
    :param src: The source path to copy from
    :param dest: The destination path to copy to
    :param ignore: Ignore patterns, as a function
    :param symlink: If True, recreate symlinks found in the source at the destination
    :param force: If True, overwrite files that already exist at the destination
    """
    # Special characters in the path ('.', '~') are resolved
spath = os.path.abspath(os.path.expanduser(src))
dpath = os.path.abspath(os.path.expanduser(dest))
# Create destination if it does not exist
if not os.path.exists(dpath):
os.makedirs(dpath)
try:
shutil.copystat(spath, dpath)
except OSError:
# Does not work in Windows
pass
# Get lists of all files and dirs in source path
sitems = os.listdir(spath)
# Call ignore function and get the items to ignore
if ignore is not None:
ignored_names = ignore(spath, sitems)
else:
ignored_names = set()
for item in sitems:
        # Skip items that match the ignore patterns.
if item in ignored_names:
continue
sitem = os.path.join(spath, item)
ditem = os.path.join(dpath, item)
# Handle symlink if it is True
if os.path.islink(sitem):
if symlink:
if os.path.lexists(ditem):
os.remove(ditem)
os.symlink(os.readlink(sitem), ditem)
        # If the source item is a directory, recurse into it.
elif os.path.isdir(sitem):
copy_tree(sitem, ditem, ignore, symlink, force)
# Skip if the file exists in the destination.
        # Overwrite if force is True.
elif os.path.isfile(ditem):
if force:
print('Overwriting destination: {}'.format(repr(ditem)))
shutil.copy2(sitem, ditem)
        # Copy the rest, i.e. files that do not exist in the destination.
else:
print('Copying: {}'.format(repr(sitem)))
shutil.copy2(sitem, ditem)
def ignore_patterns(*patterns):
"""
List of patterns to ignore
:param args patterns: Defines a sequence of glob-style patterns
to specify what files to ignore.
"""
def _ignore_patterns(path, names): # pylint: disable=unused-argument
"returns ignore list"
ignored_item = []
for pattern in patterns:
ignored_item.extend(fnmatch.filter(names, pattern))
return set(ignored_item)
return _ignore_patterns
def include_patterns(*patterns):
"""
List of patterns to include
See: https://stackoverflow.com/a/35161407
There is a bug though in the answer.
:param args patterns: Defines a sequence of glob-style patterns
to specify what files to NOT ignore.
"""
def _ignore_patterns(path, names):
"returns ignore list"
keep = set(name for pattern in patterns
for name in fnmatch.filter(names, pattern))
ignore = set(name for name in names
if name not in keep and not isdir(join(path, name)))
return ignore
return _ignore_patterns
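# Editor's usage sketch (paths are placeholders):
#   copy_tree('./src', './dest', ignore=ignore_patterns('*.pyc', 'tmp*'))
#   copy_tree('./src', './dest', ignore=include_patterns('*.py'), force=True)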
``` |
{
"source": "joykhatter/Flask-Blog-App",
"score": 3
} |
#### File: Flask-Blog-App/models/blog.py
```python
from db import db
class BlogModel(db.Model):
__tablename__ = "blogs"
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(60))
content = db.Column(db.String(1000))
img_name = db.Column(db.String(60))
img_url = db.Column(db.String(100))
def __init__(self, title, content, img_name, img_url):
self.title = title
self.content = content
self.img_name = img_name
self.img_url = img_url
def json(self):
data = {
"id":self.id,
"title":self.title,
"content":self.content,
"img_name":self.img_name,
"img_url":self.img_url
}
return data
@classmethod
def find_by_id(cls, id):
return cls.query.filter_by(id=id).first()
def insert_update(self):
db.session.add(self)
db.session.commit()
def delete(self):
db.session.delete(self)
db.session.commit()
``` |
{
"source": "joykour/COCO-Dataset-2018-Stuff-Segmentation-Challenge",
"score": 2
} |
#### File: keras_segmentation/data_utils/data_loader.py
```python
import numpy as np
import cv2
import glob
import itertools
import os
from tqdm import tqdm
from ..models.config import IMAGE_ORDERING
from .augmentation import augment_seg
import random
random.seed(0)
class_colors = [ ( random.randint(0,255),random.randint(0,255),random.randint(0,255) ) for _ in range(5000) ]
def get_pairs_from_paths( images_path , segs_path ):
images = glob.glob( os.path.join(images_path,"*.png") ) + glob.glob( os.path.join(images_path,"*.jpg") ) + glob.glob( os.path.join(images_path,"*.jpeg") )
segmentations = glob.glob( os.path.join(segs_path,"*.png") )
segmentations_d = dict( zip(segmentations,segmentations ))
ret = []
for im in images:
seg_bnme = os.path.basename(im).replace(".jpg" , ".png").replace(".jpeg" , ".png")
seg = os.path.join( segs_path , seg_bnme )
    # The assertion below was disabled because it raised errors on this dataset:
#assert ( seg in segmentations_d ), (im + " is present in "+images_path +" but "+seg_bnme+" is not found in "+segs_path + " . Make sure annotation image are in .png" )
ret.append((im , seg) )
return ret
def get_image_arr( path , width , height , imgNorm="sub_mean" , odering='channels_first' ):
if type( path ) is np.ndarray:
img = path
else:
img = cv2.imread(path, 1)
if imgNorm == "sub_and_divide":
img = np.float32(cv2.resize(img, ( width , height ))) / 127.5 - 1
elif imgNorm == "sub_mean":
img = cv2.resize(img, ( width , height ))
img = img.astype(np.float32)
img[:,:,0] -= 103.939
img[:,:,1] -= 116.779
img[:,:,2] -= 123.68
img = img[ : , : , ::-1 ]
elif imgNorm == "divide":
img = cv2.resize(img, ( width , height ))
img = img.astype(np.float32)
img = img/255.0
if odering == 'channels_first':
img = np.rollaxis(img, 2, 0)
return img
def get_segmentation_arr( path , nClasses , width , height , no_reshape=False ):
seg_labels = np.zeros(( height , width , nClasses ))
if type( path ) is np.ndarray:
img = path
else:
img = cv2.imread(path, 1)
img = cv2.resize(img, ( width , height ) , interpolation=cv2.INTER_NEAREST )
img = img[:, : , 0]
for c in range(nClasses):
seg_labels[: , : , c ] = (img == c ).astype(int)
if no_reshape:
return seg_labels
seg_labels = np.reshape(seg_labels, ( width*height , nClasses ))
return seg_labels
def verify_segmentation_dataset( images_path , segs_path , n_classes ):
img_seg_pairs = get_pairs_from_paths( images_path , segs_path )
assert len(img_seg_pairs)>0 , "Dataset looks empty or path is wrong "
for im_fn , seg_fn in tqdm(img_seg_pairs) :
img = cv2.imread( im_fn )
seg = cv2.imread( seg_fn )
assert ( img.shape[0]==seg.shape[0] and img.shape[1]==seg.shape[1] ) , "The size of image and the annotation does not match or they are corrupt "+ im_fn + " " + seg_fn
assert ( np.max(seg[:,:,0]) < n_classes) , "The pixel values of seg image should be from 0 to "+str(n_classes-1) + " . Found pixel value "+str(np.max(seg[:,:,0]))
print("Dataset verified! ")
def image_segmentation_generator( images_path , segs_path , batch_size, n_classes , input_height , input_width , output_height , output_width , do_augment=False ):
img_seg_pairs = get_pairs_from_paths( images_path , segs_path )
random.shuffle( img_seg_pairs )
zipped = itertools.cycle( img_seg_pairs )
while True:
X = []
Y = []
for _ in range( batch_size) :
im , seg = next(zipped)
im = cv2.imread(im , 1 )
seg = cv2.imread(seg , 1 )
if do_augment:
                im, seg[:, :, 0] = augment_seg(im, seg[:, :, 0])
X.append( get_image_arr(im , input_width , input_height ,odering=IMAGE_ORDERING ) )
Y.append( get_segmentation_arr( seg , n_classes , output_width , output_height ) )
yield np.array(X) , np.array(Y)
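# Editor's usage sketch (directory names are placeholders):
#   gen = image_segmentation_generator('images/', 'annotations/', batch_size=2,
#                                      n_classes=10, input_height=416, input_width=608,
#                                      output_height=208, output_width=304)
#   X, Y = next(gen)  # X: (2, 3, 416, 608) under channels_first; Y: (2, 208*304, 10)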
``` |
{
"source": "JoyKuan/ArchitecturalDesign_AI",
"score": 3
} |
#### File: JoyKuan/ArchitecturalDesign_AI/egg_add_label.py
```python
from datetime import datetime, timedelta, date
import pandas as pd
import os
import collections
# Check whether the last timestamp matches between the Garmin and Embrace files
def get_last_datetime():
id_last_datetime = collections.defaultdict(list)
file_path_embrace = os.path.join(os.getcwd(), 'embrace')
files_embrace = os.listdir(file_path_embrace)
for file in files_embrace:
read_path = os.path.join(file_path_embrace, file)
id = file.split('_')[0]
# ======== Embrace ========
# read either eda or temp because they have the same last datetime
if file.split('_')[1] == 'eda.csv':
print('file:', file)
df_embrace = pd.read_csv(read_path)
# get last time from embrace
last_datetime_embrace = datetime.strptime(df_embrace.iloc[-1]['time'], '%Y-%m-%d %H:%M:%S')
# ======== Garmin ========
garmin_filename = id + '.csv'
read_path_garmin = os.path.join(os.getcwd(), 'garmin', 'heart_rate', garmin_filename)
df_garmin = pd.read_csv(read_path_garmin)
# get last time from garmin
last_datetime_garmin = datetime.strptime(df_garmin.iloc[-1]['timestamp_16'], '%Y-%m-%d %H:%M:%S')
if last_datetime_embrace == last_datetime_garmin:
id_last_datetime[id].append(last_datetime_garmin)
return id_last_datetime
def fill_lost_time(dic_id_datetime):
# only 28.csv has not lost any time
for id_, datetime_ in dic_id_datetime.items():
if id_ == '06':
filename = id_ + '.csv'
print('file:', filename)
file_path_eeg = os.path.join(os.getcwd(), 'EEG', filename)
eeg = pd.read_csv(file_path_eeg)
            # convert the string to a datetime
last_datetime_eeg = datetime.strptime(eeg.iloc[-1]['Timestamp'], '%Y-%m-%d %H:%M:%S')
print('eda last datetime:', datetime_[0])
print('eeg last datetime:', last_datetime_eeg)
if datetime_[0] > last_datetime_eeg:
delta = datetime_[0] - last_datetime_eeg
print('delta:', delta)
expect_end_datetime_eeg = last_datetime_eeg + delta
print('expect end datetime of eeg:', expect_end_datetime_eeg)
# Iterating through a range of time returns every datetime
lost_datetime_list = list()
while last_datetime_eeg < expect_end_datetime_eeg:
last_datetime_eeg += timedelta(minutes=1)
lost_datetime_list.append(last_datetime_eeg)
print(lost_datetime_list, len(lost_datetime_list)) # store a few lost time
# get losing wave values from eeg
row = 0
lost_wave_values = list()
while row < len(lost_datetime_list):
lost_wave_values.append(list(eeg.iloc[row, 1:6]))
row += 1
# append datetime to beginning of list
if len(lost_datetime_list) == len(lost_wave_values):
for i in range(len(lost_wave_values)):
lost_wave_values[i].insert(0, lost_datetime_list[i])
print(lost_wave_values)
# append more rows to current eeg dataframe
for idx in range(len(lost_wave_values)):
eeg.loc[len(eeg['Timestamp'])+idx+1] = lost_wave_values[idx]
write_path = os.path.join(os.getcwd(), 'EEG_lost_fill', filename)
eeg.to_csv(write_path, index=False)
def eeg_add_label(eeg_df, laps_data_df, begin_time, current_date):
combine = list()
count = 0
for i in range(len(laps_data_df)-1, -1, -1):
        # there are 50 buildings (laps) in total
if count == 50:
break
lap_num = laps_data_df.iloc[i]['laps'].split(' ')[1]
using_time = datetime.strptime(laps_data_df.iloc[i]['using_time'], '%H:%M:%S.%f').time()
print("using time: ", using_time)
print("begin time: ", begin_time)
current_time = (datetime.combine(current_date, using_time) + begin_time).time()
print("currt time: ", current_time)
for idx in range(len(eeg_df)):
data_datetime = datetime.strptime(eeg_df.iloc[idx, 0], '%Y-%m-%d %H:%M:%S')
current_datetime = datetime.combine(current_date, current_time)
if data_datetime > current_datetime:
print('data_datetime, index, label = ', data_datetime, idx, lap_num)
temp = list(eeg_df.iloc[idx-1, 1:6])
temp.append(lap_num)
combine.append(temp)
delta_current_time = datetime.combine(date.min, current_time) - datetime.min
begin_time = delta_current_time
break
count += 1
person_df = pd.DataFrame(combine, columns=['Theta', 'Alpha', 'BetaL', 'BetaH', 'Gamma', 'label'])
return person_df
if __name__ == '__main__':
# first step: fill lost time
last_datetime_dic = get_last_datetime()
print(last_datetime_dic)
fill_lost_time(last_datetime_dic)
# second step: add label
file_path = os.path.join(os.getcwd(), 'EEG_complete')
files = os.listdir(file_path)
for file in files:
readfile_path = os.path.join(file_path, file)
df = pd.read_csv(readfile_path)
# get the last time and date from files in EEG_complete folder
last_time = df.iloc[len(df) - 1, 0].split(' ')[1]
date_ = datetime.strptime(df.iloc[len(df) - 1, 0].split(' ')[0], '%Y-%m-%d').date()
print('file:', file, 'last time:', last_time, 'date:', date_)
        # Process the Time Laps data
laps_folder = os.path.join(os.getcwd(), 'Time Laps')
laps_file = file.split('.')[0] + '.txt'
laps_file_path = os.path.join(laps_folder, laps_file)
# get total time from Time Laps
laps_df = pd.read_csv(laps_file_path, sep='\t', names=["laps", "using_time", "total_time"])
res_df = laps_df.loc[laps_df['laps'] == 'Lap 50']
total_time = res_df['total_time'].to_string(index=False)
        total_time = total_time[1:]  # strip the leading space in ' 00:40:09.130000'
# transfer string to datetime and get start time
last_time = datetime.strptime(last_time, '%H:%M:%S').time()
total_time = datetime.strptime(total_time, '%H:%M:%S.%f').time()
start_time = datetime.combine(date_, last_time) - datetime.combine(date_, total_time)
print('last time: ', last_time)
print('start time:', start_time)
print('total using time:', total_time)
eeg_person_df = eeg_add_label(df, laps_df, start_time, date_)
write_path = os.path.join(os.getcwd(), 'final', 'eeg_label')
write_filename = write_path + '/' + file
eeg_person_df.to_csv(write_filename, index=False)
```
#### File: JoyKuan/ArchitecturalDesign_AI/transfer_datetime_06.py
```python
import pandas as pd
from datetime import datetime, timedelta
from embrace import get_date_from_garmin
import os
import collections
def revise():
path = os.path.join(os.getcwd(), 'Selina Luo_26.11.20_12.21.44.md.pm.bp_datetime.csv')
df = pd.read_csv(path)
for i in range(len(df.Timestamp)):
datetime_ = datetime.strptime(df.iloc[i, 0], '%Y-%m-%d %H:%M:%S.%f')
df.iloc[i, 0] = datetime_ + timedelta(minutes=159)
# write_path = os.path.join(os.getcwd(), 'output.csv')
# df.to_csv(write_path, index=False)
return df
def datetime2minutes():
path = os.path.join(os.getcwd(), 'output.csv')
df = pd.read_csv(path)
df_waves = df.iloc[:, 81:152]
df_waves.insert(loc=0, column='Timestamp', value=df['Timestamp'])
df_waves = df_waves.dropna()
df_waves = df_waves.reset_index(drop=True)
print(df_waves)
flag = 0
dic_eeg = dict()
for col in df_waves.columns:
print('col: ', col)
datetimeOfkind, valuesOfkind = [], []
i = 0
if col != 'Timestamp':
while i < len(df_waves['Timestamp']):
sum_ = 0
count = 0
current_datetime = datetime.strptime(df_waves['Timestamp'][i], '%Y-%m-%d %H:%M:%S.%f')
print('current datetime:', current_datetime)
date_time = datetime.strptime(str(df_waves['Timestamp'][i]).split('.')[0], '%Y-%m-%d %H:%M:%S')
# transfer 15:43:44 to 15:43:00
date_time_00 = date_time.strftime('%Y-%m-%d %H:%M:%S')
date_time_00 = datetime.strptime(date_time_00[0:-2] + '00', '%Y-%m-%d %H:%M:%S')
next_time = date_time_00 + timedelta(minutes=1)
next_time = datetime.strptime(str(next_time).split('.')[0], '%Y-%m-%d %H:%M:%S')
# print('next_time:', next_time)
while current_datetime < next_time:
sum_ += df_waves[col][i]
i += 1
count += 1
if i < len(df_waves['Timestamp']):
current_datetime = datetime.strptime(df_waves['Timestamp'][i], '%Y-%m-%d %H:%M:%S.%f')
elif i >= len(df_waves['Timestamp']):
break
print('col:', col, 'i:', i, 'count:', count, 'sum:', sum_)
valuesOfkind.append(float(sum_ / count))
datetimeOfkind.append(date_time_00)
if flag == 0:
dic_eeg['Timestamp'] = datetimeOfkind
flag = 1
dic_eeg[col] = valuesOfkind
df_final = pd.DataFrame(dic_eeg, columns=list(dic_eeg.keys()))
# write_path = os.path.join(os.getcwd(), '06.csv')
# df_final.to_csv(write_path, index=False)
return df_final
def avg_14_channels(df):
theta, alpha, betal, betah, gamma, datetime_eeg = [], [], [], [], [], []
dic_avg = dict()
for row in range(len(df['Timestamp'])):
theta_temp, alpha_temp, betal_temp, betah_temp, gamma_temp = [], [], [], [], []
for col in df.columns:
if col[-5:] == 'Theta':
theta_temp.append(df[col][row])
elif col[-5:] == 'Alpha':
alpha_temp.append(df[col][row])
elif col[-5:] == 'BetaL':
betal_temp.append(df[col][row])
elif col[-5:] == 'BetaH':
betah_temp.append(df[col][row])
elif col[-5:] == 'Gamma':
gamma_temp.append(df[col][row])
theta.append(sum(theta_temp) / len(theta_temp))
alpha.append(sum(alpha_temp) / len(alpha_temp))
betal.append(sum(betal_temp) / len(betal_temp))
betah.append(sum(betah_temp) / len(betah_temp))
gamma.append(sum(gamma_temp) / len(gamma_temp))
datetime_eeg.append(df['Timestamp'][row])
dic_avg['Timestamp'] = datetime_eeg
dic_avg['Theta'] = theta
dic_avg['Alpha'] = alpha
dic_avg['BetaL'] = betal
dic_avg['BetaH'] = betah
dic_avg['Gamma'] = gamma
df_eeg = pd.DataFrame.from_dict(dic_avg)
write_path = os.path.join(os.getcwd(), '06_1.csv')
df_eeg.to_csv(write_path, index=False)
return df_eeg
def profile_each_person(personInfo, egg_df, id):
print('egg_df:', egg_df)
print('personInfo:', personInfo)
for id_, datetime_ in personInfo.items():
if id_ == id:
person = collections.defaultdict(list)
test_start_time, test_end_time = datetime_[0], datetime_[1]
test_start_time = datetime.strptime(test_start_time, '%Y-%m-%d %H:%M:%S')
test_end_time = datetime.strptime(test_end_time, '%Y-%m-%d %H:%M:%S')
print('test_start_time:', test_start_time, 'test_end_time:', test_end_time)
for i in range(len(egg_df['Timestamp'])):
if test_start_time <= egg_df['Timestamp'][i] <= test_end_time:
person['Timestamp'].append(egg_df['Timestamp'][i])
person['Theta'].append(egg_df['Theta'][i])
person['Alpha'].append(egg_df['Alpha'][i])
person['BetaL'].append(egg_df['BetaL'][i])
person['BetaH'].append(egg_df['BetaH'][i])
person['Gamma'].append(egg_df['Gamma'][i])
person_df = pd.DataFrame.from_dict(dict(person))
print(person_df)
write_csvfilename = id + '.csv'
writeEEG_path = os.path.join(os.getcwd(), 'EEG', write_csvfilename)
person_df.to_csv(writeEEG_path, index=False)
if __name__ == '__main__':
# new_df = revise()
df_ = datetime2minutes()
print(df_)
eeg_df_avg14 = avg_14_channels(df_)
print(eeg_df_avg14)
person_info = get_date_from_garmin()
print(person_info)
profile_each_person(person_info, eeg_df_avg14, '06')
``` |
{
"source": "JoyKuan/Fruit-Market-Price-Mining-and-Analysis",
"score": 3
} |
#### File: JoyKuan/Fruit-Market-Price-Mining-and-Analysis/directory.py
```python
import os
def create_directory(path):
try:
os.mkdir(path)
except OSError:
print ("Creation of the directory %s failed" % path)
else:
print ("Successfully created the directory %s " % path)
``` |
{
"source": "joyliao07/401_midterm_wizard_game",
"score": 3
} |
#### File: 401_midterm_wizard_game/src/exceptions.py
```python
from . import app
from flask import render_template
@app.errorhandler(404)
def not_found(error):
"""404 page"""
return render_template('errors/404_notfound.html', error=error), 404
@app.errorhandler(405)
def bad_method(error):
"""405 page"""
return render_template('errors/405_bad_method.html', error=error), 405
@app.errorhandler(500)
def server_error(error):
""" server error """
return render_template('errors/500_server_error.html', error=error), 500
```
#### File: 401_midterm_wizard_game/src/route_helpers.py
```python
from . import app
import boto3, botocore
import os
def upload_file_to_s3(file, filename, extension, bucket_name, acl="public-read"):
"""
Docs: http://zabana.me/notes/upload-files-amazon-s3-flask.html
"""
s3 = boto3.client(
"s3",
aws_access_key_id=os.environ.get('S3_KEY'),
aws_secret_access_key=os.environ.get('S3_SECRET_ACCESS_KEY'),
)
try:
print('trying file upload')
s3.upload_fileobj(
file,
bucket_name,
filename,
ExtraArgs={
"ACL": acl,
"ContentType": f'image/{extension}'
}
)
new_path = "{}{}".format('https://s3-us-west-2.amazonaws.com/s3wizard/', filename)
except Exception as e:
print('error msg from upload')
print("Something Happened: ", e)
return e
return new_path
```
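A minimal usage sketch for `upload_file_to_s3`, assuming it is called from a Flask route with a `werkzeug` `FileStorage` object; the route path is an assumption, while the `file_upload` field and `s3wizard` bucket match the strings used elsewhere in this repo:
```python
# Hypothetical caller; the route path is an assumption.
from flask import request

from . import app
from .route_helpers import upload_file_to_s3

@app.route('/upload', methods=['POST'])
def upload():
    file = request.files['file_upload']                 # werkzeug FileStorage
    extension = file.filename.rsplit('.', 1)[-1].lower()
    url = upload_file_to_s3(file, file.filename, extension, 's3wizard')
    return str(url)
```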
#### File: 401_midterm_wizard_game/src/submissions.py
```python
PIXEL_PERCENTAGE = .2
COLOR_DETECTION_THRESHOLD = 20
RED_GREEN_THRESHOLD = 5
def evaluate_submission(image, prompt):
"""Compare the ProcessedImage object to the prompts. Return
input: image (ProcessedImage) the processed image submitted by the user
input: prompt (tuple) adjective (color) and noun (object) to match
return: (False, False) color and object not found
return: (True, False) color found, object not found
return: (False, True) color not found, object found
return: (True, True) color and object found
"""
object_found, color_found = False, False
adjective, noun = prompt
# Is the prompt object in this image?
for keyword in image.keywords:
if noun in keyword:
object_found = True
break
color_found = find_target_color(image.colors, adjective)
return (color_found, object_found)
def find_target_color(color_data, target_color):
"""Looks through all of the rgb value sets and totals the proportion of pixels
that are primarily the target color.
input: color_data: color data extracted from image object, represented by rgb values
and pixel_fraction
input: target_color (string): the color we are trying to find
return: True if proportion of target_color pixels > PIXEL_PERCENTAGE
return: False if proportion of target_color pixels < PIXEL_PERCENTAGE
"""
target_pixel_fraction = 0
# import pdb; pdb.set_trace()
# Sum all the pixel_fraction values
pixel_fractions = sum([rgb.pixel_fraction for rgb in color_data])
# Make a list of colors we don't want
bad_colors = [c for c in ['red', 'green', 'blue'] if c != target_color]
# Iterate over all the rgb values found in the image
for detected_rgb in color_data:
# Is this region majority target_color?
for bad_color in bad_colors:
            color_diff = getattr(detected_rgb.color, target_color) - getattr(detected_rgb.color, bad_color)
# If target_color is green, we need to be more lenient about the proportion of red
if target_color == 'green' and bad_color == 'red':
if abs(color_diff) < RED_GREEN_THRESHOLD:
break
# If the target color value is less than one of the excluded, break
elif color_diff < COLOR_DETECTION_THRESHOLD:
break
else:
# If we get here, the region was majority target_color
target_pixel_fraction += detected_rgb.pixel_fraction
if target_pixel_fraction / pixel_fractions >= PIXEL_PERCENTAGE:
return True
return False
```
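To see how `find_target_color` weighs regions against `PIXEL_PERCENTAGE`, here is a self-contained sketch that fakes the color data with namedtuples; the `color`/`pixel_fraction` attribute names mirror what the function reads, everything else is made up:
```python
from collections import namedtuple

RGB = namedtuple('RGB', ['red', 'green', 'blue'])
ColorInfo = namedtuple('ColorInfo', ['color', 'pixel_fraction'])

# One mostly-blue region and one mostly-red region.
colors = [
    ColorInfo(RGB(red=10, green=20, blue=200), pixel_fraction=0.6),
    ColorInfo(RGB(red=180, green=30, blue=40), pixel_fraction=0.4),
]
print(find_target_color(colors, 'blue'))  # True: 0.6 / 1.0 >= PIXEL_PERCENTAGE
print(find_target_color(colors, 'red'))   # True: 0.4 / 1.0 >= PIXEL_PERCENTAGE
```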
#### File: src/test/test_routes.py
```python
import pytest
import io
# Login function
def login_for_test(app):
""" this logs in test user """
app.post('/login', data=dict(
email='<EMAIL>',
password='<PASSWORD>'
), follow_redirects=True)
# test basics
def test_app_import(app):
assert app
def test_bad_route(app):
""" test 404 with bad route """
rv = app.test_client().get('/fake')
assert rv.status_code == 404
assert b'Page not found' in rv.data
# Home route
def test_home_get_no_login(app):
""" test login page for status code/correct message with no login """
rv = app.test_client().get('/')
assert rv.status_code == 200
assert b'You must be the new apprentice.' in rv.data
def test_home_get_with_login(app, db, session, account):
""" test login page for status code/correct message with login """
with app.test_client() as app:
login_for_test(app)
rv = app.get('/')
assert rv.status_code == 200
assert b'Welcome back, my apprentice' in rv.data
def test_home_bad_method(app):
""" test home route with unimplemented method for correct status code """
rv = app.test_client().delete('/')
assert rv.status_code == 405
assert b'Are you trying to pull a fast one' in rv.data
# Play route
def test_play_get_no_login(app):
""" test that going to /play while not logged in redirects to login
page """
rv = app.test_client().get('/play', follow_redirects=True)
assert b'You must be logged in' in rv.data
assert rv.status_code == 200
def test_play_get_with_login(app, session, db, account):
""" test that going to /play while logged in takes you to the prompt """
with app.test_client() as app:
login_for_test(app)
rv = app.get('/play')
assert rv.status_code == 200
        assert b'I seem to have forgotten what a' in rv.data
def test_play_post_no_login(app):
""" test that trying to post to /play with no login redirects to login """
rv = app.test_client().post('/play', follow_redirects=True)
assert rv.status_code == 200
assert b'You must be logged in' in rv.data
def test_play_post_with_login(app, session, db, account):
""" tests posting to play route (user making initial submission) """
with app.test_client() as app:
login_for_test(app)
data = dict()
data['file_upload'] = (io.BytesIO(b'hi'), 'test_no_match.jpg')
rv = app.post(
'/play',
data=data,
content_type='multipart/form-data',
follow_redirects=True
)
assert rv.status_code == 200
assert b'So, is this what a' in rv.data
def test_play_post_with_login_no_data(app, session, db, account):
""" test that posting to /play with no data while logged in
just serves the /play get page """
with app.test_client() as app:
login_for_test(app)
rv = app.post('/play')
assert rv.status_code == 200
        assert b'I seem to have forgotten what a' in rv.data
def test_play_bad_method(app):
""" test home route with unimplemented method for correct status code """
rv = app.test_client().delete('/play')
assert rv.status_code == 405
assert b'Are you trying to pull a fast one' in rv.data
# Submission route
def test_submission_get_no_login(app):
""" tests that user is prompted to login when visiting submission page
when not logged in """
rv = app.test_client().get('/submission', follow_redirects=True)
assert b'You must be logged in' in rv.data
assert rv.status_code == 200
def test_submission_get_with_login_no_data(app, session, db, account):
""" tests that 404 is received when going to /submission without
submitting anything """
with app.test_client() as app:
login_for_test(app)
rv = app.get('/submission', follow_redirects=True)
assert rv.status_code == 404
assert b'Page not found' in rv.data
def test_submission_get_with_login(app, db, session, account):
""" tests that going to /submission route after submitting an image
(but before confirming/finalizing submission) serves correct content """
with app.test_client() as app:
login_for_test(app)
data = dict()
data['file_upload'] = (io.BytesIO(b'hi'), 'test.jpg')
app.post(
'/play',
data=data,
content_type='multipart/form-data'
)
rv = app.get('/submission')
assert rv.status_code == 200
assert b'So, is this what a' in rv.data
def test_submission_route_bad_method(app):
""" test submission route with unimplemented method for correct
status code """
rv = app.test_client().delete('/submission')
assert rv.status_code == 405
assert b'Are you trying to pull a fast one' in rv.data
# Feedback route
def test_feedback_get_no_login(app):
""" tests going to /feedback without being logged in to make sure
user is prompted to log in """
rv = app.test_client().get('/feedback', follow_redirects=True)
assert rv.status_code == 200
assert b'You must be logged in' in rv.data
def test_feedback_no_data(app, session, db, account):
""" tests that 404 is received when going to /feedback without
submitting anything """
with app.test_client() as app:
login_for_test(app)
rv = app.get('/feedback', follow_redirects=True)
assert rv.status_code == 404
assert b'Page not found' in rv.data
def test_feedback_get_no_match(app, session, db, account, prompt):
""" tests feedback text when user submits unmatching image """
with app.test_client() as app:
login_for_test(app)
f = open('src/test/test_images/test_no_match.jpg', 'rb').read()
data = dict()
data['file_upload'] = (io.BytesIO(f), 'test.jpg')
app.post(
'/play',
data=data,
content_type='multipart/form-data',
follow_redirects=True
)
rv = app.get('/feedback')
assert rv.status_code == 200
assert b'This is not a' in rv.data
assert b'It\'s not even' in rv.data
def test_feedback_get_color_only_match(app, session, db, account, prompt):
""" tests feedback text when user submits image that matches color only """
with app.test_client() as app:
login_for_test(app)
f = open('src/test/test_images/test_color_match.jpg', 'rb').read()
data = dict()
data['file_upload'] = (io.BytesIO(f), 'test.jpg')
app.post(
'/play',
data=data,
content_type='multipart/form-data'
)
rv = app.get('/feedback')
assert rv.status_code == 200
assert b'Well... it\'s' in rv.data
assert b'but it\'s not even a' in rv.data
def test_feedback_get_noun_only_match(app, session, db, account, prompt):
""" tests feedback text when user submits image that matches object
type only """
with app.test_client() as app:
login_for_test(app)
f = open('src/test/test_images/test_noun_match.jpg', 'rb').read()
data = dict()
data['file_upload'] = (io.BytesIO(f), 'test.jpg')
app.post(
'/play',
data=data,
content_type='multipart/form-data',
)
rv = app.get('/feedback')
assert rv.status_code == 200
# assert b'That looks like a' in rv.data
# assert b'but it\'s not' in rv.data
def test_feedback_get_full_match(app, session, db, account, prompt):
""" tests feedback text when user submits full match """
with app.test_client() as app:
login_for_test(app)
f = open('src/test/test_images/test_full_match.png', 'rb').read()
data = dict()
data['file_upload'] = (io.BytesIO(f), 'test.png')
app.post(
'/play',
data=data,
content_type='multipart/form-data'
)
rv = app.get('/feedback')
assert rv.status_code == 200
# assert b'Yes, that\'s a' in rv.data
def test_feedback_route_bad_method(app):
""" test feedback route with unimplemented method for correct ]
status code """
rv = app.test_client().delete('/feedback')
assert rv.status_code == 405
assert b'Are you trying to pull a fast one' in rv.data
# History route
def test_history_get_no_login(app):
""" tests going to /history without being logged in to make sure user is
redirected """
rv = app.test_client().get('/history', follow_redirects=True)
assert b'You must be logged in' in rv.data
assert rv.status_code == 200
def test_history_get_no_submissions(app, session, db, account):
""" tests going to /history when user hasn't submitted anything """
with app.test_client() as app:
login_for_test(app)
rv = app.get('/history')
assert rv.status_code == 200
assert b'Apprentice, you have no submissions yet.' in rv.data
def test_history_get_with_submissions(app, session, db, account, prompt):
""" tests that submission is present on history page """
with app.test_client() as app:
login_for_test(app)
f = open('src/test/test_images/test_color_match.jpg', 'rb').read()
data = dict()
data['file_upload'] = (io.BytesIO(f), 'test.jpg')
app.post(
'/play',
data=data,
content_type='multipart/form-data'
)
rv = app.get('/history')
assert rv.status_code == 200
        assert b'Apprentice, your past submissions are below.' in rv.data
assert b'Blue Chair (Fail)' in rv.data
# Players route
def test_players_get_no_login(app):
""" tests that going to /players when not logged in prompts user to log
in """
rv = app.test_client().get('/players', follow_redirects=True)
assert b'You must be logged in' in rv.data
assert rv.status_code == 200
def test_players_get_no_submissions(app, session, db, account, prompt):
""" tests that going to /players with no submissions shows different
message """
with app.test_client() as app:
login_for_test(app)
rv = app.get('/players')
assert rv.status_code == 200
assert b'Apprentice, there are no successful submissions' in rv.data
def test_players_get_with_submissions(app, session, db, account, prompt):
""" tests that going to /players shows a player's successful submission """
with app.test_client() as app:
login_for_test(app)
f = open('src/test/test_images/test_full_match.png', 'rb').read()
data = dict()
data['file_upload'] = (io.BytesIO(f), 'test.png')
app.post(
'/play',
data=data,
content_type='multipart/form-data'
)
app.get('/feedback')
rv = app.get('/players')
assert rv.status_code == 200
        assert b'Other player\'s submissions are below.' in rv.data
# assert b'Blue Chair' in rv.data
```
#### File: 401_midterm_wizard_game/src/validate_image.py
```python
from datetime import datetime
import os
from PIL import Image
def validate_image(file_path):
"""
Determine if the submitted image is valid.
:input file_path: modeled submission object
:returns: True if image is valid
:returns: False if image is invalid
"""
if os.environ['WIZARD_APP_TESTING'] == 'False':
img_time = get_image_timestamp(file_path)
# No EXIF data
if img_time is None:
return False
# Image older than 1 day
if (datetime.now() - img_time).days > 0:
return False
return True
def get_image_timestamp(image_file):
"""
Read an image timestamp by extracting EXIF data.
:param image_file: full path to image file as string
:returns: image file timestamp as datetime object
"""
binary_image = Image.open(image_file)
exif_data = binary_image._getexif()
if exif_data:
        return datetime.strptime(exif_data[306], '%Y:%m:%d %H:%M:%S')
return None
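# Hypothetical quick check (file name and env value are assumptions):
# os.environ['WIZARD_APP_TESTING'] = 'False'
# print(validate_image('submission.jpg'))  # False without EXIF data or if older than a day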
``` |
{
"source": "joyliao07/code_practice_and_review",
"score": 4
} |
#### File: code_practice_and_review/Challenge/longest_common_prefix.py
```python
from typing import List
class Solution:
def longestCommonPrefix(self, strs: List[str]) -> str:
if len(strs) == 0:
return ''
if len(strs) == 1:
return strs[0]
length_lst = len(strs)
first = strs[0]
length_word = len(first)
common = ''
for i in range(length_word):
counter = 0
for a in range(1, length_lst):
if len(strs[a]) < (i + 1):
break
else:
if first[i] == strs[a][i]:
counter += 1
if counter == (length_lst - 1):
common += first[i]
if counter < (length_lst - 1):
return common
return(common)
```
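The same problem is often solved more compactly by zipping the strings and stopping at the first column whose characters disagree; a hedged alternative sketch:
```python
from typing import List

def longest_common_prefix(strs: List[str]) -> str:
    # zip(*strs) yields tuples of i-th characters across all words and
    # stops at the shortest word, so no length checks are needed.
    prefix = []
    for chars in zip(*strs):
        if len(set(chars)) != 1:
            break
        prefix.append(chars[0])
    return ''.join(prefix)

print(longest_common_prefix(["flower", "flow", "flight"]))  # "fl"
```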
#### File: code_practice_and_review/Challenge/longest_palindromic_substring.py
```python
class Solution:
def longestPalindrome(self, s: str) -> str:
"""Runtime 13.55%; Memory 72.72%"""
length = 0
longest = ''
for i in range(len(s)):
phrase = ''
for a in range(i, len(s)):
phrase += s[a]
if phrase == phrase[::-1]:
if length < len(phrase):
length = len(phrase)
longest = phrase
return longest
def longestPalindrome_method_2(self, s: str) -> str:
"""Runtime 91.15%; Memory 33.26%"""
longest = ''
i = 0
while i < len(s):
l, r = i, i
while r+1 < len(s) and s[r] == s[r+1]:
r += 1
i = r + 1
            ans = self.find_longest(s, l, r)
if len(longest) < len(ans):
longest = ans
return longest
def find_longest(self, s, l, r):
while (l >= 0 and r < len(s) and s[l] == s[r]):
l -= 1
r += 1
return s[(l+1):r]
```
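A quick check of both methods:
```python
s = Solution()
print(s.longestPalindrome("babad"))          # "bab" (first longest found)
print(s.longestPalindrome_method_2("cbbd"))  # "bb"
```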
#### File: code_practice_and_review/Challenge/longest_string.py
```python
class Solution(object):
    def lengthOfLongestSubstring(self, s):
        """
        :type s: str
        :rtype: int
        """
        # Sliding window: `start` is the left edge of a window holding no
        # repeated characters; `last_seen` maps each char to its last index.
        last_seen = {}
        start = 0
        longest = 0
        for i, ch in enumerate(s):
            if ch in last_seen and last_seen[ch] >= start:
                start = last_seen[ch] + 1
            last_seen[ch] = i
            longest = max(longest, i - start + 1)
        return longest
```
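With the sliding-window version above, a quick check:
```python
sol = Solution()
print(sol.lengthOfLongestSubstring("abcabcbb"))  # 3 ("abc")
print(sol.lengthOfLongestSubstring("pwwkew"))    # 3 ("wke")
print(sol.lengthOfLongestSubstring("bbbbb"))     # 1
```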
#### File: code_practice_and_review/Challenge/palindrome_number.py
```python
class Solution:
def isPalindrome(self, x: int) -> bool:
y = str(x)
if y == y[::-1]:
return True
else:
return False
```
#### File: code_practice_and_review/Challenge/reverse_interger.py
```python
class Solution:
def reverse(self, x: int) -> int:
neg = False
if x < 0:
neg = True
x = x * -1
new_num = []
new_int = 0
position = 10
while x > 0:
digit = x % position
new_num.append(int(digit/(position/10)))
x -= digit
position *= 10
new_num = new_num[::-1]
position = 1
for what in new_num:
new_int += (what * position)
position *= 10
if neg == True:
new_int = new_int * -1
        if new_int > 2147483647 or new_int < -2147483648:
            new_int = 0
return new_int
```
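An equivalent approach reverses the digits as a string and applies the same 32-bit clamp; a hedged alternative sketch:
```python
def reverse_int(x: int) -> int:
    sign = -1 if x < 0 else 1
    result = sign * int(str(abs(x))[::-1])
    # LeetCode requires 0 on signed 32-bit overflow.
    return result if -2**31 <= result <= 2**31 - 1 else 0

print(reverse_int(-123))        # -321
print(reverse_int(1534236469))  # 0 (reversal overflows 32 bits)
```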
#### File: code_practice_and_review/Challenge/roman_to_integer.py
```python
class Solution:
def romanToInt(self, s: str) -> int:
if s == '':
return 0
num = 0
lst = list(s)
for i in range(len(lst)):
if lst[i] == 'M':
num += 1000
lst[i] = ' '
if len(lst) - i >= 2:
if lst[i] == 'C' and lst[i+1] == 'M':
num += 900
lst[i] = ' '
lst[i+1] = ' '
if lst[i] == 'D':
num += 500
lst[i] = ' '
if len(lst) - i >= 2:
if lst[i] == 'C' and lst[i+1] == 'D':
num += 400
lst[i] = ' '
lst[i+1] = ' '
if lst[i] == 'C':
num += 100
lst[i] = ' '
if len(lst) - i >= 2:
if lst[i] == 'X' and lst[i+1] == 'C':
num += 90
lst[i] = ' '
lst[i+1] = ' '
if lst[i] == 'L':
num += 50
lst[i] = ' '
if len(lst) - i >= 2:
if lst[i] == 'X' and lst[i+1] == 'L':
num += 40
lst[i] = ' '
lst[i+1] = ' '
if lst[i] == 'X':
num += 10
lst[i] = ' '
if len(lst) - i >= 2:
if lst[i] == 'I' and lst[i+1] == 'X':
num += 9
lst[i] = ' '
lst[i+1] = ' '
if lst[i] == 'V':
num += 5
lst[i] = ' '
if len(lst) - i >= 2:
if lst[i] == 'I' and lst[i+1] == 'V':
num += 4
lst[i] = ' '
lst[i+1] = ' '
if lst[i] == 'I':
num += 1
lst[i] = ' '
return num
```
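The if-chains above spell out each subtractive pair (IV, IX, XL, ...) by hand; a table-driven sketch of the same rule — subtract a symbol when a larger one follows it — is much shorter:
```python
def roman_to_int(s: str) -> int:
    values = {'I': 1, 'V': 5, 'X': 10, 'L': 50,
              'C': 100, 'D': 500, 'M': 1000}
    total = 0
    for i, ch in enumerate(s):
        # A symbol smaller than its right neighbor is subtracted.
        if i + 1 < len(s) and values[ch] < values[s[i + 1]]:
            total -= values[ch]
        else:
            total += values[ch]
    return total

print(roman_to_int('MCMXCIV'))  # 1994
```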
#### File: code_practice_and_review/Challenge/sudoku.py
```python
# Determine if a 9x9 Sudoku board is valid. Only the filled cells need to be
# validated according to the following rules:
# - Each row must contain the digits 1-9 without repetition.
# - Each column must contain the digits 1-9 without repetition.
# - Each of the nine 3x3 sub-boxes of the grid must contain the digits 1-9
#   without repetition.
# A partially filled sudoku can still be valid.
# The Sudoku board could be partially filled, where empty cells are filled with the character '.'.
# Example 1:
# Input:
# [
# ["5","3",".",".","7",".",".",".","."],
# ["6",".",".","1","9","5",".",".","."],
# [".","9","8",".",".",".",".","6","."],
# ["8",".",".",".","6",".",".",".","3"],
# ["4",".",".","8",".","3",".",".","1"],
# ["7",".",".",".","2",".",".",".","6"],
# [".","6",".",".",".",".","2","8","."],
# [".",".",".","4","1","9",".",".","5"],
# [".",".",".",".","8",".",".","7","9"]
# ]
# Output: true
# Example 2:
# Input:
# [
# ["8","3",".",".","7",".",".",".","."],
# ["6",".",".","1","9","5",".",".","."],
# [".","9","8",".",".",".",".","6","."],
# ["8",".",".",".","6",".",".",".","3"],
# ["4",".",".","8",".","3",".",".","1"],
# ["7",".",".",".","2",".",".",".","6"],
# [".","6",".",".",".",".","2","8","."],
# [".",".",".","4","1","9",".",".","5"],
# [".",".",".",".","8",".",".","7","9"]
# ]
# Output: false
from typing import List
class Solution:
def isValidSudoku_1(self, board: List[List[str]]) -> bool:
"""Runtime 81.74%; Memory 75.68%"""
for i in range(0, 9):
row = board[i]
test = []
for what in row:
if what != ".":
if what in test:
return False
test.append(what)
test = []
for a in range(0, 9):
if board[a][i] != ".":
if board[a][i] in test:
return False
test.append(board[a][i])
ind = 0
test = []
while ind < 7:
for i in range(ind, ind+3):
for a in range(0, 3):
if board[a][i] != ".":
if board[a][i] in test:
return False
test.append(board[a][i])
test = []
ind += 3
ind = 0
while ind < 7:
for i in range(ind, ind+3):
for a in range(3, 6):
if board[a][i] != ".":
if board[a][i] in test:
return False
test.append(board[a][i])
test = []
ind += 3
ind = 0
while ind < 7:
for i in range(ind, ind+3):
for a in range(6, 9):
if board[a][i] != ".":
if board[a][i] in test:
return False
test.append(board[a][i])
test = []
ind += 3
return True
def isValidSudoku_2(self, board: List[List[str]]) -> bool:
"""Runtime 93.75%; Memory 45.16%"""
for i in range(0, 9):
row = board[i]
test = []
for what in row:
if what != ".":
if what in test:
return False
test.append(what)
test = []
for a in range(0, 9):
if board[a][i] != ".":
if board[a][i] in test:
return False
test.append(board[a][i])
ind_a = 0
while ind_a < 7:
ind = 0
while ind < 7:
test = []
for i in range(ind, ind+3):
for a in range(ind_a, ind_a+3):
if board[a][i] != ".":
if board[a][i] in test:
return False
test.append(board[a][i])
test = []
ind += 3
ind_a += 3
return True
```
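Both versions walk the board unit by unit; a common single-pass alternative records one `(unit, digit)` key per constraint in a set, so any repeat is caught immediately. A hedged sketch:
```python
from typing import List

def is_valid_sudoku(board: List[List[str]]) -> bool:
    seen = set()
    for r in range(9):
        for c in range(9):
            d = board[r][c]
            if d == '.':
                continue
            # One key each for the row, column, and 3x3 box constraints.
            for key in (('row', r, d), ('col', c, d), ('box', r // 3, c // 3, d)):
                if key in seen:
                    return False
                seen.add(key)
    return True
```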
#### File: code_practice_and_review/Challenge/swap_nodes_in_pairs.py
```python
class ListNode:
    """Minimal stand-in; LeetCode normally supplies this class."""
    def __init__(self, val=0, next=None):
        self.val = val
        self.next = next
class Solution:
def swapPairs(self, head: ListNode) -> ListNode:
"""Runtime 95.03%; Memory 70.15%"""
if head is None:
return
if head.next is None:
return head
current = head
c_next = head.next
temp = True
previous = None
while c_next:
if previous:
previous.next = c_next
current.next = c_next.next
c_next.next = current
if temp == True:
temp_head = c_next
temp = False
previous = current
current = current.next
if current:
c_next = current.next
else:
break
return temp_head
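# Hypothetical usage sketch, relying on the ListNode stand-in defined above:
head = ListNode(1, ListNode(2, ListNode(3, ListNode(4))))
node = Solution().swapPairs(head)
while node:                       # prints 2, 1, 4, 3
    print(node.val)
    node = node.next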
``` |
{
"source": "joyliao07/data_structures_and_algorithms",
"score": 4
} |
#### File: binary_search_tree/tests/test_binary_search_tree.py
```python
from ..binary_search_tree import NodeTree, NodeQueue, Queue, BST
import pytest
@pytest.fixture
def empty_queue():
"""To create an empty queue for testing purpose."""
return BST()
def test_class_exist():
"""To test that the class queue exists."""
assert BST
def test_str_method_for_node():
"""To test the output of str method."""
short = NodeTree('1')
assert str(short) == '1'
def test_repr_method_for_node():
"""To test the output of repr method."""
short = NodeTree('1')
assert repr(short) == '<NODE: 1>'
def test_repr_method_for_bst():
"""To test the output of repr method."""
short = BST(['1'])
assert repr(short) == '<BST root: 1>'
def test_str_method_for_bst():
"""To test the output of str method."""
short = BST(['1', '2', '3'])
assert str(short) == f'Value of the root Queue is: 1'
def test_add_node_with_valid_input_edge_case():
"""To test add_node method with valid input."""
tree_new = BST()
tree_new.add_node(1)
assert tree_new.root.val == 1
def test_add_node_with_valid_input():
"""To test add_node method with valid input."""
tree_new = BST([1, 2])
tree_new.add_node(3)
assert tree_new.root.val == 1
def test_add_node_with_valid_input_2():
"""To test add_node method with valid input."""
tree_new = BST([100])
tree_new.add_node(3)
assert tree_new.root.val == 100
def test_in_order_with_empty_input():
"""To test in_order method with an empty tree."""
tree_new = BST()
with pytest.raises(TypeError) as err:
tree_new.in_order_traversal(tree_new.root)
assert str(err.value) == (f'There is no node to traverse.')
def test_in_order_with_valid_input(capsys):
"""To test in_order method with valid input."""
tree_new = BST([10, 12, 11, 15, 20, 17])
tree_new.in_order_traversal(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '10\n11\n12\n15\n17\n20\n'
def test_in_order_with_valid_input_edge_case(capsys):
"""To test in_order method with valid input."""
tree_new = BST([10])
tree_new.in_order_traversal(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '10\n'
def test_pre_order_with_empty_input():
"""To test pre_order method with an empty tree."""
tree_new = BST()
with pytest.raises(TypeError) as err:
tree_new.pre_order_traversal(tree_new.root)
assert str(err.value) == (f'There is no node to traverse.')
def test_pre_order_with_valid_input(capsys):
"""To test pre_order method with valid input."""
tree_new = BST([10, 12, 11, 15, 20, 17])
tree_new.pre_order_traversal(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '10\n12\n11\n15\n20\n17\n'
def test_pre_order_with_valid_input_edge_case(capsys):
"""To test pre_order method with valid input."""
tree_new = BST([10])
tree_new.pre_order_traversal(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '10\n'
def test_post_order_with_empty_input():
"""To test post_order method with an empty tree."""
tree_new = BST()
with pytest.raises(TypeError) as err:
tree_new.post_order_traversal(tree_new.root)
assert str(err.value) == (f'There is no node to traverse.')
def test_post_order_with_valid_input(capsys):
"""To test post_order method with valid input."""
tree_new = BST([10, 12, 11, 15, 20, 17])
tree_new.post_order_traversal(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '11\n17\n20\n15\n12\n10\n'
def test_post_order_with_valid_input_edge_case(capsys):
"""To test post_order method with valid input."""
tree_new = BST([10])
tree_new.post_order_traversal(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '10\n'
def test_breadth_first_with_valid_input(capsys):
"""To test breadth_first method with valid input."""
tree_new = BST([10, 12, 11, 15, 20, 17])
tree_new.breadth_first(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '10\n12\n11\n15\n20\n17\n'
def test_breadth_first_with_valid_input_2(capsys):
"""To test breadth_first method with valid input."""
tree_new = BST([40, 15, 47, 20, 30, 50, 65])
tree_new.breadth_first(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '40\n15\n47\n20\n50\n30\n65\n'
def test_breadth_first_with_empty_input():
"""To test breadth_first method with an empty tree."""
tree_new = BST()
with pytest.raises(TypeError) as err:
tree_new.breadth_first(tree_new.root)
assert str(err.value) == (f'There is no node to traverse.')
def test_breadth_first_with_valid_input_edge_case(capsys):
"""To test breadth_first method with valid input."""
tree_new = BST([10])
tree_new.breadth_first(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '10\n'
def test_find_max_value_with_valid_input_edge_case(capsys):
"""To test find_max_value method with valid input."""
tree_new = BST([10])
tree_new.find_maximum_value(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '10\n'
def test_find_max_value_with_valid_input(capsys):
"""To test find_max_value method with valid input."""
tree_new = BST([40, 15, 47, 20, 30, 50, 65])
tree_new.find_maximum_value(tree_new.root)
captured = capsys.readouterr()
assert captured.out == '65\n'
def test_find_max_value_with_empty_input():
"""To test find_max_value method with an empty tree."""
tree_new = BST()
with pytest.raises(TypeError) as err:
tree_new.find_maximum_value(tree_new.root)
assert str(err.value) == (f'There is no node to traverse.')
```
#### File: data_structures/depth_first/depth_first.py
```python
class Graph:
"""
"""
def __init__(self, iterable=None):
if iterable is None:
iterable = {}
if type(iterable) is not dict:
raise TypeError('Iterable is not a dictionary.')
# "gdict" will be the "graph" in conftest
self.gdict = iterable
# self.graph = iterable
def __repr__(self):
output = f'<List of vertices: { self.gdict.keys() }>'
# <List of vertices: dict_keys(['G', 'B', 'C', 'D', 'E', 'F'])>
return output
def __str__(self):
output = f'The list of vertices are: {self.gdict.keys()}'
# "The list of vertices are: dict_keys(['G', 'B', 'C', 'D', 'E', 'F'])"
return output
def __len__(self):
return len(self.gdict.keys())
def add_vert(self, val):
"""
"""
# add vertice to self.graph (i.e. self.gdict)
        if not isinstance(val, (int, float, str)):
print('Val must be a string or a number.')
raise KeyError('Val must be a string or a number.')
if val in self.gdict:
print('Vert already exists.')
raise KeyError('Vert already exists.')
self.gdict[val] = {}
def has_vert(self, val):
"""
"""
# This is a boolean itself:
return val in self.gdict
def add_edge(self, v1, v2, weight):
"""
"""
if v1 not in self.gdict:
print('The vert does not exist.')
raise KeyError('The vert does not exist.')
self.gdict[v1][v2] = weight
print(self.gdict)
return self.gdict
def get_neighbors(self, val):
"""
"""
if val not in self.gdict:
print('There is no existing vert.')
raise KeyError('There is no existing vert.')
print(self.gdict[val].keys())
return self.gdict[val].keys()
def breadth_first(self, node=None):
if node is None:
print(f'There is no node to traverse.')
raise TypeError(f'There is no node to traverse.')
else:
visited = {}
for i in self.gdict.keys():
visited[i] = 'Yay'
queue = []
queue.append(node)
visited[node] = 'Nay'
while queue:
popped = queue.pop(0)
print(popped)
for what in self.gdict[popped]:
if visited[what] == 'Yay':
queue.append(what)
visited[what] = 'Nay'
def flight_cost(self, node1=None, node2=None, node3=None):
if node1 not in self.gdict.keys() or node2 not in self.gdict.keys():
print('Invalid itinerary.')
raise KeyError(False, '$0')
else:
cost = 0
if node2 not in self.gdict[node1]:
print(False, '$', cost)
raise KeyError(False, '$', cost)
else:
cost += self.gdict[node1][node2]
if node3 is not None and node3 not in self.gdict.keys():
print(False, '$', cost)
raise KeyError(False, '$', cost)
elif node3 in self.gdict.keys() and node3 not in self.gdict[node2]:
print(False, '$', cost)
raise KeyError(False, '$', cost)
elif node3 in self.gdict.keys() and node3 in self.gdict[node2]:
cost += self.gdict[node2][node3]
print(True, '$', cost)
return(True, '$', cost)
def depth_first_traversal(self, node):
visited = []
def walk(node):
if node in visited:
return
print(node)
visited.append(node)
for neighbor in self.gdict[node].keys():
if neighbor not in visited:
walk(neighbor)
if node not in self.gdict.keys():
print('Node does not exist.')
raise KeyError('Node does not exist.')
else:
walk(node)
map = {
'A': {'B':0, 'D':0},
'B': {'A':0, 'C':0, 'D':0},
'C': {'B':0, 'G':0},
'D': {'A':0, 'B':0, 'E':0, 'H':0, 'F':0},
'E': {'D':0},
'F': {'D':0, 'H':0},
'G': {'C':0},
'H': {'F':0, 'D':0},
}
g = Graph(map)
g.depth_first_traversal('A')
```
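The 'Yay'/'Nay' bookkeeping in `breadth_first` works, but the same walk is usually written with a visited set and `collections.deque`; a hedged sketch over the same adjacency-dict shape:
```python
from collections import deque

def bfs(gdict, start):
    visited = {start}
    queue = deque([start])
    while queue:
        node = queue.popleft()      # O(1), unlike list.pop(0)
        print(node)
        for neighbor in gdict[node]:
            if neighbor not in visited:
                visited.add(neighbor)
                queue.append(neighbor)

bfs(map, 'A')  # reuses the adjacency dict defined in depth_first.py above
```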
#### File: data_structures/fifo_animal_shelter/fifo_animal_shelter.py
```python
class Node(object):
"""This class is set up to create new Nodes."""
def __init__(self, value, next_node=None):
self.value = value
self.next_node = next_node
def __str__(self):
return f'{ self.value }'
def __repr__(self):
return f'<NODE: { self.value }>'
class AnimalShelter(object):
"""To create a node for a Queue and other related methods."""
def __init__(self, iterable=None):
self.front = None
self.rear = None
self._size = 0
if iterable is None:
iterable = []
if type(iterable) is not list:
raise TypeError('Iterable is not a list.')
for what in iterable:
self.enqueue(what)
def __str__(self):
output = f'Queue: Value of the front Queue is: {self.front.value}'
return output
def __len__(self):
return self._size
def __repr__(self):
return f'<Queue front: { self.front.value }>'
def enqueue(self, value):
"""
"""
        if value not in ('dog', 'cat'):
            raise TypeError('The input must be dog or cat.')
if self.front is None:
new_node = Node(value)
self.front = new_node
self.rear = new_node
self._size += 1
return self
else:
new_node = Node(value)
self.rear.next_node = new_node
self.rear = new_node
self._size += 1
return self
def dequeue(self, val):
"""
"""
if self.front is None:
raise TypeError(f'Input must be a non-empty queue.')
        if val not in ('dog', 'cat'):
            raise TypeError('The val must be dog or cat.')
else:
current = Node(None, self.front)
loop = 0
while current.next_node:
if current.next_node.value == val:
if loop == 0:
self.front = self.front.next_node
current.next_node.next_node = None
return current.next_node
else:
if self.rear == current.next_node:
self.rear = current
temp = current.next_node
current.next_node = current.next_node.next_node
return temp
else:
loop = loop + 1
current = current.next_node
############################################
#### TO PRINT: ####
#### ####
############################################
# test_queue = AnimalShelter(['dog', 'dog', 'cat', 'cat', 'cat'])
# test_queue.enqueue('dog')
# test_queue.dequeue('dog')
# test_queue.dequeue('cat')
# current = test_queue.front
# while current:
# print(current.value)
# current = current.next_node
# print('front: ', test_queue.front.value)
# print('rear: ', test_queue.rear.value)
```
#### File: get_edge/tests/conftest.py
```python
import pytest
from ..get_edge import Graph
@pytest.fixture()
def graph_empty():
g = Graph()
return g
@pytest.fixture()
def graph_one():
g = Graph()
g.gdict = {
'A': {'B': 10},
'B': {'A': 5, 'D': 15, 'C': 20},
'C': {'E': 1},
'D': {'A': 5},
'E': {'F': 2, 'B': 4},
'F': {'D': 11}
}
return g
@pytest.fixture()
def graph_two():
g = Graph()
g.gdict = {
'A': {'B': 10, 'C': 15},
'B': {'D': 15, 'E': 5, 'C': 2},
'C': {'F': 50, 'G': 25},
'D': {},
'E': {'C': 5},
'F': {'E': 10},
'G': {'F': 20}
}
return g
@pytest.fixture()
def graph_map():
map = {
'Pandora': {'Arendelle': 150, 'Metroville': 82},
'Arendelle': {'Pandora': 150, 'Metroville': 99, 'New Monstropolis': 42},
'Metroville': {'Pandora': 82, 'Arendelle': 99, 'New Monstropolis': 105, 'Naboo': 26, 'Narnia': 37},
'New Monstropolis': {'Arendelle': 42, 'Metroville': 105, 'Naboo': 73},
'Naboo': {'New Monstropolis': 73, 'Metroville': 26, 'Narnia': 250},
'Narnia': {'Metroville': 37, 'Naboo': 250}
}
g = Graph(map)
return g
```
#### File: data_structures/linked_list/linked_list.py
```python
from .node import Node
# from node import Node
class LinkedList(object):
"""To generate linked lists with input values."""
def __init__(self, iterable=None):
self.head = None
self._size = 0
if iterable is None:
iterable = []
if type(iterable) is not list:
raise TypeError('Iterable must be of type list.')
for val in iterable:
self.insert(val)
def __str__(self):
output = f'Linked List: Head val is: {self.head}'
return output
def __repr__(self):
output = f'<LinkedList: head - {self.head} size - {self._size} >'
return output
def __len__(self):
return self._size
def insert(self, value):
"""To insert a value to the linked list."""
self.head = Node(value, self.head)
self._size += 1
def includes(self, searched):
""" To tell whether the ll includes the designated valude."""
current = self.head
while current:
if current.val == searched:
return True
current = current._next
return False
def append(self, newVal):
"""To insert newVal to the tail of the linked list."""
current = self.head
if current is None:
self.head = Node(newVal)
self._size += 1
return
while current._next:
current = current._next
current._next = Node(newVal)
self._size += 1
def insertBefore(self, value, newVal):
"""To insert newVal right before the matched value in the linked list."""
current = self.head
if current is None:
self.head = Node(newVal)
self._size += 1
return
if current.val == value:
self.head = Node(newVal, self.head)
self._size += 1
return
while current._next:
if current._next.val == value:
current._next = Node(newVal, current._next)
self._size += 1
return
current = current._next
else:
print('There is no matched value in the linked list.')
return
def insertAfter(self, value, newVal):
"""To insert newVal right after the matched value in the linked list."""
current = self.head
if current is None:
self.head = Node(newVal)
self._size += 1
return
if current.val == value:
self.head = Node(newVal, current)
self._size += 1
return
while current._next:
if current.val == value:
print('found match!')
current._next = Node(newVal, current._next)
self._size += 1
return
current = current._next
else:
print('There is no matched value in the linked list.')
return
def kth_from_end(self, k):
"""To return the value of the kth node from the end."""
try:
ruler = self.head._next
except:
print('The kth value is not available.')
return('The kth value is not available.')
if k < 0:
print('Please enter a non-negative integer as the argument.')
return('Please enter a non-negative integer as the argument.')
try:
for i in range(k):
ruler = ruler._next
except:
print('The kth value is not available.')
return('The kth value is not available.')
current = self.head
while ruler:
current = current._next
ruler = ruler._next
else:
print(current.val)
return(current.val)
#FOR RUNNING PYTHON LINKED_LIST.PY IN THE TERMINAL:
# fix = LinkedList()
# fix.append('apple')
# fix.append('banana')
# fix.append('cucumber')
# fix.append('date')
# fix.append('elderberry')
# fix.kth_from_end(-1)
# #TO PRINT:
# result = fix.head
# while result:
# print(result.val)
# result = result._next
```
#### File: data_structures/queue_with_stacks/test_queue_with_stacks.py
```python
from .queue_with_stacks import PseudoQueue
import pytest
def test_pseudoqueue_exit():
"""To test that the class PseudoQueue exists."""
assert PseudoQueue
def test_pseudoqueue_create_a_pseudoqueue():
"""To test that the class PseudoQueue creates a pseudoqueue."""
result = PseudoQueue()
assert type(result) == PseudoQueue
def test_pseudoqueue_enqueue_empty():
"""To test enqueue return an empty stack."""
result = PseudoQueue()
result.enqueue(None)
assert result.stack_1.top.value is None
assert len(result.stack_1) == 1
def test_pseudoqueue_enqueue():
"""To test enqueue properly works with valid inputs."""
result = PseudoQueue()
result.enqueue('a')
result.enqueue('b')
result.enqueue('c')
assert result.rear.value == 'c'
assert len(result.stack_1) == 3
def test_dequeue():
"""To test dequeue properly works with valid inputs."""
result = PseudoQueue()
result.enqueue('a')
result.enqueue('b')
result.enqueue('c')
result.dequeue()
assert result.rear.value == 'c'
assert len(result.stack_1) == 2
def test_dequeue_consecutively():
"""To test dequeue properly works consecutive times."""
result = PseudoQueue()
result.enqueue('a')
result.enqueue('b')
result.enqueue('c')
result.dequeue()
result.dequeue()
assert result.rear.value == 'c'
assert len(result.stack_1) == 1
def test_dequeue_with_empty_error():
"""To test dequeue with an empty self."""
result = PseudoQueue()
answer = result.dequeue()
assert answer == f'Input stack cannot be empty.'
```
#### File: repeated_word/tests/test_repeated_word.py
```python
from ..repeated_word import Hash
import pytest
def test_class_exist():
"""To test that the class Graph exists."""
assert Hash
def test_str_method_empty():
"""To test the output of str method."""
h = Hash()
assert str(h) == 'Hash table length is 0'
def test_repr_method_empty():
"""To test the output of repr method."""
h = Hash()
assert repr(h) == '<Hash table list: []>'
def test_add_hash_with_valid_value():
"""To test add_hash method with valid input."""
h = Hash()
h.add_hash(["apple", 300])
assert repr(h) == "<Hash table list: [[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [['apple', 300]]]>"
def test_add_hash_with_valid_value_2():
"""To test add_hash method with valid input."""
h = Hash([["apple", 300], ['banana', 15]])
assert repr(h) == "<Hash table list: [[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [['apple', 300]], [], [], [], [], [], [], [['banana', 15]]]>"
def test_add_hash_with_valid_value_3():
"""To test add_hash method with valid input."""
h = Hash([["apple", 300], ['banana', 15], ['baaann', 30]])
assert repr(h) == "<Hash table list: [[], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [], [['apple', 300]], [], [], [], [], [], [], [['banana', 15], ['baaann', 30]]]>"
def test_retrieve_val_with_empty_value():
"""To test retrieve_val method with empty input."""
h = Hash([["apple", 300], ['banana', 15]])
with pytest.raises(TypeError) as err:
h.retrieve_val(None)
assert str(err.value) == 'No key to look for.'
def test_retrieve_val_with_valid_value_1():
"""To test retrieve_val method with valid input."""
h = Hash([["apple", 300], ['banana', 15]])
assert h.retrieve_val('apple') == 300
def test_retrieve_val_with_valid_value_2():
"""To test retrieve_val method with valid input."""
h = Hash([["apple", 300], ['banana', 15]])
h.add_hash(['cranberry', 45])
assert h.retrieve_val('cranberry') == 45
def test_retrieve_val_with_value_not_exist():
"""To test retrieve_val method with valid input."""
h = Hash([["apple", 300], ['banana', 15]])
assert h.retrieve_val('cranberry') == 'No matching key is found.'
def test_add_word_with_valid_value_1():
"""To test add_word method with valid input."""
h = Hash()
h.add_word(['apple', None])
assert str(h) == 'Hash table length is 53'
def test_add_word_with_empty_key():
"""To test add_word method with an empty key."""
h = Hash()
with pytest.raises(TypeError) as err:
h.add_word([None, None])
assert str(err.value) == 'No word in the string.'
def test_repeated_word_with_empty_string():
"""To test repeated_word method with an empty string."""
h = Hash()
with pytest.raises(TypeError) as err:
h.repeated_word('')
assert str(err.value) == 'No word in the string.'
def test_repeated_word_with_repeated_word(capsys):
"""To test repeated_word method with repeated word in a string."""
h = Hash()
string = "Every day is a good day is a wonderful day."
h.repeated_word(string)
captured = capsys.readouterr()
assert captured.out == 'day\n'
def test_repeated_word_with_non_string():
"""To test repeated_word method with a non-string."""
h = Hash()
string = ['abc', None]
with pytest.raises(TypeError) as err:
h.repeated_word(string)
assert str(err.value) == 'Input is not a string.'
```
#### File: data_structures/sorting_algos/selection.py
```python
class Sort(object):
"""
"""
def __init__(self, iterable=None):
if iterable is None:
iterable = []
if type(iterable) is not list:
raise TypeError('Input is not a list.')
self.len = len(iterable)
self.lst = iterable
def __repr__(self):
output = f'<Input list: { self.lst }>'
return output
def __str__(self):
output = f'Input list length is { self.len }'
return output
def selection(self):
"""
"""
for i in self.lst:
if isinstance(i, str):
raise TypeError('Items must be integers.')
for i in range(len(self.lst)):
smallest_idx = i
for a in range(i+1, len(self.lst)):
if self.lst[smallest_idx] > self.lst[a]:
smallest_idx = a
self.lst[i], self.lst[smallest_idx] = self.lst[smallest_idx], self.lst[i]
# print(self.lst)
return(self.lst)
# inpu = Sort([1, 2, 11, 4, 5, -11])
# inpu.selection()
```
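A quick usage check of the class above (selection sort is O(n^2), so it is only meant for small lists):
```python
s = Sort([1, 2, 11, 4, 5, -11])
print(s.selection())  # [-11, 1, 2, 4, 5, 11]
```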
#### File: data_structures/stack/stack.py
```python
from .node import Node
# from node import Node
class Stack(object):
"""To create a node for a stack and other related methods."""
def __init__(self, iterable=None):
self.top = None
self._size = 0
if iterable is None:
iterable = []
if type(iterable) is not list:
raise TypeError('Iterable is not a list.')
for what in iterable:
self.push(what)
def __str__(self):
output = f'Stack: Value of the top stack is: {self.top.value}'
return output
def __len__(self):
return self._size
def __repr__(self):
return f'<STACK Top: { self.top }>'
def push(self, value):
"""
"""
self.top = Node(value, self.top)
# node = Node(value)
# node.next_node = self.top
# self.top = node
self._size += 1
return self
def pop(self):
"""
"""
if self.top:
old_top = self.top
self.top = old_top.next_node
old_top.next_node = None
self._size -= 1
return old_top
else:
return f'Input stack cannot be empty.'
def peek(self):
"""
"""
try:
return self.top.value
except:
return f'Input must be a non-empty stack.'
################################################
##### To print: #####
##### #####
################################################
new_stack = Stack([1, 2, 3, 4])
# new_stack.push(5)
# new_stack.pop()
# new_stack.pop()
# new_stack.pop()
print(type(new_stack))
```
#### File: data_structures/stack/test_stack.py
```python
from .stack import Stack
import pytest
@pytest.fixture
def empty_stack():
"""To create an empty queue for testing purpose."""
return Stack()
def test_stack_exist():
"""To test that the class Stack exists."""
assert Stack
def test_str_method():
"""To test the output of str method."""
short = Stack([1, 2, 3])
assert str(short) == f'Stack: Value of the top stack is: 3'
def test_size_method():
"""To test the size method."""
short = Stack([1, 2, 3])
assert len(short) == 3
def test_push_with_valid_input():
"""To test push method with valid input."""
stack_new = Stack()
stack_new.push(5)
assert stack_new.top.value == 5
def test_push_with_list_input():
"""To test push method with a list input."""
stack_new = Stack()
stack_new.push([1, 2, 3])
assert stack_new.top.value == [1, 2, 3]
def test_pop_with_valid_input():
"""To test pop method with valid input."""
stack_new = Stack([1, 2, 3])
stack_new.pop()
assert stack_new.top.value == 2
def test_pop_with_valid_input_2():
"""To test pop method with valid input."""
stack_new = Stack([1, 2, 3])
result = stack_new.pop()
assert result.value == 3
assert result.next_node is None
def test_pop_with_empty_stack():
"""To test pop method with invalid input."""
stack_new = Stack()
result = stack_new.pop()
assert result == f'Input stack cannot be empty.'
def test_peek_with_valid_input():
"""To test peek method with valid input."""
stack_new = Stack([1, 2, 3])
assert stack_new.peek() == 3
def test_peek_with_empty_stack():
"""To test peek method with invalid input."""
stack_new = Stack()
assert stack_new.peek() == f'Input must be a non-empty stack.'
```
#### File: data_structures/tree_intersection/tree_intersection.py
```python
class NodeTree(object):
"""This class is set up to create new Nodes."""
def __init__(self, val, left=None, right=None):
self.val = val
self.left = left
self.right = right
def __str__(self):
return f'{ self.val }'
def __repr__(self):
return f'<NODE: { self.val }>'
class BST(object):
"""To create a node(s) for a binary search tree and its related methods."""
def __init__(self, iterable=None):
self.root = None
self._size = 0
self.set_one = set()
self.shared = set()
if iterable is None:
iterable = []
if type(iterable) is not list:
raise TypeError('Iterable is not a list.')
for what in iterable:
self.add_node(what)
def __str__(self):
output = f'Value of the root Queue is: {self.root.val}'
return output
def __len__(self):
return self._size
def __repr__(self):
return f'<BST root: { self.root.val }>'
    def add_node(self, val):
        """Insert val into the tree, preserving binary-search ordering."""
        if self.root is None:
            self.root = NodeTree(val)
            return
        def insertion(node, val):
            if node is None:
                node = NodeTree(val)
                return
            if node.val < val:
                if node.right is None:
                    node.right = NodeTree(val)
                    return
                else:
                    insertion(node.right, val)
                    return
            elif node.val > val:
                if node.left is None:
                    node.left = NodeTree(val)
                    return
                else:
                    insertion(node.left, val)
                    return
        insertion(self.root, val)
def in_order_traversal(self, node=None):
if self.root is None:
raise TypeError(f'There is no node to traverse.')
else:
if node.left:
self.in_order_traversal(node.left)
print(node.val)
if node.right:
self.in_order_traversal(node.right)
def pre_order_traversal(self, node=None):
if self.root is None:
raise TypeError(f'There is no node to traverse.')
else:
print(node.val)
if node.left:
self.pre_order_traversal(node.left)
if node.right:
self.pre_order_traversal(node.right)
def post_order_traversal(self, node=None):
if self.root is None:
raise TypeError(f'There is no node to traverse.')
else:
if node.left:
self.post_order_traversal(node.left)
if node.right:
self.post_order_traversal(node.right)
print(node.val)
def post_order_set(self, node=None):
if self.root is None:
raise TypeError(f'There is no node to traverse.')
else:
if node.left:
self.post_order_set(node.left)
if node.right:
self.post_order_set(node.right)
self.set_one.add(node.val)
def post_order_intersection(self, node=None):
if node is None:
raise TypeError(f'There is no node to traverse.')
else:
if node.left:
self.post_order_intersection(node.left)
if node.right:
self.post_order_intersection(node.right)
if node.val in self.set_one:
self.shared.add(node.val)
def tree_intersection(self, tree_two):
# To populate self.set_one:
self.set_one = set()
self.post_order_set(self.root)
print('set_one is: ', self.set_one)
# To populate self.shared:
self.shared = set()
self.post_order_intersection(tree_two.root)
print('shared is: ', self.shared)
# tree_one = BST([10, 12, 11, 15, 20, 17])
# tree_two = BST([40, 15, 47, 20, 30, 50, 65])
# tree_one.tree_intersection(tree_two)
# TREE_ONE SET_ONE BASED ON [11, 17, 20, 15, 12, 10]: {10, 11, 12, 15, 17, 20}
# TREE_TWO SET_ONE BASED ON [30, 20, 15, 65, 50, 47, 40]: {65, 40, 15, 47, 50, 20, 30}
``` |
{
"source": "joyliao07/django_lender",
"score": 3
} |
#### File: lender_books/templatetags/extras.py
```python
from django.utils import timezone
from django import template
register = template.Library()
@register.filter
def get_date_string(value):
"""
"""
now_aware = timezone.now()
delta = value - now_aware
if delta.days == 0:
return 'Today!'
elif delta.days < 1:
return f'{ abs(delta.days) } { "day" if abs(delta.days) == 1 else "days" } ago.'
elif delta.days == 1:
return 'Tomorrow'
elif delta.days > 1:
return f'In { delta.days } days'
else:
return 'You lose!'
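# Hypothetical quick check outside a template (the offsets are assumptions):
# from datetime import timedelta
# print(get_date_string(timezone.now() + timedelta(days=3, hours=1)))  # 'In 3 days'
# print(get_date_string(timezone.now() + timedelta(days=1, hours=1)))  # 'Tomorrow'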
``` |
{
"source": "joyliao07/ec2_pipeline",
"score": 3
} |
#### File: ec2_pipeline/project_data/models.py
```python
from django.db import models
class Kickstarter(models.Model):
"""To create model Kickstarter."""
kickstarter_id = models.CharField(max_length=1024, primary_key=True)
name = models.CharField(max_length=1024)
category = models.CharField(max_length=1024)
main_category = models.CharField(max_length=1024)
currency = models.CharField(max_length=1024)
deadline = models.CharField(max_length=1024)
goal = models.CharField(max_length=1024)
launched = models.CharField(max_length=1024)
pledged = models.CharField(max_length=1024)
state = models.CharField(max_length=1024)
backers = models.CharField(max_length=1024)
country = models.CharField(max_length=1024)
usd_pledged = models.CharField(max_length=1024)
usd_pledged_real = models.CharField(max_length=1024)
usd_goal_real = models.CharField(max_length=1024)
def __str__(self):
return '{}'.format(self.name)
```
#### File: ec2_pipeline/project_data/views.py
```python
from django.shortcuts import render, get_list_or_404, get_object_or_404
from django.core.cache.backends.base import DEFAULT_TIMEOUT
from django.views.decorators.cache import cache_page
from django.core.paginator import Paginator
from django.conf import settings
from .models import Kickstarter
CACHE_TTL = getattr(settings, 'CACHE_TTL', DEFAULT_TIMEOUT)
@cache_page(CACHE_TTL)
def kickstarter_list(request):
"""
"""
kickstarter_list = get_list_or_404(Kickstarter)
paginator = Paginator(kickstarter_list, 20)
page = request.GET.get('page') # https://site.com?page=20
kickstarters = paginator.get_page(page)
context = {
"kickstarters": kickstarters
}
return render(request, 'details/kickstarter_list.html', context)
# def review_detail_view(request, pk):
# context = {
# 'review': get_object_or_404(Review, pk=pk),
# }
# return render(request, 'reviews/review_detail.html', context)
``` |
{
"source": "joyliao07/madlib-cli",
"score": 3
} |
#### File: joyliao07/madlib-cli/test_madlib.py
```python
import madlib
# import pytest
def test_read_file():
"""To test read_file to make sure the output is correct."""
contents = madlib.read_file('./infile.txt')
assert contents.startswith('Make Me A Video Game!\n')
def test_welcome(capsys):
"""To test welcome()'s printed words."""
madlib.welcome()
captured = capsys.readouterr()
assert captured.out == 'Welcome to Madlib - Make me a video game!\nTo quit at any time, enter "quit".\n \nPlease follow the promps to enter your choices of words.\n'
def test_check_string_two_outputs():
"""To make sure check_string output the correct characters."""
ques, story = madlib.check_string('Here I {abc} a very good time.')
assert story == story
assert ques == '{abc}'
def test_update_newlist():
"""To ensure update_newlist correctly replace desired characters."""
ques = '{abc}'
phrase = 'Here I {abc} a very good time.'
user_input = 'surely had'
result = madlib.update_newlist(ques, phrase, user_input)
assert result == 'Here I surely had a very good time.'
def test_write_file():
"""To ensure write_file has correct output"""
contents = 'yay'
path = 'testfile.txt'
madlib.write_file(path, contents)
with open(path) as f:
assert f.read() == contents
``` |
{
"source": "joyliu37/coreir",
"score": 2
} |
#### File: magma/inline/test_inline_operators.py
```python
import magma as m
from magma.testing.utils import check_files_equal
import mantle
import fault
import fault.random
import os
def test_simple_top():
Top = m.DefineCircuit("Top", "I0", m.In(m.UInt(8)), "I1", m.In(m.UInt(8)), "O", m.Out(m.UInt(8)))
sum_ = Top.I0 + Top.I1
m.wire(sum_, Top.O)
m.EndCircuit()
m.compile("test_simple_top", Top, output="coreir-verilog", inline=True)
# assert check_files_equal(__file__, "build/test_simple_top.v",
# "gold/test_simple_top.v")
def test_two_ops():
Top = m.DefineCircuit("test_two_ops", "I0", m.In(m.UInt(8)), "I1", m.In(m.UInt(8)), "O", m.Out(m.UInt(8)))
result = Top.I0 + Top.I1 - Top.I0
m.wire(result, Top.O)
m.EndCircuit()
m.compile("test_two_ops", Top, output="coreir-verilog", inline=True)
# assert check_files_equal(__file__, "build/test_two_ops.v",
# "gold/test_two_ops.v")
# Roundabout way to do this since we can't pass the --inline flag through
# fault's tester interface yet
tester = fault.Tester(Top)
for i in range(0, 16):
I0 = fault.random.random_bv(8)
I1 = fault.random.random_bv(8)
tester.poke(Top.I0, I0)
tester.poke(Top.I1, I1)
tester.eval()
tester.expect(Top.O, I0 + I1 - I0)
tester.compile_and_run(target="verilator",
skip_compile=True,
directory=".")
def test_const():
Top = m.DefineCircuit("test_const", "I0", m.In(m.UInt(8)), "I1", m.In(m.UInt(8)), "O", m.Out(m.UInt(8)))
result = Top.I0 + Top.I1 * m.uint(3, 8)
m.wire(result, Top.O)
m.EndCircuit()
m.compile("test_const", Top, output="coreir-verilog", inline=True)
tester = fault.Tester(Top)
for i in range(0, 16):
I0 = fault.random.random_bv(8)
I1 = fault.random.random_bv(8)
tester.poke(Top.I0, I0)
tester.poke(Top.I1, I1)
tester.eval()
tester.expect(Top.O, I0 + I1 * 3)
tester.compile_and_run(target="verilator",
skip_compile=True,
directory=".")
```
#### File: unit/circuits/gen_nested_clock_tuple.py
```python
import magma as m
import mantle
class TestNestedClockTuple(m.Circuit):
IO = ["I", m.In(m.Tuple(clk1=m.Clock, clk2=m.Clock, i=m.Bit)),
"O", m.Out(m.Bits(2))]
@classmethod
def definition(io):
ff0 = mantle.FF()
ff1 = mantle.FF()
m.wire(io.I.clk1, ff0.CLK)
m.wire(io.I.clk2, ff1.CLK)
m.wire(io.I.i, ff0.I)
m.wire(io.I.i, ff1.I)
m.wire(m.bits([ff0.O, ff1.O]), io.O)
class TestNestedClockTupleMain(m.Circuit):
IO = ["CLK", m.In(m.Clock), "I", m.In(m.Bit), "O", m.Out(m.Bits(2))]
@classmethod
def definition(io):
# Coreir should automatically wire io.CLK to circ.
circ = TestNestedClockTuple()
m.wire(circ.I.i, io.I)
m.wire(circ.O, io.O)
print(repr(TestNestedClockTuple))
m.compile("TestNestedClockTuple", TestNestedClockTupleMain, output="coreir")
``` |
{
"source": "JoyLubega/Glory-Recipes",
"score": 3
} |
#### File: Api/contollers/categories.py
```python
import re
from flask import jsonify, url_for
from flask import request
from ..models.category import CategoryModel
class Category(object):
"""
Handles all category operations
"""
@staticmethod
def create_category(name, parent_id):
"""
Creates a new recipe category
        :param name:
        :param parent_id:
        :return: object
"""
if not name:
response = jsonify({'Error': 'Missing name'})
response.status_code = 400
return response
if type(name) is int:
            response = jsonify({'Error': "Numbers can't be a name"})
response.status_code = 400
return response
if not re.match(r"(^[a-zA-Z_ ]*$)", name):
            response = jsonify(
                {'message':
                 'Name should contain only alphabetical characters'}
            )
response.status_code = 400
return response
if re.match(r"(^[ ]*$)", name):
response = jsonify(
{'message':
'A space is not a name'}
)
response.status_code = 400
return response
category_name = name.lower()
category = CategoryModel(
name=category_name, parent_id=parent_id)
try:
category.save()
response = jsonify({
'id': category.id,
'name': category.name,
'date_added': category.date_added,
'parent_id': category.parent_id
})
response.status_code = 201
return response
except Exception:
response = jsonify(
{
'Error': 'Name ' + category.name.capitalize() + ' exists'
}
)
response.status_code = 409
return response
@staticmethod
def get_categories(search, limit):
"""
Gets all recipe categories
        :param search:
        :param limit:
        :return:
"""
page = request.args.get('page', 1, type=int)
response = CategoryModel.query.limit(limit).all()
if not response:
response = jsonify({"Msg": "No categories found"})
            response.status_code = 404
return response
else:
results = []
if search:
categories = CategoryModel.query.filter(
CategoryModel.name.ilike('%{0}%'.format(search)))
else:
categories = CategoryModel.query.order_by(CategoryModel.id)
if categories:
pagination = categories.paginate(
page, per_page=limit, error_out=False)
category_lists = pagination.items
if pagination.has_prev:
prev = url_for('get_categories', page=page -
1, limit=limit, _external=True)
else:
prev = None
if pagination.has_next:
next = url_for('get_categories', page=page +
1, limit=limit, _external=True)
else:
next = None
if category_lists:
for cat in category_lists:
obj = {
'id': cat.id,
'name': cat.name,
'date_created': cat.date_added,
'parent_id': cat.parent_id,
}
results.append(obj)
response = jsonify({
'Category': results,
'prev': prev,
'next': next,
'count': pagination.total
})
response.status_code = 200
return response
else:
return {'message': 'No Categories to display'}, 404
@staticmethod
def get_single_category(category_id):
"""
Gets single category
:param category_id:
"""
category = CategoryModel.query.filter_by(id=category_id).first()
if not category:
response = jsonify({
'error': 'category with id: ' +
str(category_id) + ' is not found'
})
response.status_code = 404
return response
category_data = {
'id': category.id,
'name': category.name,
'Parent': category.parent_id,
'date_added': category.date_added,
}
response = jsonify(category_data)
response.status_code = 200
return response
@staticmethod
def delete_category(category_id):
"""
Delete single category
:param category_id:
"""
category = CategoryModel.query.filter_by(id=category_id).first()
if not category:
response = jsonify({
'error': 'category with id: ' +
str(category_id) + ' is not found'
})
response.status_code = 404
return response
else:
category.delete()
category_data = {
'id': category.id,
'name': category.name,
'Parent': category.parent_id,
'date_added': category.date_added,
}
response = jsonify({"msg": "category deleted permanently",
"data": category_data})
response.status_code = 200
return response
@staticmethod
def update_category(category_id, data):
"""
Updates a category
:param category_id:
:param category_name:
:param parent_id:
"""
category = CategoryModel.query.filter_by(id=category_id).first()
if category:
if 'name' in data:
if data['name'] != category.name and \
CategoryModel.query.filter_by(
name=data['name']).first():
                    return jsonify({'msg': 'please use a different category name'})
else:
category.name = data['name']
if 'parent_id' in data:
if data['parent_id'] == category.id:
return jsonify({'msg': 'Can not be a parent to self'})
else:
category.parent_id = data['parent_id']
category.update()
            updated = CategoryModel.query.filter_by(id=category_id).first()
            result = {
                'id': updated.id,
                'name': updated.name,
                'Parent': updated.parent_id
}
response = jsonify(
{
'message': 'Category has been successfully updated',
'category': result
}
)
response.status_code = 200
return response
else:
response = jsonify({"Error": "The category does not exist"})
response.status_code = 404
return response
```
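Because each controller method already returns a finished Flask response, exposing them only takes a thin route layer. Below is a minimal sketch of that wiring; the app setup and request parsing are assumptions for illustration (the import path follows the repo's `Api/contollers/` spelling), but the view function name `get_categories` matters, since the controller builds pagination links with `url_for('get_categories', ...)`.
```python
# Hypothetical route layer for the Category controller (illustration only).
from flask import Flask, request

from Api.contollers.categories import Category

app = Flask(__name__)

@app.route('/categories', methods=['POST'])
def create_category():
    data = request.get_json(force=True)
    return Category.create_category(data.get('name'), data.get('parent_id'))

@app.route('/categories', methods=['GET'])
def get_categories():
    # The endpoint name must stay 'get_categories' for url_for() to resolve.
    return Category.get_categories(request.args.get('q'),
                                   request.args.get('limit', 10, type=int))

@app.route('/categories/<int:category_id>', methods=['GET'])
def get_single_category(category_id):
    return Category.get_single_category(category_id)
```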
#### File: Api/models/category.py
```python
from datetime import datetime
from Api.api import db
class CategoryModel(db.Model):
"""
Category database Model
"""
__tablename__ = 'categories'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(50), unique=True)
    # Pass the callable so the timestamp is evaluated per row, not once at import time.
    date_added = db.Column(db.DateTime, default=datetime.utcnow)
    date_updated = db.Column(db.DateTime, default=datetime.utcnow)
parent_id = db.Column(
db.Integer, db.ForeignKey('categories.id'), nullable=True)
children = db.relationship("CategoryModel")
def __init__(self, name, parent_id):
self.name = name
self.parent_id = parent_id
def save(self):
"""
Save category to data store
"""
db.session.add(self)
db.session.commit()
@staticmethod
def update():
"""Updates category"""
db.session.commit()
@staticmethod
def get_all():
"""Get all categories"""
        return CategoryModel.query.all()
def delete(self):
"""Delete Category"""
db.session.delete(self)
db.session.commit()
def __repr__(self) -> str:
return "<Category: {}>".format(self.name)
```
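The model is self-referential: `parent_id` points back at `categories.id`, and the `children` relationship collects sub-categories. A quick sketch of the hierarchy in use (assumes an app context and an initialised `db` session, as in `Api.api`; the category names are made up):
```python
# Illustrative only; requires an app context and a configured database.
parent = CategoryModel(name='baking', parent_id=None)
parent.save()

child = CategoryModel(name='bread', parent_id=parent.id)
child.save()

print(parent.children)              # [<Category: bread>]
print(child.parent_id == parent.id) # True
```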
#### File: Api/models/recipes.py
```python
from datetime import datetime
from Api.api import db
class RecipeModel(db.Model):
"""
Recipe Database Model
"""
__tablename__ = 'recipes'
id = db.Column(
db.Integer, primary_key=True, autoincrement=True, index=True)
name = db.Column(db.String(100))
    date_added = db.Column(db.DateTime, default=datetime.utcnow)
    date_updated = db.Column(db.DateTime, default=datetime.utcnow)
category_id = db.Column(db.Integer, db.ForeignKey('categories.id'))
user_id = db.Column(db.Integer, db.ForeignKey('users.id'))
recipe_text = db.Column(db.String(100), index=True)
__table_args__ = (db.UniqueConstraint(
'category_id', 'name', name='unq_i_name'),)
def __init__(self, name, category_id, user_id, recipe_text):
self.name = name
self.category_id = category_id
self.user_id = user_id
self.recipe_text = recipe_text
def save(self):
"""
Save Recipe to Data Store
"""
db.session.add(self)
db.session.commit()
@staticmethod
def get_all():
"""Get all recipes"""
        return RecipeModel.query.all()
def delete(self):
"""Delete Item"""
db.session.delete(self)
db.session.commit()
@staticmethod
def update():
"""Update recipes """
db.session.commit()
def __repr__(self) -> str:
return "<Recipe: {}>".format(self.name)
```
#### File: Api/models/reviews.py
```python
from datetime import datetime
from Api.api import db
class ReviewsModel(db.Model):
"""
Review Database Model includes comments and rates
"""
__tablename__ = 'reviews'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    date_added = db.Column(db.DateTime, default=datetime.utcnow)
    date_updated = db.Column(db.DateTime, default=datetime.utcnow)
recipe_id = db.Column(db.Integer, db.ForeignKey('recipes.id'))
comment_text = db.Column(db.String(100))
rate = db.Column(db.Integer, nullable=False)
def __init__(self, comment_text, recipe_id, rate):
self.comment_text = comment_text
self.recipe_id = recipe_id
self.rate = rate
def save(self):
"""
Save reviews to Data Store
"""
db.session.add(self)
db.session.commit()
@staticmethod
def get_all():
"""Get all Reviews"""
        return ReviewsModel.query.all()
def delete(self):
"""Delete a reeview"""
db.session.delete(self)
db.session.commit()
    def __repr__(self) -> str:
        return "<Comment: {} Rate: {} recipe_id: {}>".format(
            self.comment_text, self.rate, self.recipe_id)
```
#### File: Api/models/user.py
```python
from Api.api import db
from werkzeug.security import generate_password_hash, check_password_hash
class UserModel(db.Model):
"""
User Database model
"""
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
name = db.Column(db.String(100))
email = db.Column(db.String(100), unique=True)
password = db.Column(db.String(200))
status = db.Column(db.String(100))
def __init__(self, email, password, name=None):
self.email = email
self.name = name
self.password = generate_password_hash(password)
@staticmethod
def check_password(pw_hash, password):
"""
Validates password
:param pw_hash:
:param password:
"""
return check_password_hash(pw_hash, password)
def save(self):
"""
Save User to Data store
"""
db.session.add(self)
db.session.commit()
@staticmethod
def update():
"""Updates user"""
db.session.commit()
@staticmethod
def get_all():
"""Get all Users"""
return UserModel.query.all()
def delete(self):
"""Delete User"""
db.session.delete(self)
db.session.commit()
def __repr__(self) -> str:
return "<User: {}>".format(self.name)
``` |
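Note that `__init__` stores only the werkzeug hash of the password, so verification has to go through `check_password`. A minimal sketch of the round trip (the credentials are made up; no database session is needed just to hash and check):
```python
# Illustrative only; model instantiation alone does not touch the database.
user = UserModel(email='ann@example.com', password='s3cret', name='Ann')

print(UserModel.check_password(user.password, 's3cret'))   # True
print(UserModel.check_password(user.password, 'wrong'))    # False
```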
{
"source": "JoyLubega/TheShoppingListAPI",
"score": 3
} |
#### File: TheShoppingListAPI/tests/test_shoppinglist.py
```python
import unittest
from flask import json
from api import db
from api.shoppinglistApi import app
from instance.config import application_config
class ShoppingListTestCase(unittest.TestCase):
def setUp(self):
app.config.from_object(application_config['TestingEnv'])
self.client = app.test_client()
self.shoppinglist = {'shoppinglist': 'Go to Goborola'}
# Binds the app to current context
with app.app_context():
# Create all tables
db.create_all()
user = json.dumps({
'email': '<EMAIL>',
'password': '<PASSWORD>',
'name': 'Joyce',
})
response = self.client.post('/auth/register', data=user)
json_repr = json.loads(response.data.decode())
self.token = json_repr['token']
def test_add_shoppinglist_without_name(self):
"""Should return 400 for missing shoppinglist name"""
shoppinglist= json.dumps({
'name': '',
'desc': 'travel'
})
response = self.client.post('/shoppinglists', data=shoppinglist,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 404)
self.assertIn('Missing name', response.data.decode())
def test_add_shoppinglist_successfully(self):
"""Should return 201 for shoppinglist added"""
shoppinglist = json.dumps({
'name': 'Travel',
'desc': 'Visit places'
})
response = self.client.post('/shoppinglists', data=shoppinglist,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 201)
print(str(response.data.decode()))
self.assertIn('travel', response.data.decode())
def test_add_shoppinglist_with_existing_shoppinglist_name(self):
"""Should return 400 for existing shoppinglist name"""
# First Add shoppinglist
self.test_add_shoppinglist_successfully()
shoppinglist = json.dumps({
'name': 'Travel',
'desc': 'travel'
})
response = self.client.post('/shoppinglists', data=shoppinglist,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 409)
self.assertIn('Shoppinglist name Already exists', response.data.decode())
def test_get_shoppinglist_when_DB_is_empty(self):
"""Should return no shoppinglist lists msg"""
response = self.client.get('/shoppinglists',
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 400)
def test_get_shoppinglist(self):
"""Should return all shoppinglists"""
# First add shoppinglist
self.test_add_shoppinglist_successfully()
response = self.client.get('/shoppinglists',
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 200)
self.assertIn('travel',
response.data.decode())
def test_get_shoppinglist_search(self):
"""Should return 200 and shoppinglist"""
# First add shoppinglist
self.test_add_shoppinglist_successfully()
response = self.client.get('/shoppinglists?q=travel',
headers={"Authorization": self.token})
print(response.data.decode())
self.assertEqual(response.status_code, 200)
self.assertIn('travel',
response.data.decode())
def test_get_single_shoppinglist(self):
"""Should return 200 and shoppinglists"""
# First add shoppinglists
self.test_add_shoppinglist_successfully()
response = self.client.get('/shoppinglists/1',
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 200)
        self.assertIn('travel',
                      response.data.decode())
def test_get_single_shoppinglist_with_no_shoppinglist(self):
"""Should return 404 if no shoppinglist"""
response = self.client.get('/shoppinglists/1',
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 404)
self.assertIn('not found',
response.data.decode())
def test_get_single_shoppinglist_not_existing(self):
"""Should return 400 for doesnt exists"""
# First add shoppinglists
self.test_add_shoppinglist_successfully()
response = self.client.get('/shoppinglists/2',
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 404)
self.assertIn('shoppinglist with id 2 not found',
response.data.decode())
    def test_get_single_shoppinglist_returns_name(self):
"""Should return a single shoppinglist"""
# First add shoppinglists
self.test_add_shoppinglist_successfully()
response = self.client.get('/shoppinglists/1',
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 200)
self.assertIn('travel',
response.data.decode())
def test_update_shoppinglist_which_doesnt_exist(self):
"""
Should return 400 for shoppinglist
does not exists
"""
# First add shoppinglist
self.test_add_shoppinglist_successfully()
shoppinglist = json.dumps({
'name': 'travel',
'desc': 'Visit places'
})
response = self.client.put('/shoppinglists/2', data=shoppinglist,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 400)
self.assertIn('does not exist', response.data.decode())
def test_update_shoppinglist_without_name(self):
"""Should return 400 for missing shoppinglist name"""
        self.test_add_shoppinglist_successfully()
        shoppinglist = json.dumps({
'name': '',
'desc': 'tattoo'
})
response = self.client.put('/shoppinglist/1', data=shoppinglist,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 404)
self.assertIn('The request can not be linked to, Please check your endpoint url', response.data.decode())
def test_update_shoppinglist_with_same_name(self):
"""Should return 409 for shoppinglist updates with same name"""
shoppinglist = json.dumps({
'name': 'Travel',
'desc': 'Visit places'
})
response = self.client.post('/shoppinglists', data=shoppinglist,
headers={"Authorization": self.token})
shoppinglist = json.dumps({
'name': 'London',
'desc': 'Visit places'
})
response = self.client.post('/shoppinglists', data=shoppinglist,
headers={"Authorization": self.token})
shoppinglist1 = json.dumps({
'name': 'Travel',
'desc': 'Visit places'
})
response = self.client.put('/shoppinglists/2', data=shoppinglist1,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 409)
self.assertIn('Shoppinglist name Already exists', response.data.decode())
def test_update_shoppinglist_successfully(self):
"""Should return 200 for shoppinglists update succesfully"""
# First add shoppinglist
self.test_add_shoppinglist_successfully()
shoppinglist = json.dumps({
'name': 'Travel',
'desc': 'Visit places'
})
response = self.client.put('/shoppinglists/1', data=shoppinglist,
headers={"Authorization": self.token})
self.assertEqual(response.status_code, 200)
self.assertIn('shoppinglist updated', response.data.decode())
def test_delete_shoppinglist_that_doesnt_exist(self):
"""Should return 201 for shoppinglist added"""
response = self.client.delete(
'/shoppinglists/1', headers={"Authorization": self.token})
self.assertEqual(response.status_code, 404)
self.assertIn('shoppinglist not found', response.data.decode())
def test_delete_shoppinglist_successfully(self):
"""Should return 200 for shoppinglist deleted"""
# First add a shoppinglist
self.test_add_shoppinglist_successfully()
response = self.client.delete(
'/shoppinglists/1', headers={"Authorization": self.token})
self.assertEqual(response.status_code, 200)
self.assertIn('shoppinglist deleted', response.data.decode())
def tearDown(self):
# Drop all tables
with app.app_context():
# Drop all tables
db.session.remove()
db.drop_all()
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "JoyLubega/tic-tac-toe",
"score": 3
} |
#### File: tic-tac-toe/api/views.py
```python
from flask import Flask, request, jsonify
from api.tictactoe import TicTacToe
import random
import unicodedata
app = Flask(__name__)
tic = TicTacToe()
@app.route('/game')
def newgame():
"""
Get request to return the next board
"""
board = request.args.get('board')
if board is None:
return jsonify({'error': 'enter the board please'}), 400
if len(board) < 9:
response = jsonify({'Error': 'Board is short'})
response.status_code = 400
return response
if tic.is_board_valid(board) is False:
return jsonify({'error': 'The board is invalid'}), 400
if tic.is_tie(board) is True:
return jsonify({'Message': 'The board is a tie'}), 400
if tic.is_winner(board, 'o'):
return jsonify({'Message': 'Player O is the winner'}), 400
    if board.count('o') - board.count('x') == 1:
        next_player = 'x'
    if (board.count('x') - board.count('o') == 1) or (board.count('x') - board.count('o') == 0):
        next_player = 'o'
if next_player == 'o':
the_move = tic.getmove_server(board,'o')
new = board
sep_list = list(new)
sep_list[the_move]='o'
print(sep_list)
next_board = "".join(sep_list)
return jsonify({
"next_board": next_board
})
if next_player == 'x':
possible_boards = tic.expected_boards(board, next_player) # all boards of the given player # noqa E501
return jsonify({
"next_board": possible_boards[0]
})
```
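The `/game` endpoint takes the whole board as a nine-character query string, with spaces for empty squares, and answers with the server's next board. A hedged sketch of exercising it through Flask's test client (the board value is an arbitrary in-progress game, and the exact reply depends on the server's move choice):
```python
# Illustrative only; output depends on the server's move choice.
from api.views import app

client = app.test_client()
resp = client.get('/game', query_string={'board': ' xxo  o  '})
print(resp.status_code)   # 200 for a valid board
print(resp.get_json())    # e.g. {'next_board': 'oxxo  o  '}
```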
#### File: tic-tac-toe/tests/test_tic_tac_toe.py
```python
import unittest
from api.views import app, tic
class TicTacToeTestCase(unittest.TestCase):
"""
Tests for the Tictactoe end points
"""
def setUp(self):
self.client = app.test_client()
def test_make_request_with_no_board_provided(self):
"""Should return 400 for missing board parameter"""
response = self.client.get('/game')
self.assertEqual(response.status_code, 400)
self.assertIn('enter the board please',response.data.decode())
def test_make_request_when_it_is_a_tie(self):
"""Should return 400 when the board provided is a tie"""
response = self.client.get('/game', query_string="board=xxxoooxox")
self.assertEqual(response.status_code, 400)
self.assertIn('The board is a tie', response.data.decode())
def test_invalid_board(self):
"""Should return 400 for an invalid board"""
response = self.client.get('/game', query_string="board=oooooooooo")
self.assertEqual(response.status_code, 400)
self.assertIn('The board is invalid',response.data.decode())
def test_not_plausibly_os_turn(self):
"""Should return 400 for if it was os turn to play"""
r = self.client.get('/game', query_string="board=xxx ")
self.assertEqual(r.status_code, 400)
def test_winning_board(self):
self.assertTrue(tic.is_winner('xxxoo ', player='x'))
    def test_losing_board(self):
self.assertFalse(tic.is_winner('xx xooooo', player='x'))
def test_valid_board_with_valid_return(self):
"""Should return 200 for valid board string"""
response = self.client.get('/game?board= xxo o ')
print(response)
self.assertEqual(response.status_code, 200)
self.assertIn('oxxo o ', response.data.decode())
#testing for optimization
def test_score_winning_and_losing_base_cases(self):
self.assertEqual(tic.score('xxx ', player='x'), 1)
self.assertEqual(tic.score('xxx ', player='o'), -1)
self.assertEqual(tic.score('o o o ', player='o'), 1)
self.assertEqual(tic.score('o o o ', player='x'), -1)
self.assertEqual(tic.score('x x x ', player='x'), 1)
self.assertEqual(tic.score('x x x ', player='o'), -1)
def test_score_tie_base_cases(self):
self.assertEqual(tic.score('xoooxxoxo', player='x'), 0)
self.assertEqual(tic.score('xoooxxoxo', player='o'), 0)
def test_score_easy_board_can_win(self):
self.assertEqual(tic.score('oo xx ', player='o'), 1)
self.assertEqual(tic.score('oo xx ', player='x'), 1)
if __name__ == "__main__":
unittest.main()
``` |
{
"source": "JoyLubega/Traveller",
"score": 2
} |
#### File: Traveller/flights/auth_views.py
```python
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from rest_framework_jwt.settings import api_settings
from rest_framework import permissions, generics, status
from rest_framework.response import Response
from .serializers import TokenSerializer, UserSerializer
from .decorators import validate_user_registration, validate_user_login
jwt_payload_handler = api_settings.JWT_PAYLOAD_HANDLER
jwt_encode_handler = api_settings.JWT_ENCODE_HANDLER
class LoginView(generics.CreateAPIView):
"""
POST auth/login/
"""
    permission_classes = (permissions.AllowAny,)  # This permission class will override the global permission class setting
queryset = User.objects.all()
@validate_user_login
def post(self, request, *args, **kwargs):
username = request.data.get("username", "")
password = request.data.get("password", "")
user = authenticate(request, username=username, password=password)
if user is not None:
# login saves the user’s ID in the session,
# using Django’s session framework.
login(request, user)
serializer_class = TokenSerializer(data={
# using drf jwt utility functions to generate a token
"token": jwt_encode_handler(
jwt_payload_handler(user))
})
serializer_class.is_valid()
return Response(serializer_class.data)
error_msg= {"error":"Not Registered, Please register."}
return Response(error_msg,status=status.HTTP_401_UNAUTHORIZED)
class RegisterView(generics.CreateAPIView):
"""
POST /register/
"""
permission_classes = (permissions.AllowAny,)
@validate_user_registration
def post(self, request, *args, **kwargs):
username = request.data.get("username", "")
password = request.data.get("password", "")
email = request.data.get("email", "")
        if not username or not password or not email:
            return Response(
                data={
                    "message": "username, password and email are required to register a user"
                },
                status=status.HTTP_400_BAD_REQUEST
            )
new_user = User.objects.create_user(
username=username, password=password, email=email
)
return Response(
data=UserSerializer(new_user).data,
status=status.HTTP_201_CREATED
)
```
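`LoginView` authenticates against Django's user store and hands back a JWT built by drf-jwt's payload and encode handlers. A minimal sketch of hitting both endpoints with `requests` (the host, URL prefixes, and credentials are assumptions; the `JWT ` header prefix is drf-jwt's default):
```python
# Illustrative only; host, paths, and credentials are assumptions.
import requests

base = 'http://localhost:8000'
requests.post(base + '/register/', data={
    'username': 'demo', 'password': 'demo-pass', 'email': 'demo@example.com',
})
resp = requests.post(base + '/auth/login/', data={
    'username': 'demo', 'password': 'demo-pass',
})
token = resp.json()['token']                 # serialized by TokenSerializer
headers = {'Authorization': 'JWT ' + token}  # drf-jwt's default auth header
```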
#### File: flights/tests/test_auth.py
```python
from rest_framework import status
from django.contrib.auth.models import User
from .base import BaseViewTest
class AuthLoginUserTest(BaseViewTest):
"""
Tests for the login/ endpoint
"""
def test_login_user_with_valid_credentials(self):
# test login with valid credentials
response = self.login_a_user("test_user", "testing")
print(response)
# assert token key exists
self.assertIn("token", response.data)
# assert status code is 200 OK
self.assertEqual(response.status_code, status.HTTP_200_OK)
# test login with invalid credentials
response = self.login_a_user("anonymous", "pass")
# assert status code is 401 UNAUTHORIZED
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_login_user_with_empty_credentials(self):
        # test login with empty credentials
        response = self.login_a_user("", "testing")
        print(response)
        # assert status code is 400 BAD REQUEST
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_register_a_user(self):
response = self.register_a_user("new_user", "new_pass", "<EMAIL>")
# assert status code is 201 CREATED
self.assertEqual(response.data["username"], "new_user")
self.assertEqual(response.data["email"], "<EMAIL>")
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
# test with invalid data
response = self.register_a_user()
# assert status code
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_register_a_userwith_empty_creds(self):
response = self.register_a_user("", "new_pass", "<EMAIL>")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_register_a_userwith_emptycreds(self):
response = self.register_a_user("name", "", "<EMAIL>")
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
``` |
{
"source": "joymachinegames/joymachine-public",
"score": 3
} |
#### File: python/maya/maya_center_selected_objects.py
```python
import maya.cmds as cmds
# Note (trent, 5/7/18): This script assumes a Z-up scene (and that all selected objects should be placed above the origin on Z with their pivots at the origin).
def moveToCenter(sel):
    if len(sel) <= 0:
        print("ERROR: Select at least one object.")
        return
bbox = cmds.exactWorldBoundingBox(sel)
bottom = [(bbox[0] + bbox[3])/2, (bbox[1] + bbox[4])/2, bbox[2]]
cmds.xform(sel, piv=bottom, ws=True)
cmds.move(0,0,0, sel, rotatePivotRelative = True)
selection = cmds.ls(sl=True)
for sel in selection:
moveToCenter(sel)
cmds.makeIdentity(sel, apply=True)
```
#### File: python/megascan-assembler/megascan.py
```python
from PIL import Image
import sys, os
import zipfile
TEXTURE_FILE_PREFIX = "t_"
def get_family_root(directoryPath):
""" guess the path on disk to the texture family's directory """
return os.path.basename(os.path.normpath(directoryPath))
def get_filename_root(directoryPath):
""" guess the prefix of texture filenames """
# guess the filename root by figuring out the full filename for 'albedo'
    albedo_path = next((f for f in os.listdir(directoryPath) if "albedo" in f.lower()), None)
    if albedo_path is None:
        print('Could not find an albedo file in directory %s. Aborting.' % directoryPath)
        sys.exit(1)
# ...then chop that full filename, to guess the prefix
albedo_idx = albedo_path.lower().find("albedo")
return albedo_path[:albedo_idx]
def get_filename_for_channel(directoryPath, sourceRoot, channel_type):
""" combine the directory path and filename root to guess the filename for a source image """
# e.g. "pkfg22_4K_" + "Roughness.jpg"
return os.path.join(directoryPath, sourceRoot + channel_type)
def is_combined_texture_plan(plan):
"""
If true, this texture is composed of three other single-channel textures,
which are copied into the R, G, and B channels of the destination image.
"""
return 'r' in plan and 'g' in plan and 'b' in plan
def do_single_channel(directoryPath, outputRoot, sourceRoot, outputSuffix, single_channel_name):
""" Take a texture from disk and convert it to a single-channel texture """
output_file_path = lower(TEXTURE_FILE_PREFIX + outputRoot + outputSuffix)
# load the channel
sourceChannelPath = get_filename_for_channel(directoryPath, sourceRoot, single_channel_name)
try:
sourceChannel = Image.open(sourceChannelPath)
except IOError:
return
else:
with Image.new('L', sourceChannel.size) as output:
output.paste(sourceChannel)
output.save(output_file_path)
return output_file_path
def do_saveas(directoryPath, outputRoot, sourceRoot, outputSuffix, output_name):
""" Take an RGB texture from disk and save it as an RGB texture with the name and format we expect """
output_file_path = lower(TEXTURE_FILE_PREFIX + outputRoot + outputSuffix)
# Just copy the entire image over.
source_path = get_filename_for_channel(directoryPath, sourceRoot, output_name)
try:
source = Image.open(source_path)
except IOError:
return
else:
source.save(output_file_path)
return output_file_path
def do_rgb(directoryPath, outputRoot, sourceRoot, outputSuffix, plan):
""" Take multiple textures and combine them into a single RGB texture. """
output_file_path = lower(TEXTURE_FILE_PREFIX + outputRoot + outputSuffix)
# HACK: since M is optional in M_R_AO, we're going to go backwards
# and just guess that AO is always here and M isn't. in the future, maybe
# add some magic to the filename for 'is optional?' but then how do we decide
# sizes? who cares for now...
b_path = get_filename_for_channel(directoryPath, sourceRoot, plan['b'])
g_path = get_filename_for_channel(directoryPath, sourceRoot, plan['g'])
r_path = get_filename_for_channel(directoryPath, sourceRoot, plan['r'])
with Image.open(b_path) as bSource:
with Image.open(g_path) as gSource:
if os.path.exists(r_path):
rSource = Image.open(r_path)
else:
rSource = Image.new('RGB', bSource.size)
output = Image.merge('RGB', (rSource.split()[0], gSource.split()[0], bSource.split()[0]))
output.save(output_file_path)
rSource.close()
return output_file_path
def pack_directory(directoryPath, plan):
if not os.path.isdir(directoryPath):
print('Directory %s does not exist. Aborting.' % (directoryPath))
sys.exit(1)
family_root = get_family_root(directoryPath)
filename_root = get_filename_root(directoryPath)
print('Handling family %s at %s*.jpg' % (family_root, filename_root))
converted = []
for planned_suffix in plan:
planned = plan[planned_suffix]
# determine k or r, g, b
if 'k' in planned.keys():
greyscale_result = do_single_channel(directoryPath, family_root, filename_root, planned_suffix, planned['k'])
if greyscale_result is not None:
# single channel mode
converted.append(greyscale_result)
print('Rule ' + planned_suffix + ' (1-channel) generated file ' + greyscale_result)
elif 'rgb' in planned.keys():
# copy this texture directly
copied_result = do_saveas(directoryPath, family_root, filename_root, planned_suffix, planned['rgb'])
if copied_result is not None:
converted.append(copied_result)
print('Rule ' + planned_suffix + ' was converted to RGB texture ' + copied_result)
elif is_combined_texture_plan(planned):
# assume r, g, b => rgb
colour_result = do_rgb(directoryPath, family_root, filename_root, planned_suffix, planned)
converted.append(colour_result)
print('Rule ' + planned_suffix + ' (RGB) generated file ' + colour_result)
else:
print('Error in build plan: Do not know how to build ' + planned_suffix + ', channels provided are ' + ', '.join(planned.keys()))
return
# all done, bundle them into a zip.
archive_family(family_root, converted)
def archive_family(family_root, paths):
zipname = lower(family_root.strip("-_ ") + ".zip")
with zipfile.ZipFile(zipname, mode = 'w') as z:
for path in paths:
z.write(path)
print(' ~> Generated zip archive of %s called %s' % (family_root, zipname))
def lower(s):
    return s.lower()
def printUsage():
print('Usage: %s [directories]' % (sys.argv[0]))
def main():
plan = {
# r = red, g = green, b = blue, k = grey (single channel), 'rgb' = copy directly
'_m_r_ao.png': {
# Metallic, roughness, AO
'r': 'Metallic.jpg',
'g': 'Roughness.jpg',
'b': 'AO.jpg'
},
'_n.png': {
# Normal map
'rgb': 'Normal.jpg'
},
'_c.png': {
# Just cavity
'k': 'Cavity.jpg'
},
'_o.png': {
# Opacity (mask).
'k': 'Opacity.jpg'
},
'_t.png': {
# Translucency (subsurface scattering -- generally reserved only for foliage).
'rgb': 'Translucency.jpg'
},
'_a.png': {
# Just albedo
'rgb': 'Albedo.jpg'
},
'_d.png': {
# Displacement map (single channel texture)
'k': 'Displacement.jpg'
}
}
if len(sys.argv) < 2:
printUsage()
sys.exit(1)
for d in sys.argv[1:]:
pack_directory(d, plan)
if __name__ == '__main__': main()
```
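The `plan` dict in `main()` is the whole configuration surface: each key is an output suffix, and each value maps destination channels (`r`/`g`/`b`, single-channel `k`, or a straight `rgb` copy) to source filenames. A hedged sketch of driving `pack_directory` with a trimmed-down plan (the directory name and output suffixes here are made up):
```python
# Illustrative only; './rock_surface' is a hypothetical Megascans folder.
from megascan import pack_directory

minimal_plan = {
    '_a.png': {'rgb': 'Albedo.jpg'},    # copy albedo straight through
    '_r.png': {'k': 'Roughness.jpg'},   # pack roughness as one grey channel
}
pack_directory('./rock_surface', minimal_plan)
```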
#### File: python/misc/rename_images_by_timestamp.py
```python
import os, os.path
import shutil
from datetime import datetime
import sys
def do_folder(src_folder):
assert os.path.isdir(src_folder)
dst_folder = os.path.join(src_folder, 'timestamp_prefixed')
    print('Input dir: %s' % src_folder)
    print('Output dir: %s' % dst_folder)
    if not os.path.exists(dst_folder):
        print('Creating %s' % dst_folder)
        os.makedirs(dst_folder)
for f in os.listdir(src_folder):
base,ext = os.path.splitext(f)
src_file = os.path.join(src_folder, f)
if os.path.isfile(src_file) and ext.lower() in ['.png','.jpg','.gif']:
ctime = os.path.getmtime(src_file)
dt = datetime.utcfromtimestamp(ctime)
dst_file = os.path.join(dst_folder, dt.strftime("%Y_%m_%d_")+f)
            print('%s ---> %s' % (src_file, dst_file))
shutil.copyfile(src_file,dst_file)
if __name__ == '__main__':
folders = sys.argv[1:]
if len(folders) == 0:
do_folder('.')
else:
for f in folders:
do_folder(f)
```
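Note that the script copies rather than moves, so the originals stay untouched. A sketch of reusing it as a module (the folder path is an assumption):
```python
# Illustrative only; copies prefixed files into ./screenshots/timestamp_prefixed/.
from rename_images_by_timestamp import do_folder

do_folder('./screenshots')
```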
#### File: python/vector_field_generator/VectorFieldTool_GUI.py
```python
from Tkinter import *
from tkFileDialog import *
from tkMessageBox import *
from ttk import Combobox
from math import ceil
import os
import VFGenerator as core
from tooltip import CreateToolTip
# Define Basic Background and Text Colors
textcol = "#999999"
bgColor = "#282828"
GenVar = IntVar
biasVar = IntVar
b_height = 20
b_width = 20
b_xpad = b_width + 20
def assignValues():
gridsize = [int(ceil(float(grids_x.get()))),int(ceil(float(grids_y.get()))),int(ceil(float(grids_z.get())))]
generator = getGenType(GenType_dpd.get())
minbounds = [int(MinB_x.get()),int(MinB_y.get()),int(MinB_z.get())]
maxbounds = [int(MaxB_x.get()),int(MaxB_y.get()),int(MaxB_z.get())]
directionbias = [float(DirB_x.get()),float(DirB_y.get()),float(DirB_z.get())]
directionstrength = float(DirStr.get())
scalenoiseamount = float(SclNAmt.get())
directionnoiseamount = float(DirNAmt.get())
mainscalefactor = float(Scl_F.get())
filename = File_Entry.get()
filecheck = os.path.isfile(filename)
pathcheck = os.path.split(filename)
pathbool = os.path.isdir(pathcheck[0])
if(pathbool):
core.makeVectorField(gridsize, minbounds, maxbounds, generator, filename, directionbias, directionstrength,scalenoiseamount, directionnoiseamount, mainscalefactor)
else:
showerror("Save Location", "Invalid save location")
#print(gridsize)
#print(generator)
#print(File_Entry.get())
#print(minbounds)
#print(maxbounds)
#print(directionbias)
#print(directionstrength)
#print(scalenoiseamount)
#print(directionnoiseamount)
#print(mainscalefactor)
def getGenType(string01):
return{"Uniform":0,"UniformNormalized":1,"Grid":2,"GridNormalized":3,"Radial":4}[string01]
def saveTo():
File_Entry.delete(0,'end')
saveDir = asksaveasfilename()
saveDir = "%s.fga" % saveDir
File_Entry.insert(2,str(saveDir))
def updatedirstr(event):
DirStr.delete(0,'end')
DirStr.insert(1,str(DirStr_slider.get()))
def updatescalenoise(event):
SclNAmt.delete(0,'end')
SclNAmt.insert(1,str(SclNAmt_slider.get()))
def updatedirnamt(event):
DirNAmt.delete(0,'end')
DirNAmt.insert(1,str(DirNAmt_slider.get()))
def updatemainscl(event):
Scl_F.delete(0,'end')
Scl_F.insert(3,str(Scl_F_slider.get()))
GenTypeOptions = ["Uniform","UniformNormalized","Grid","GridNormalized","Radial"]
dirbiasoptions = [-1,0,1]
root = Tk()
root.option_add("*TCombobox*Listbox*Background", '#282828')
root.option_add("*TCombobox*Listbox*Foreground", '#999999')
root.title("Kashaar's VectorField Thingymabub v1.0")
root.geometry("400x512")
root.configure(background=bgColor)
root.resizable(width=False, height=False)
# Create Generator Type Dropdown Widgets
GenType_lbl = Label(root,text="Choose Generator Type: ",fg=textcol,bg=bgColor)
GenType_lbl.place(x=20,y=20,height=20)
GenType_tlt = CreateToolTip(GenType_lbl,"Choose what type of noise generator to use")
GenType_dpd = Combobox(root,values=GenTypeOptions,textvariable=GenVar,takefocus=True,state="readonly")
GenType_dpd.set(GenTypeOptions[3])
GenType_dpd.place(x=170,y=20,height=20)
#Create Gridsize Entry Widget
Grid_lbl = Label(root,text="GridSize:",fg=textcol,bg=bgColor)
Grid_lbl.place(x=20,y=50,height=20)
Grid_tlt = CreateToolTip(Grid_lbl,"Use values no lower than 2. High values take longer to generate, and cost more memory.")
gridx_lbl = Label(root,text="X:",fg=textcol,bg=bgColor)
gridx_lbl.place(x=80,y=50,height=20,width=40)
grids_x = Entry(root,bg=bgColor,fg=textcol)
grids_x.insert(2,"16")
grids_x.place(x=120,y=50,height=20,width=40)
gridy_lbl = Label(root,text="Y:",fg=textcol,bg=bgColor)
gridy_lbl.place(x=160,y=50,height=20,width=40)
grids_y = Entry(root,bg=bgColor,fg=textcol)
grids_y.insert(2,"16")
grids_y.place(x=200,y=50,height=20,width=40)
gridz_lbl = Label(root,text="Z:",fg=textcol,bg=bgColor)
gridz_lbl.place(x=240,y=50,height=20,width=40)
grids_z = Entry(root,bg=bgColor,fg=textcol)
grids_z.insert(2,"16")
grids_z.place(x=280,y=50,height=20,width=40)
# Create MinBounds Entry Widget
MinB_lbl = Label(root,text="MinBounds:",fg=textcol,bg=bgColor)
MinB_lbl.place(x=20,y=80,height=20)
MinB_tlt = CreateToolTip(MinB_lbl,"bottom left back coordinate of the grid")
MinBx_lbl = Label(root,text="X:",fg=textcol,bg=bgColor)
MinBx_lbl.place(x=80,y=80,height=20,width=40)
MinB_x = Entry(root,bg=bgColor,fg=textcol)
MinB_x.insert(2,"-100")
MinB_x.place(x=120,y=80,height=20,width=40)
MinBy_lbl = Label(root,text="Y:",fg=textcol,bg=bgColor)
MinBy_lbl.place(x=160,y=80,height=20,width=40)
MinB_y = Entry(root,bg=bgColor,fg=textcol)
MinB_y.insert(2,"-100")
MinB_y.place(x=200,y=80,height=20,width=40)
MinBz_lbl = Label(root,text="Z:",fg=textcol,bg=bgColor)
MinBz_lbl.place(x=240,y=80,height=20,width=40)
MinB_z = Entry(root,bg=bgColor,fg=textcol)
MinB_z.insert(2,"-100")
MinB_z.place(x=280,y=80,height=20,width=40)
# Create MaxBounds Entry Widget
MaxB_lbl = Label(root,text="MaxBounds:",fg=textcol,bg=bgColor)
MaxB_lbl.place(x=20,y=110,height=20)
MaxB_tlt = CreateToolTip(MaxB_lbl,"top right front coordinate of the grid")
MaxBx_lbl = Label(root,text="X:",fg=textcol,bg=bgColor)
MaxBx_lbl.place(x=80,y=110,height=20,width=40)
MaxB_x = Entry(root,bg=bgColor,fg=textcol)
MaxB_x.insert(2,"100")
MaxB_x.place(x=120,y=110,height=20,width=40)
MaxBy_lbl = Label(root,text="Y:",fg=textcol,bg=bgColor)
MaxBy_lbl.place(x=160,y=110,height=20,width=40)
MaxB_y = Entry(root,bg=bgColor,fg=textcol)
MaxB_y.insert(2,"100")
MaxB_y.place(x=200,y=110,height=20,width=40)
MaxBz_lbl = Label(root,text="Z:",fg=textcol,bg=bgColor)
MaxBz_lbl.place(x=240,y=110,height=20,width=40)
MaxB_z = Entry(root,bg=bgColor,fg=textcol)
MaxB_z.insert(2,"100")
MaxB_z.place(x=280,y=110,height=20,width=40)
# Create DirectionBias Entry Widget
DirB_lbl = Label(root,text="Direction Bias:",fg=textcol,bg=bgColor)
DirB_lbl.place(x=20,y=140,height=20)
DirB_tlt = CreateToolTip(DirB_lbl,"Stick to values between -1 and 1.To make vector fields generally point in a specific direction, define the direction here...")
DirBx_lbl = Label(root,text="X:",fg=textcol,bg=bgColor)
DirBx_lbl.place(x=100,y=140,height=20,width=b_width)
#DirBx_dpd = Combobox(root,values=dirbiasoptions,takefocus=True,state="readonly")
#DirBx_dpd.set(dirbiasoptions[1])
#DirBx_dpd.place(x=120,y=140,height=20,width=40)
DirB_x = Entry(root,bg=bgColor,fg=textcol)
DirB_x.insert(2,"0")
DirB_x.place(x=120,y=140,height=20,width=b_width)
DirBy_lbl = Label(root,text="Y:",fg=textcol,bg=bgColor)
DirBy_lbl.place(x=180,y=140,height=20,width=b_width)
#DirBy_dpd = Combobox(root,values=dirbiasoptions,takefocus=True,state="readonly")
#DirBy_dpd.set(dirbiasoptions[1])
#DirBy_dpd.place(x=200,y=140,height=20,width=40)
DirB_y = Entry(root,bg=bgColor,fg=textcol)
DirB_y.insert(2,"0")
DirB_y.place(x=200,y=140,height=20,width=b_width)
DirBz_lbl = Label(root,text="Z:",fg=textcol,bg=bgColor)
DirBz_lbl.place(x=260,y=140,height=20,width=b_width)
#DirBz_dpd = Combobox(root,values=dirbiasoptions,takefocus=True,state="readonly")
#DirBz_dpd.set(dirbiasoptions[1])
#DirBz_dpd.place(x=280,y=140,height=20,width=40)
DirB_z = Entry(root,bg=bgColor,fg=textcol)
DirB_z.insert(2,"0")
DirB_z.place(x=280,y=140,height=20,width=b_width)
# Create DirectionStrength Entry Widget
DirStr_lbl = Label(root,text="Direction Strength:",fg=textcol,bg=bgColor)
DirStr_lbl.place(x=20,y=170,height=20)
DirStr_tlt = CreateToolTip(DirStr_lbl,"Use this parameter to scale DirectionBias.")
DirStr = Entry(root,bg=bgColor,fg=textcol)
DirStr.insert(4,"0.0")
DirStr.place(x=140,y=170,height=20,width=40)
DirStr_slider = Scale(root,from_=0,to=100,orient=HORIZONTAL,length=180,showvalue=0,resolution=-1,command=updatedirstr)
DirStr_slider.place(x=200,y=170)
# Create ScaleNoiseAmount Entry Widget
SclNAmt_lbl = Label(root,text="Scale Noise Amount:",fg=textcol,bg=bgColor)
SclNAmt_lbl.place(x=20,y=200,height=20)
SclNAmt_tlt = CreateToolTip(SclNAmt_lbl,"This is used to give some variation to the generated vectors' lengths. A factor of 1.0 for example would mean that instead of length 1, output vectors get a random length between 0 and 2.")
SclNAmt = Entry(root,bg=bgColor,fg=textcol)
SclNAmt.insert(3,"0.05")
SclNAmt.place(x=140,y=200,height=20,width=40)
SclNAmt_slider = Scale(root,from_=0,to=10,orient=HORIZONTAL,length=180,showvalue=0,resolution=-1,command=updatescalenoise)
SclNAmt_slider.place(x=200,y=200)
SclNAmt_slider.set(0.05)
# Create Widget for Direction Noise
DirNAmt_lbl = Label(root,text="Direction Noise Scale Amount:",fg=textcol,bg=bgColor)
DirNAmt_lbl.place(x=20,y=230,height=20)
DirNAmt_tlt = CreateToolTip(DirNAmt_lbl,"Adds uniform unidirectional noise to the generator's result, for nicer swirlies.")
DirNAmt = Entry(root,bg=bgColor,fg=textcol)
DirNAmt.insert(3,"0.0")
DirNAmt.place(x=200,y=230,height=20,width=40)
DirNAmt_slider = Scale(root,from_=0,to=10,orient=HORIZONTAL,length=120,showvalue=0,resolution=-1,command=updatedirnamt)
DirNAmt_slider.place(x=260,y=230)
# Create mainscalefactor Entry Widget
Scl_F_lbl = Label(root,text="Main Scale Factor",fg=textcol,bg=bgColor)
Scl_F_lbl.place(x=20,y=260,height=20)
Scl_F_tlt = CreateToolTip(Scl_F_lbl,"Generated vectors are (or should be) within 0 - 1 units of length. This variable lets you scale them all uniformly.")
Scl_F = Entry(root,bg=bgColor,fg=textcol)
Scl_F.insert(3,"10.0")
Scl_F.place(x=140,y=260,height=20,width=b_width*2)
Scl_F_slider = Scale(root,from_=-100,to=100,orient=HORIZONTAL,length=180,showvalue=0,resolution=-1,command=updatemainscl)
Scl_F_slider.place(x=200,y=260)
Scl_F_slider.set(10)
# Create File Dialog
File_lbl = Label(root,text="Save To",fg=textcol,bg=bgColor)
File_lbl.place(x=20,y=290,height=20)
File_tlt = CreateToolTip(File_lbl,"Path to save generated .fga file")
File_Entry = Entry(root,fg=textcol,bg=bgColor)
File_Entry.place(x=80,y=290,height=20,width=300)
#File_Entry.bind("<KeyRelease>",updatefileentry)
File_btn = Button(root,text="...",fg=textcol,bg=bgColor,command=saveTo)
File_btn.place(x=370,y=290,height=20)
Create_btn = Button(root,text="Generate VectorField",bg=bgColor,fg=textcol,command=assignValues)
Create_btn.place(x=128,y=340)
crtbtn_tlt = CreateToolTip(Create_btn,"Script will attempt to open the generated .fga file. Use any text editor.")
root.mainloop()
``` |
{
"source": "joymaitra/nested-json2rel-data",
"score": 3
} |
#### File: nested-json2rel-data/nested_json2rel_data/nested_json_parser.py
```python
import json
class nested_json_parser:
    def bucket(self, data=None, id=0, parent='root', child_of='root', child_of_id=0):
        # Avoid a mutable default argument; fall back to the root document.
        local_id = id
        if data is None:
            local_data = self.data
        else:
            local_data = data
local_dict={}
local_dict["root_id"] =self.stack[-1][1]
local_dict["root_name"] =parent
local_dict["child_of"] = child_of
local_dict["child_of_id"] = child_of_id
temp_lst = []
for key in local_data:
self.counter += 1
self.stack.append([key,self.counter])
if isinstance(local_data[key],(str, int, float)):
local_dict[key] = local_data[key]
if isinstance(local_data[key], dict):
if local_data[key] == {}:
local_dict["no item available"] = 'NULL'
else:
self.bucket(local_data[key],local_id+len(self.stack),key,self.stack[-2][0],self.stack[-2][1])
if isinstance(local_data[key], list):
for count,lst in enumerate(local_data[key]):
self.stack[-1][1] =self.stack[-1][1]+count
if isinstance(lst, (str,int, float)):
temp ={}
temp["root_id"] = local_id+len(self.stack)+count
temp["root_name"] = key
temp["child_of"] = self.stack[-2][0]
temp["child_of_id"] = self.stack[-2][1]
temp[key] = lst
temp_lst.append(temp)
self.output.append(temp)
if isinstance(lst, dict):
local_id = self.stack[-2][1]
self.bucket(lst,local_id+len(self.stack)+count,key,self.stack[-2][0],local_id)
self.stack.pop()
self.output.append(local_dict)
return(self.output)
def __init__(self,data):
self.data = data
self.counter = 0
self.stack=[['root',0]]
self.output =[]
``` |
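`bucket` walks the document depth-first and emits one flat dict per object, carrying identity through the `root_id`, `root_name`, `child_of`, and `child_of_id` columns. A sketch of flattening a small document (the sample data is made up, and the exact id values depend on traversal order):
```python
# Illustrative only; each printed row is one flat, relational-style record.
from nested_json2rel_data.nested_json_parser import nested_json_parser

doc = {
    'name': 'Ann',
    'address': {'city': 'Kolkata', 'zip': '700001'},
    'phones': ['111', '222'],
}
for row in nested_json_parser(doc).bucket():
    print(row)
```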
{
"source": "joymallyac/Fairway",
"score": 2
} |
#### File: Fairway/Multiobjective Optimization/Compass.py
```python
import pandas as pd
import numpy as np
import random,time
import math,copy
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.tree import DecisionTreeRegressor
from sklearn.metrics import classification_report, confusion_matrix, accuracy_score
from sklearn import tree
from sklearn.naive_bayes import GaussianNB
from sklearn.ensemble import RandomForestClassifier
from result.measure import calculate_recall,calculate_far,calculate_average_odds_difference, calculate_equal_opportunity_difference, get_counts, measure_final_score
from optimizer.flash import flash_fair_LSR
## Load dataset
dataset_orig = pd.read_csv('dataset/compas-scores-two-years.csv')
## Drop categorical features
## Removed two duplicate columns - 'decile_score','priors_count'
dataset_orig = dataset_orig.drop(['id','name','first','last','compas_screening_date','dob','age','juv_fel_count','decile_score','juv_misd_count','juv_other_count','days_b_screening_arrest','c_jail_in','c_jail_out','c_case_number','c_offense_date','c_arrest_date','c_days_from_compas','c_charge_desc','is_recid','r_case_number','r_charge_degree','r_days_from_arrest','r_offense_date','r_charge_desc','r_jail_in','r_jail_out','violent_recid','is_violent_recid','vr_case_number','vr_charge_degree','vr_offense_date','vr_charge_desc','type_of_assessment','decile_score','score_text','screening_date','v_type_of_assessment','v_decile_score','v_score_text','v_screening_date','in_custody','out_custody','start','end','event'],axis=1)
## Drop NULL values
dataset_orig = dataset_orig.dropna()
## Change symbolics to numerics
dataset_orig['sex'] = np.where(dataset_orig['sex'] == 'Female', 1, 0)
dataset_orig['race'] = np.where(dataset_orig['race'] != 'Caucasian', 0, 1)
dataset_orig['priors_count'] = np.where((dataset_orig['priors_count'] >= 1 ) & (dataset_orig['priors_count'] <= 3), 3, dataset_orig['priors_count'])
dataset_orig['priors_count'] = np.where(dataset_orig['priors_count'] > 3, 4, dataset_orig['priors_count'])
dataset_orig['age_cat'] = np.where(dataset_orig['age_cat'] == 'Greater than 45',45,dataset_orig['age_cat'])
dataset_orig['age_cat'] = np.where(dataset_orig['age_cat'] == '25 - 45', 25, dataset_orig['age_cat'])
dataset_orig['age_cat'] = np.where(dataset_orig['age_cat'] == 'Less than 25', 0, dataset_orig['age_cat'])
dataset_orig['c_charge_degree'] = np.where(dataset_orig['c_charge_degree'] == 'F', 1, 0)
## Rename class column
dataset_orig.rename(index=str, columns={"two_year_recid": "Probability"}, inplace=True)
## Divide into train,validation,test
dataset_orig_train, dataset_orig_vt = train_test_split(dataset_orig, test_size=0.3, random_state=0)
dataset_orig_valid, dataset_orig_test = train_test_split(dataset_orig_vt, test_size=0.5, random_state=0)
X_train, y_train = dataset_orig_train.loc[:, dataset_orig_train.columns != 'Probability'], dataset_orig_train['Probability']
X_valid , y_valid = dataset_orig_valid.loc[:, dataset_orig_valid.columns != 'Probability'], dataset_orig_valid['Probability']
X_test , y_test = dataset_orig_test.loc[:, dataset_orig_test.columns != 'Probability'], dataset_orig_test['Probability']
def run_ten_times_default():
print(" ---------- Default Results --------")
for i in range(3):
print("----Run No----",i)
start = time.time()
## Divide into train,validation,test
dataset_orig_train, dataset_orig_vt = train_test_split(dataset_orig, test_size=0.3)
dataset_orig_valid, dataset_orig_test = train_test_split(dataset_orig_vt, test_size=0.5)
X_train, y_train = dataset_orig_train.loc[:, dataset_orig_train.columns != 'Probability'], dataset_orig_train['Probability']
X_valid, y_valid = dataset_orig_valid.loc[:, dataset_orig_valid.columns != 'Probability'], dataset_orig_valid['Probability']
X_test, y_test = dataset_orig_test.loc[:, dataset_orig_test.columns != 'Probability'], dataset_orig_test['Probability']
#### DEFAULT Learners ####
# --- LSR
clf = LogisticRegression(C=1.0, penalty='l2', solver='liblinear', max_iter=100) # LSR Default Config
# --- CART
# clf = tree.DecisionTreeClassifier(criterion="gini",splitter="best",min_samples_leaf=1,min_samples_split=2) # CART Default Config
clf.fit(X_train, y_train)
y_pred = clf.predict(X_test)
cnf_matrix_test = confusion_matrix(y_test, y_pred)
print(cnf_matrix_test)
TN, FP, FN, TP = confusion_matrix(y_test,y_pred).ravel()
print("recall:", 1 - calculate_recall(TP,FP,FN,TN))
print("far:",calculate_far(TP,FP,FN,TN))
print("aod for sex:",measure_final_score(dataset_orig_test, clf, X_train, y_train, X_test, y_test, 'sex', 'aod'))
print("eod for sex:",measure_final_score(dataset_orig_test, clf, X_train, y_train, X_test, y_test, 'sex', 'eod'))
print("aod for race:",measure_final_score(dataset_orig_test, clf, X_train, y_train, X_test, y_test, 'race', 'aod'))
print("eod for race:",measure_final_score(dataset_orig_test, clf, X_train, y_train, X_test, y_test, 'race', 'eod'))
end = time.time()
print(end-start)
def run_ten_times_FLASH():
print(" ---------- FLASH Results --------")
for i in range(3):
print("----Run No----",i)
start = time.time()
## Divide into train,validation,test
dataset_orig_train, dataset_orig_vt = train_test_split(dataset_orig, test_size=0.3)
dataset_orig_valid, dataset_orig_test = train_test_split(dataset_orig_vt, test_size=0.5)
X_train, y_train = dataset_orig_train.loc[:, dataset_orig_train.columns != 'Probability'], dataset_orig_train['Probability']
X_valid, y_valid = dataset_orig_valid.loc[:, dataset_orig_valid.columns != 'Probability'], dataset_orig_valid['Probability']
X_test, y_test = dataset_orig_test.loc[:, dataset_orig_test.columns != 'Probability'], dataset_orig_test['Probability']
# tuner = LR_TUNER()
# best_config = tune_with_flash(tuner, X_train, y_train, X_valid, y_valid, 'adult', dataset_orig_valid, 'sex')
best_config = flash_fair_LSR(dataset_orig,"sex","ABCD")
print("best_config",best_config)
p1 = best_config[0]
if best_config[1] == 1:
p2 = 'l1'
else:
p2 = 'l2'
if best_config[2] == 1:
p3 = 'liblinear'
else:
p3 = 'saga'
p4 = best_config[3]
clf = LogisticRegression(C=p1, penalty=p2, solver=p3, max_iter=p4)
# clf = tuner.get_clf(best_config)
print("recall :", 1 - measure_final_score(dataset_orig_test, clf, X_train, y_train, X_test, y_test, 'sex', 'recall'))
print("far :",measure_final_score(dataset_orig_test, clf, X_train, y_train, X_test, y_test, 'sex', 'far'))
print("aod :",measure_final_score(dataset_orig_test, clf, X_train, y_train, X_test, y_test, 'sex', 'aod'))
print("eod :",measure_final_score(dataset_orig_test, clf, X_train, y_train, X_test, y_test, 'sex', 'eod'))
end = time.time()
print(end - start)
run_ten_times_default()
run_ten_times_FLASH()
``` |
{
"source": "JoyMarie1019/Maeve.discord.pay",
"score": 3
} |
#### File: JoyMarie1019/Maeve.discord.pay/util.py
```python
import discord
from discord.ext import commands
from utils.jsonLoader import read_json, write_json
def GetTicketCount():
data = read_json("config")
return data["ticketCount"]
def IncrementTicketCount():
data = read_json("config")
data["ticketCount"] += 1
write_json(data, "config")
def GetTicketSetupMessageId():
data = read_json("config")
return data["ticketSetupMessageId"]
def LogNewTicketChannel(channelId, ticketId):
data = read_json("config")
data[str(channelId)] = {}
data[str(channelId)]["id"] = ticketId
data[str(channelId)]["reactionMsgId"] = None
write_json(data, "config")
def IsATicket(channelId):
data = read_json("config")
return str(channelId) in data
def GetTicketId(channelId):
data = read_json("config")
return data[str(channelId)]["id"]
def RemoveTicket(channelId):
data = read_json("config")
data.pop(str(channelId))
write_json(data, "config")
async def NewTicketSubjectSender(author, channel, subject):
if subject == "No subject specified.":
return
embed = discord.Embed(
title="Provided subject for ticket:", description=subject, color=0xfab6ee,
)
embed.set_author(name=author.name, icon_url=author.avatar_url)
await channel.send(embed=embed)
async def NewTicketEmbedSender(bot, author, channel):
embed = discord.Embed(
title="**Welcome to peer to peer support {}!**".format(author.display_name),
description="**Thank you for contacting support**\n\n> While you wait for a supporter, please briefly describe why you need support and identify any triggering topics that may come up.\n\n> If you need immediate help, please do the command ``.crisis`` to alert higher ups.\n\n> If you need any additional help, we have resources with the bot listed in <#755320868137205761>",
color=0xfab6ee)
embed.set_thumbnail(url="https://i.imgur.com/mxLRE31.png")
embed.set_footer(text="The Butterfly Project Team")
m = await channel.send(f"{author.mention} | <@&{bot.supp_role_id}> | <@&{bot.tsupp_role_id}>", embed=embed)
await m.add_reaction("🔒")
data = read_json("config")
data[str(channel.id)]["reactionMsgId"] = m.id
write_json(data, "config")
async def ReactionCreateNewTicket(bot, payload):
guild = bot.get_guild(751990570989387796)
author = guild.get_member(payload.user_id)
await CreateNewTicket(bot, guild, author)
async def CreateNewTicket(bot, guild, author, *, subject=None, message=None):
subject = subject or "No subject specified."
ticketId = GetTicketCount() + 1
modRole = guild.get_role(bot.mod_role_id)
suppRole = guild.get_role(bot.supp_role_id)
tsuppRole = guild.get_role(bot.tsupp_role_id)
logChannel = bot.get_channel(bot.log_channel_id)
category_id = int(759955006123540530)
category = guild.get_channel(category_id)
overwrites = {
guild.default_role: discord.PermissionOverwrite(read_messages=False),
guild.me: discord.PermissionOverwrite(read_messages=True),
modRole: discord.PermissionOverwrite(read_messages=True),
suppRole: discord.PermissionOverwrite(read_messages=True),
tsuppRole: discord.PermissionOverwrite(read_messages=True),
author: discord.PermissionOverwrite(read_messages=True)}
channel = await guild.create_text_channel(
name=f"Support Ticket #{ticketId}",
overwrites=overwrites,
category=category)
LogNewTicketChannel(channel.id, ticketId)
await SendLog(
bot,
author,
logChannel,
f"Created ticket with ID {ticketId}",
f"Ticket Creator: {author.mention}(`{author.id}`)\nChannel: {channel.mention}({channel.name})\nSubject: {subject}",
0xfab6ee,
)
await NewTicketEmbedSender(bot, author, channel)
await NewTicketSubjectSender(author, channel, subject)
IncrementTicketCount()
if message:
await message.delete()
async def CloseTicket(bot, channel, author, reason=None):
if not IsATicket(channel.id):
await channel.send("I cannot close this as it is not a ticket.")
return
reason = reason or "No closing reason specified."
ticketId = GetTicketId(channel.id)
messages = await channel.history(limit=None, oldest_first=True).flatten()
ticketContent = " ".join(
[f"{message.content} | {message.author.name}\n" for message in messages]
)
with open(f"data/tickets/{ticketId}.txt", "w", encoding="utf8") as f:
f.write(f"Here is the message log for ticket ID {ticketId}\n----------\n\n")
f.write(ticketContent)
fileObject = discord.File(f"data/tickets/{ticketId}.txt")
logChannel = bot.get_channel(bot.log_channel_id)
await SendLog(
bot,
author,
logChannel,
f"Closed Ticked: Id {ticketId}",
f"Close Reason: {reason}",
0xfab6ee,
file=fileObject,
)
await channel.delete()
async def SendLog(
bot: commands.Bot,
author,
channel,
contentOne: str = "Default Message",
contentTwo: str = "\uFEFF",
color=0xfab6ee,
timestamp=None,
file: discord.File = None,
):
embed = discord.Embed(title=contentOne, description=contentTwo, color=color)
if timestamp:
embed.timestamp = timestamp
embed.set_author(name=author.name, icon_url=author.avatar_url)
await channel.send(embed=embed)
if file:
await channel.send(file=file)
def CheckIfValidReactionMessage(msgId):
data = read_json("config")
if data["ticketSetupMessageId"] == msgId:
return True
data.pop("ticketSetupMessageId")
data.pop("ticketCount")
for value in data.values():
if value["reactionMsgId"] == msgId:
return True
return False
async def SetupNewTicketMessage(bot):
data = read_json("config")
channel = bot.get_channel(bot.new_ticket_channel_id)
embed = discord.Embed(
title="𝕋𝕙𝕖 𝔹𝕦𝕥𝕥𝕖𝕣𝕗𝕝𝕪 ℙ𝕣𝕠𝕛𝕖𝕔𝕥 ℙ𝕖𝕖𝕣 𝕊𝕦𝕡𝕡𝕠𝕣𝕥 𝕊𝕪𝕤𝕥𝕖𝕞",
description="Welcome to The Butterfly Project's Peer Support!\n\nThis is a safe space for those who are going through recovery or want to recover with their mental illness. On top of our already loving community, we are proud to offer you:\n\n> Private one on one peer support with our trained volunteers\n\n> A team of moderators and supporters that truly care about your safety\n\n> A completely private and safe enviornment for you to talk to your volunteer\n\n> And custom commands to help you in a pinch\n\nWe are so happy to be a part of your recovery journey! Our volunteers aren't just people on the other side of a screen, they are your friends that you get to know personally in server. Below are the basic rules of our peer to peer support. Once you have read them and understand them, click the check mark below to get started.",
color=0xfab6ee,
)
embed.add_field(
name="ℙ𝕖𝕖𝕣 𝕊𝕦𝕡𝕡𝕠𝕣𝕥 𝔾𝕦𝕚𝕕𝕖𝕝𝕚𝕟𝕖𝕤",
value="Please read the following rules before asking for support.")
embed.add_field(
name="**1. Be mindful of triggering topics**",
value="*When you open a ticket we ask that you describe your situation and identify any triggers that may come up. This is for the safety of our volunteers so they can be aware if they need to cope ahead.*")
embed.add_field(
name="**2. Respect the Supporters**",
value="*Our peer supports are all here on a volunteer basis. They are doing a hard job for free to help you, so pleade do not disrespect the volunteers.If you have an issue with a volunteer, please report them through the modmail system in the server.*")
embed.add_field(
name="**3. Do not abuse the system**",
value="*This support system is here for you to use when other methods of coping fail. Before coming to our peer supports, please try and use the rest of the server and resources provided to you. You may come directly to the supporters when you are in a crisis (suicidal, homicidal, having strong self harm urges, so on) or when you feel you don't have any good coping skills.*")
embed.add_field(
name="***Crisis situation***",
value="*Due to the nature of the job, we may not always have enough volunteers to keep up with the volume of support requests. This can lead to wait times for those seeking support.If you are in a crisis, please alert higher ups once in a chat with the command ``.crisis``. We will prioritize your ticket and make sure you get the help you need. We can also give you your local suicide hotline number.*")
embed.set_footer(text="The Butterfly Project Team")
embed.set_image(url="https://i.imgur.com/dQk2HRM.png")
m = await channel.send(embed=embed)
await m.add_reaction("✅")
data["ticketSetupMessageId"] = m.id
write_json(data, "config")
``` |
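These helpers assume a bot that listens for raw reaction events and dispatches to them. A hedged sketch of that glue (the intents setup and the event routing are assumptions; a real handler would also branch on the 🔒 close reaction rather than treating every tracked message as a ticket opener):
```python
# Illustrative only; assumes bot attributes like new_ticket_channel_id are set.
import discord
from discord.ext import commands
import util

bot = commands.Bot(command_prefix='.', intents=discord.Intents.all())

@bot.event
async def on_raw_reaction_add(payload):
    if payload.user_id == bot.user.id:
        return  # ignore the bot's own reactions
    if util.CheckIfValidReactionMessage(payload.message_id):
        await util.ReactionCreateNewTicket(bot, payload)
```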
{
"source": "joymax/arango-python",
"score": 3
} |
#### File: arango-python/arango/aql.py
```python
import logging
from .cursor import Cursor
__all__ = ("AQLQuery", "F", "V", "S")
logger = logging.getLogger(__name__)
class Variable(object):
"""
AQL Variable
"""
def __init__(self, value):
self.value = value
self.invert = False
def __invert__(self):
self.invert = True
return self
def __repr__(self):
return "<AQL Variable: {}>".format(self.value)
class Func(object):
"""
AQL Function instance
"""
def __init__(self, name, *args, **kwargs):
self.name = name
self.args = args
def proceed_list(self, l):
"""
Process all arguments of Function
"""
result = []
for item in l:
if isinstance(item, dict):
result.append(self.proceed_dict(item))
continue
if issubclass(type(item), AQLQuery):
result.append(item.build_query())
continue
if issubclass(type(item), Variable):
if item.invert is True:
result.append('"{}"'.format(item.value))
else:
result.append(item.value)
continue
result.append(item)
return result
def proceed_dict(self, d):
"""
Process all arguments which are dicts
"""
pairs = []
for key, val in d.items():
if isinstance(val, (list, tuple)):
val = self.proceed_list(val)
elif isinstance(val, dict):
val = self.proceed_dict(val)
elif issubclass(type(val), AQLQuery):
val = val.build_query()
elif issubclass(type(val), Variable):
val = val.value
else:
val = "\"{}\"".format(val)
pairs.append("\"{name}\": {value}".format(
name=key, value=val))
return "{{{}}}".format(", ".join(pairs))
def build_query(self):
"""
Process the list of arguments and render the function call
"""
return "{}({})".format(self.name, ", ".join(
self.proceed_list(self.args)))
class FuncFactory(object):
"""
AQL Function factory.
This is ``F`` object in ``arango.aql`` module.
.. code::
from arango.aql import F
c.test.query.over(F.PATH("a", "b", "c")).execute()
Execute query::
FOR obj IN PATH(a, b, c)
RETURN obj
"""
def __getattribute__(self, name):
def f(*args, **kwargs):
return Func(name, *args, **kwargs)
return f
def V(name):
"""
Factory for defining variables in requests.
By default, in function arguments which are dicts,
all fields are wrapped with double quotes ``"``.
To refer to members of variables defined above,
the ``V`` factory should be used.
.. testcode::
expect = 'MERGE({"user1": u.name}, {"user1": "name"})'
assert F.MERGE(
{"user1": V("u.name")},
{"user1": "name"}).build_query() == expect
"""
return Variable(name)
class AQLQuery(object):
"""
An abstraction layer to generate simple AQL queries.
"""
def __init__(self, connection=None, collection=None, no_cache=False):
self.collection = collection
self.connection = connection
self.let_expr = []
self.for_var = "obj"
self.for_expr = None
self.filter_expr = []
self.collect_expr = []
self.sort_expr = []
self.limit_expr = None, None
self.nested_expr = []
self.return_expr = None
self.bind_vars = {}
self._built_query = None
self._no_cache = no_cache
self.cursor_args = {}
def cursor(self, **kwargs):
"""
Method to provide custom arguments for
:py:attr:`arango.cursor.Cursor` instance. All
keywords arguments except ``bindVars`` may be changed.
"""
self.cursor_args = kwargs
return self
def iter(self, name):
"""
``FOR`` cycle temporary variable,
``variable-name`` in AQL expression::
FOR variable-name IN expression
"""
self.for_var = name
return self
def over(self, expression):
"""
``expression`` in ``FOR`` cycle
.. code::
FOR variable-name IN expression
"""
self.for_expr = expression
return self
def nested(self, *args):
self.nested_expr.extend(args)
return self
def let(self, name, value):
"""
Add ``LET`` operation
.. code::
LET variable-name = expression
"""
self.let_expr.append((name, value))
return self
def filter(self, condition):
"""
Filter query by condition ``condition``.
It's possible to add multiple filter
expressions.
.. code::
FILTER condition
For example, in Python
.. code::
c.test.query
.filter("a==b && c==d")
.filter("d == m")
"""
self.filter_expr.append(condition)
return self
def collect(self, *pairs, **kwargs):
"""
Specify ``COLLECT`` operators, it's possible
to use it multiple times
.. code::
COLLECT variable-name = expression
COLLECT variable-name = expression INTO groups
In python
.. code::
c.test.query
.collect("emails", "u.email")
.collect("names", "u.name", into="eml")
.result(emails="eml",
names="names")
"""
if len(pairs) % 2 != 0:
raise ValueError(
"Arguments should be pairs variable-name and expression")
into = kwargs.get("into")
self.collect_expr.append((pairs, into))
return self
def sort(self, *args):
"""
Sort results by the criteria in ``args``.
.. code::
query.sort("u.email", "u.first_name DESC")
.sort("u.last_name")
"""
self.sort_expr.extend(args)
return self
def limit(self, count, offset=None):
"""
Limit results with ``count`` items. By default
``offset`` is ``0``.
.. code::
query.limit(100, offset=10)
"""
self.limit_expr = count, offset
return self
def bind(self, **kwargs):
"""
Bind some data to AQL Query. Technically it's
just a proxy to :py:attr:`arango.cursor.Cursor.bind`
method which attaches variables to the ``Cursor``.
It's mostly for avoiding any kind of query injections.
.. testcode::
data = c.test.query.filter("obj.name == @name")\\
.bind(name="Jane")\\
.execute().first
assert data != None
assert data.body["name"] == "Jane"
"""
self.bind_vars.update(kwargs)
return self
def result(self, *args, **kwargs):
"""
Expression which will be added as ``RETURN`` of **AQL**.
You can specify:
- single name, like ``q.result("u")``
- named arguments, like ``q.result(users="u", members="m")``
which transform into ``RETURN {users: u, members: m}``
- ``fields`` named argument, like ``q.result(fields={"key-a": "a"})``
to work with names which are not supported by Python syntax.
"""
self.return_expr = args or kwargs.get("fields") or kwargs
return self
def process_expr(self, expr, parentheses=True):
if issubclass(type(expr), Func):
return expr.build_query()
if issubclass(type(expr), AQLQuery):
if parentheses:
return "({})".format(expr.build_query())
return expr.build_query()
return expr
@property
def expr_return(self):
"""
Build expression
"""
return_expr = self.return_expr or self.for_var
if isinstance(return_expr, dict):
pairs = []
for key in sorted(self.return_expr):
expr = self.process_expr(self.return_expr[key])
pairs.append('"{}": {}'.format(key, expr))
return "{{{}}}".format(", ".join(pairs))
elif return_expr and isinstance(return_expr, (tuple, list)):
return_expr = return_expr[0]
if issubclass(type(return_expr), Func):
return_expr = return_expr.build_query()
return return_expr
@property
def expr_for(self):
for_expr = self.for_expr or self.collection
return self.process_expr(for_expr)
@property
def expr_nested(self):
if not self.nested_expr:
return ""
queries = []
for n, expr in enumerate(self.nested_expr):
if not issubclass(type(expr), AQLQuery):
raise ValueError(
"Nested expressions have to be"
"subclass of AQLQuery")
queries.append(expr.build_nested_query(n + 1))
return "\n".join(queries)
@property
def expr_let(self):
pairs = []
for name, expr in self.let_expr:
pairs.append("LET {name} = {expr}".format(
name=name, expr=self.process_expr(expr)))
return "\n".join(pairs)
@property
def expr_filter(self):
conds = []
for cond in self.filter_expr:
conds.append("FILTER {}".format(cond))
return "\n".join(conds)
@property
def expr_collect(self):
collect = []
for pairs, into in self.collect_expr:
exprs = []
into = " INTO {}".format(into) if into else ""
for name, expr in zip(pairs[0::2], pairs[1::2]):
exprs.append("{} = {}".format(name, self.process_expr(expr)))
collect.append("COLLECT {pairs}{into}".format(
pairs=", ".join(exprs), into=into))
return "\n".join(collect)
@property
def expr_sort(self):
if not self.sort_expr:
return ""
return "SORT {}\n".format(", ".join(self.sort_expr))
@property
def expr_limit(self):
count, offset = self.limit_expr
if count is None and offset is None:
return ""
if offset is None:
return "LIMIT {}\n".format(count)
return "LIMIT {}, {}\n".format(offset, count)
def build_nested_query(self, n):
"""
Build simplified query ONLY as nested:
skip all params except ``for_var`` and ``for_expr``
"""
for_var = self.for_var
if for_var == "obj":
for_var = "obj{}".format(n)
return "FOR {for_var} IN {for_expr}\n".format(
for_var=for_var,
for_expr=self.expr_for)
def build_query(self):
"""
Build AQL query and return it as
a string. This is a good starting point
for debugging generated AQL queries.
"""
if self._built_query is not None and self._no_cache is False:
return self._built_query
query = """
FOR {for_var} IN {for_expr}
{for_nested}{let_expr}{filter_expr}
{collect_expr}{sort_expr}{limit_expr}
RETURN
{return_expr}
""".format(
for_var=self.for_var,
for_expr=self.expr_for,
for_nested=self.expr_nested,
let_expr=self.expr_let,
filter_expr=self.expr_filter,
collect_expr=self.expr_collect,
sort_expr=self.expr_sort,
limit_expr=self.expr_limit,
return_expr=self.expr_return)
logger.debug(query)
self._built_query = query
return query
def execute(self, wrapper=None):
"""
Execute query: create a cursor, pass in bound variables
and return instance of :py:attr:`arango.cursor.Cursor` object
"""
self.cursor_args.update({"bindVars": self.bind_vars})
if wrapper is not None:
self.cursor_args.update({"wrapper": wrapper})
return Cursor(
self.connection, self.build_query(), **self.cursor_args)
def __repr__(self):
return "<AQLQuery: {}>".format(self.build_query())
F = FuncFactory()
```
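A minimal usage sketch for the builder above (collection and field names are hypothetical; in the real driver the query object normally comes from a connection's collection):
```python
from arango.aql import AQLQuery

# Build a query by hand and inspect the AQL it produces.
q = (AQLQuery(collection="users")
     .iter("u")
     .filter("u.age >= @min_age")
     .sort("u.name")
     .limit(10)
     .result(names="u.name"))
print(q.build_query())
# FOR u IN users ... FILTER u.age >= @min_age ... SORT u.name
# LIMIT 10 ... RETURN {"names": u.name}
```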
#### File: arango-python/arango/cursor.py
```python
import logging
from .document import Document
from .exceptions import AqlQueryError
__all__ = ("Cursor",)
logger = logging.getLogger(__name__)
class Cursor(object):
"""
Work with **Cursors** in ArangoDB.
At the moment, it's the
common way to work with **AQL** from this driver.
.. note:: the server will also destroy abandoned
cursors automatically after a certain
server-controlled timeout to
avoid resource leakage.
- ``query`` - contains the query string to be executed (mandatory)
- ``count`` - boolean flag that indicates whether the
number of documents found should be
returned as "count" attribute in the
result set (optional). Calculating the
"count" attribute might have a performance
penalty for some queries so this option
is turned off by default.
- ``batchSize`` - maximum number of result documents to be
transferred from the server to the client in
one roundtrip (optional).
If this attribute is not set, a server-controlled
default value will be used.
- ``bindVars`` - key/value list of bind parameters (optional).
- ``wrapper`` - callable used to wrap each result item;
by default it's ``Document.load``
"""
CREATE_CURSOR_PATH = "/_api/cursor"
DELETE_CURSOR_PATH = "/_api/cursor/{0}"
READ_NEXT_BATCH_PATH = "/_api/cursor/{0}"
def __init__(self, connection, query,
count=True, batchSize=None, bindVars=None,
wrapper=Document.load):
self.connection = connection
self.query = query
# boolean flag: show count in results or not
self.count = count
self.wrapper = wrapper
self.batchSize = batchSize
self.bindVars = bindVars if \
isinstance(bindVars, dict) else {}
# current position in dataset
self._position = 0
# ID of Cursor object within database
self._cursor_id = None
# whether more batches are available. By default it's true
# so that at least the first dataset/response is fetched
self._has_more = True
# data from current batch
self._dataset = []
# total count of results, extracted from Database
self._count = 0
def bind(self, bind_vars):
"""
Bind variables to the cursor
"""
self.bindVars = bind_vars
return self
def __iter__(self):
return self
@property
def first(self):
"""
Get first element from resultset
"""
if not self._dataset:
self.bulk()
try:
return self.wrapper(self.connection, self._dataset[0])
except IndexError:
return None
@property
def last(self):
"""
Return the last element from the ``current bulk``. It's
**NOT** the last result in the *entire dataset*.
"""
if not self._dataset:
self.bulk()
try:
return self.wrapper(self.connection, self._dataset[-1])
except IndexError:
return None
def next(self):
"""
Iterate through the resultset (lazily)
"""
self._position += 1
try:
item = self._dataset.pop(0)
return self.wrapper(self.connection, item)
except IndexError:
if self._has_more:
self.bulk()
return self.next()
raise StopIteration
__next__ = next
def bulk(self):
"""
Get the initial or next bulk of results from the database
"""
if not self._cursor_id:
response = self.connection.post(self.CREATE_CURSOR_PATH, data={
"query": self.query,
"count": self.count,
"batchSize": self.batchSize,
"bindVars": self.bindVars})
self._cursor_id = response.get("id", None)
else:
response = self.connection.put(
self.READ_NEXT_BATCH_PATH.format(self._cursor_id))
if response.status not in [200, 201]:
raise AqlQueryError(
response.data.get("errorMessage", "Unknown error"),
num=response.data.get("errorNum", -1),
code=response.status)
self._has_more = response.get("hasMore", False)
self._count = int(response.get("count", 0))
self._dataset = response["result"] if "result" in response else []
def __len__(self):
if not self._cursor_id:
self.bulk()
return self._count
def __repr__(self):
return "<ArangoDB Cursor Object: {0}>".format(self.query)
```
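A short sketch of driving the cursor directly (``connection`` is assumed to be a configured ``arango.core.Connection``; the query string is illustrative):
```python
from arango.cursor import Cursor

cursor = Cursor(connection, "FOR u IN users RETURN u", batchSize=100)
for doc in cursor:   # batches are fetched lazily via bulk()
    print(doc)
print(len(cursor))   # total count reported by the server
```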
#### File: arango-python/arango/exceptions.py
```python
__all__ = ("InvalidCollectionId", "CollectionIdAlreadyExist",
"InvalidCollection", "DocumentAlreadyCreated",
"DocumentIncompatibleDataType", "WrongIndexType",
"EmptyFields", "EdgeAlreadyCreated",
"DocumentNotFound", "EdgeNotYetCreated",
"EdgeIncompatibleDataType", "EdgeNotFound",
"DocuemntUpdateError", "AqlQueryError", "DatabaseAlreadyExist",
"DatabaseSystemError")
class DatabaseSystemError(Exception):
"""Raised when something goes completely wrong"""
class InvalidCollection(Exception):
"""Collection should exist and be a subclass of the Collection object"""
class InvalidCollectionId(Exception):
"""Invalid name of the collection provided"""
class CollectionIdAlreadyExist(Exception):
"""Raised when you try to rename a collection and the new name
is already taken"""
class DocumentAlreadyCreated(Exception):
"""Raised when the document already exists and you try to
call the `create` method"""
class DocumentIncompatibleDataType(Exception):
"""Raised when you try to update a document
with non-dict or non-list data"""
class DocumentNotFound(Exception):
"""Raised when the document does not exist in the database"""
class DocuemntUpdateError(Exception):
"""Raised when a document can't be saved"""
class WrongIndexType(Exception):
"""Raised when the index type is undefined"""
class EmptyFields(Exception):
"""Raised when no fields are provided for the index"""
class EdgeAlreadyCreated(Exception):
"""Raised when the edge has an identifier and is already created"""
class EdgeNotYetCreated(Exception):
"""Raised when you try to update an edge which is not yet created"""
class EdgeIncompatibleDataType(Exception):
"""Raised when the new body provided is neither None nor a dict"""
class EdgeNotFound(Exception):
"""Raised when the edge is not found"""
class AqlQueryError(Exception):
"""Raised when an AQL query cannot be executed"""
def __init__(self, message, num=0, code=0):
self.num = num
self.code = code
self.message = message
super(AqlQueryError, self).__init__(message)
class DatabaseAlreadyExist(Exception):
"""Raised when the database already exists"""
```
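``AqlQueryError`` is the only exception above carrying extra state; a sketch of how calling code can inspect it (the executing call is hypothetical):
```python
from arango.exceptions import AqlQueryError

try:
    cursor.bulk()   # any call that executes an AQL query
except AqlQueryError as e:
    print(e.message, e.num, e.code)
```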
#### File: arango-python/arango/mixins.py
```python
__all__ = ("ComparsionMixin", "LazyLoadMixin")
class ComparsionMixin(object):
"""
Mixin to help compare two instances
"""
def __eq__(self, other):
"""
Compare two items
"""
if not issubclass(type(other), self.__class__):
return False
if (self.body == other.body and self._id == other._id and
self._rev == other._rev):
return True
keys = lambda o: [key for key in o.body.keys()
if key not in self.IGNORE_KEYS]
# compare keys only
if keys(self) != keys(other):
return False
# compare bodies but ignore sys keys
if (self.body is not None and other.body is not None):
for key in keys(other):
if self.body.get(key, None) != other.body.get(key, None):
return False
if (self._id is not None and self._rev is not None and
(self._id != other._id or str(self._rev) != str(other._rev))):
return False
return True
class LazyLoadMixin(object):
"""
Mixin to lazily load some objects
before certain methods are processed.
Required attributes:
* LAZY_LOAD_HANDLERS - list of methods which should be handled
* lazy_loader - method which should check the loading status
and decide whether something needs to be loaded
or simply process the next method in the chain
* _lazy_loaded - property which provides the status of the lazy loading,
should be False by default
"""
def __getattribute__(self, name):
"""Fetching lazy document"""
if name in object.__getattribute__(self, "LAZY_LOAD_HANDLERS"):
object.__getattribute__(self, "_handle_lazy")()
return object.__getattribute__(self, name)
def _handle_lazy(self):
if self._lazy_loaded is False:
self._lazy_loaded = True
self.lazy_loader()
```
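A self-contained sketch of the ``LazyLoadMixin`` contract with a toy class (the fetch is simulated):
```python
from arango.mixins import LazyLoadMixin

class LazyDoc(LazyLoadMixin):
    LAZY_LOAD_HANDLERS = ["body"]   # accessing .body triggers loading

    def __init__(self):
        self._lazy_loaded = False
        self._body = None

    def lazy_loader(self):
        self._body = {"loaded": True}   # stand-in for a real fetch

    @property
    def body(self):
        return self._body

doc = LazyDoc()
print(doc.body)   # -> {'loaded': True}; _handle_lazy ran first
```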
#### File: arango/tests/tests_base.py
```python
import os
import types
import unittest
from nose import SkipTest
from mock import patch, MagicMock
from arango.clients import Client
from arango.clients.base import RequestsBase
from arango.core import Connection
__all__ = ("TestsBase",)
class TestsBase(unittest.TestCase):
methods = ["post", "put", "get", "delete"]
def setUp(self):
if "NOSMOKE" in os.environ:
raise SkipTest
self.conn = Connection()
for m in self.methods:
setattr(
self, m,
patch.object(self.conn.client, m, MagicMock()))
getattr(self, m).start()
self.url = "{0}{1}".format(self.conn.url, "/document")
def tearDown(self):
for m in self.methods:
try:
getattr(self, m).stop()
except RuntimeError:
pass
self.conn.client = Client
def build_mock_response(self, *args, **kwargs):
return RequestsBase.build_response(
200, "{}", [], "{}")
def response_mock(self, status_code=200, text='', method="get"):
mock_method = lambda self, *a, **kw: RequestsBase.build_response(
status_code, "", [], text)
mock_method = types.MethodType(mock_method, Client)
patcher = patch.object(Client, method, mock_method)
return patcher
```
#### File: arango/tests/tests_core.py
```python
from nose.tools import assert_equal, assert_not_equal, \
assert_true, assert_false, raises
from mock import Mock
from .tests_base import TestsBase
from arango import create
from arango.core import Connection, Response, Resultset
from arango.clients import Client
class TestConnectionInit(TestsBase):
def test_basic(self):
conn = Connection()
assert_equal(conn.prefix, "http://")
assert_equal(conn.url(), "http://localhost:8529")
def test_create_shortcut(self):
assert_equal(repr(Connection().collection), repr(create()))
assert_equal(repr(Connection()), repr(create().connection))
def test_modify(self):
conn = Connection()
conn.is_https = True
conn.port = 1234
assert_equal(conn.url(), "https://localhost:1234")
conn.is_https = False
conn.port = 9922
assert_not_equal(conn.url(), "http://localhost:9922")
def test_repr(self):
conn = Connection()
assert_equal(
str(conn),
"<Connection to ArangoDB (http://localhost:8529)>")
def test_database_prefix(self):
conn = Connection(db="test")
assert_equal(conn.url(), "http://localhost:8529/_db/test")
assert_equal(conn.url(db_prefix=False), "http://localhost:8529")
class TestConnectionRequestsFactory(TestsBase):
methods = ["post", "put", "get", "delete"]
def test_http_methods_factory(self):
conn = Connection()
for method in self.methods:
assert_equal(
getattr(conn, method)("/"),
conn.requests_factory(method=method)("/")
)
def test_http_methods_execution(self):
conn = Connection()
url = "{0}{1}".format(conn.url, "/")
for method in self.methods:
assert_equal(
getattr(conn, method)("/"),
Response(
url,
getattr(Client, method)(url)
)
)
@raises(AttributeError)
def test_wrong_http_method(self):
conn = Connection()
conn.wrong("/")
class TestResponse(TestsBase):
def response(self, status=500, text="text"):
response_mock = Mock()
response_mock.status_code = status
response_mock.text = text
return Response(self.url, response_mock)
def test_unparseable_response(self):
response = self.response()
assert_equal(response.status, 500)
assert_true(response.is_error)
assert_true(
"Can't parse response from ArangoDB: " in
response.message)
def test_repr(self):
response = self.response()
assert_equal(
str(response),
"<Response for {0}: {1}>".format(
repr(response.__dict__),
self.url
)
)
def test_parse_response(self):
response = self.response(
status=200,
text='{"status": 200, "message": "test", "value": 1}'
)
assert_false(response.is_error)
assert_equal(response.status, 200)
assert_equal(
response.get("message"),
"test"
)
assert_equal(
response.get("value"),
1
)
class TestResultset(TestsBase):
def setUp(self):
super(TestResultset, self).setUp()
self.Base = Mock()
self.data = list(range(3))
def iterate_mock(rs):
for item in rs.data:
yield item
def prepare_resultset_mock(rs, args=None, kwargs=None):
response = {"sample": 1}
rs.response = response
rs.count = len(self.data)
data = self.data[rs._offset:]
if rs._limit is not None:
data = self.data[:rs._limit]
rs.data = data
self.Base.iterate = iterate_mock
self.Base.prepare_resultset = prepare_resultset_mock
self.rs = Resultset(base=self.Base)
self.rs.base._cursor = lambda *a, **k: list(range(3))
def test_init(self):
rs = Resultset(self.Base, 1, 2, field=True, field2=False)
assert_equal(rs._args, (1, 2))
assert_equal(rs._kwargs, {"field": True, "field2": False})
def test_response(self):
assert_equal(self.rs.response, None)
test_value = {"value": 1}
self.rs._response = test_value
assert_equal(self.rs.response, test_value)
def test_iter(self):
assert_equal(
[item for item in self.rs],
self.data
)
def test_first_shortcut(self):
assert_equal(
self.rs.first,
self.data[0]
)
def test_first_last_shourcut_exceed(self):
def iterate_mock(arg):
for item in []:
yield item
self.Base.iterate = iterate_mock
rs = Resultset(base=self.Base)
assert_equal(rs.first, None)
assert_equal(rs.last, None)
def test_offset(self):
assert_equal(
len([item for item in self.rs.offset(1)]),
2
)
def test_limit(self):
assert_equal(
len([item for item in self.rs.limit(2)]),
2
)
def test_data(self):
assert_equal(self.rs.data, None)
self.rs.data = 1
assert_equal(self.rs.data, 1)
def test_count_shortcut(self):
assert_equal(self.rs.count, 3)
def test_repr(self):
assert_equal(
str(self.rs),
"<Resultset: {0}>".format(
", ".join([str(i) for i in self.data])
)
)
def test_repr_large_resultset(self):
dataset = list(range(self.rs.max_repr_items * 2))
def iterate_large_dataset(rs):
for item in dataset:
yield item
custom_base = Mock()
custom_base.iterate = iterate_large_dataset
assert_equal(
str(Resultset(base=custom_base)),
"<Resultset: {0}... more>".format(
", ".join([
str(i) for i in dataset[:self.rs.max_repr_items + 1]])
)
)
```
#### File: arango/tests/tests_edge.py
```python
from mock import Mock, patch
from nose.tools import assert_equal, assert_true, raises, \
assert_not_equal
from arango.edge import Edges, Edge
from arango.utils import json
from arango.exceptions import EdgeAlreadyCreated
from .tests_document import TestDocumentBase
__all__ = ("TestEdge",)
class TestEdge(TestDocumentBase):
def setUp(self):
super(TestEdge, self).setUp()
self.c = self.conn.collection.test
def delete_edge_response_mock(self):
return self.response_mock(
status_code=204,
text=json.dumps(dict(
_from="7848004/9289796",
_to="7848004/9355332",
_rev=30967598,
_id=1,
error=False,
code=204
)),
method="delete"
)
def create_edge_response_mock(self, body=None):
body = body if body is not None else {}
defaults = dict(
_from="7848004/9289796",
_to="7848004/9355332",
_rev=30967598,
_id="7848004/9683012",
error=False,
code=201
)
defaults.update(body)
patcher = self.response_mock(
status_code=201,
text=json.dumps(defaults),
method="post"
)
return patcher
def create_edge(self, from_doc, to_doc, body):
patcher = self.create_edge_response_mock(body=body)
patcher.start()
edge = self.c.edges.create(
from_doc,
to_doc,
body)
patcher.stop()
return edge
def test_collection_shortcut(self):
assert_equal(type(self.c.edges), Edges)
def test_edge_create(self):
body = dict(
key="value",
num=1)
doc1 = self.create_document(123, body)
doc2 = self.create_document(234, body)
edge = self.create_edge(doc1, doc2, body)
for k in body.keys():
assert_true(k in edge._body)
@raises(EdgeAlreadyCreated)
def test_edge_create_of_created(self):
body = {"value": "test"}
edge = Edge(self.c)
edge._id = 1
edge.create(None, None, body)
def test_get_edge_fields(self):
body = {
"array": [1, 2, 3],
"options": None,
"number": 5.5,
"tree": {
"sample1": "1",
"sample2": "2"
}
}
doc1 = self.create_document(123, body)
doc2 = self.create_document(234, body)
edge = self.create_edge(doc1, doc2, body)
assert_equal(
edge.get("array", default=None),
[1, 2, 3]
)
for key in body.keys():
assert_true(key in edge.get().keys())
assert_equal(
edge["tree"]["sample1"],
body["tree"]["sample1"]
)
def test_edge_deletion(self):
body = {"value": "test"}
doc1 = self.create_document(123, body)
doc2 = self.create_document(234, body)
edge = self.create_edge(doc1, doc2, body)
patcher = self.delete_edge_response_mock()
patcher.start()
edge._id = 1
edge._rev = 1
edge._body = {}
assert_true(edge.delete())
assert_equal(edge.id, None)
assert_equal(edge.rev, None)
assert_equal(edge.body, None)
assert_equal(edge.from_document, None)
assert_equal(edge.to_document, None)
patcher.stop()
def test_edge_comparsion(self):
body = {"value": 1}
doc1 = self.create_document(123, body)
doc2 = self.create_document(234, body)
edge1 = self.create_edge(doc1, doc2, body)
edge2 = self.create_edge(doc1, doc2, body)
mock_from = Mock()
mock_to = Mock()
with patch("arango.document.Document.lazy_loader"):
edge1._from = mock_from
edge1._to = mock_to
edge2._from = mock_from
edge2._to = mock_to
assert_equal(edge1, edge2)
edge2._to_document = None
edge2._to = Mock()
assert_not_equal(edge1, edge2)
```
#### File: joymax/arango-python/bootstrap.py
```python
import os
import subprocess
import optparse
import sys
try:
from urllib2 import Request, urlopen, HTTPError, URLError
except ImportError:
from urllib.request import Request, urlopen, \
HTTPError, URLError
BOOTSTRAP_MOD = 'bootstrap'
BOOTSTRAP_ETAG = '._' + BOOTSTRAP_MOD + '.etag'
BOOTSTRAP_PY = BOOTSTRAP_MOD + '.py'
BOOTSTRAP_URL = 'https://raw.github.com/jellycrystal/bootstrap/master/bootstrap.py'
DEFAULT_PRE_REQS = ['virtualenv']
def _warn(msg):
sys.stderr.write("Warn: %s\n" % (msg,))
def _err(msg):
sys.stderr.write("Error: %s\n" % (msg,))
sys.exit(1)
def get_pre_reqs(pre_req_txt):
"""Getting list of pre-requirement executables"""
try:
pre_reqs = open(pre_req_txt).readlines()
except IOError:
_warn("Couldn't find pre-reqs file: %s, use default pre-reqs" % pre_req_txt)
# There are no pre-reqs yet.
pre_reqs = DEFAULT_PRE_REQS
for pre_req in pre_reqs:
pre_req = pre_req.strip()
# Skip empty lines and comments
if not pre_req or pre_req.startswith('#'):
continue
yield pre_req
def check_pre_req(pre_req):
"""Check for pre-requirement"""
if subprocess.call(['which', pre_req],
stderr=subprocess.PIPE, stdout=subprocess.PIPE) == 1:
_err("Couldn't find '%s' in PATH" % pre_req)
def provide_virtualenv(ve_target, no_site=True, interpreter=None):
"""Provide virtualenv"""
args = ['--distribute']
if no_site:
args.append('--no-site-packages')
if interpreter is not None:
args.append('--python={0}'.format(interpreter))
if not os.path.exists(ve_target):
subprocess.call(['virtualenv'] + args + [ve_target])
def install_pip_requirements(ve_target, upgrade=False):
"""Install required Python packages into virtualenv"""
version = sys.version_info
prefix = "py"
if hasattr(sys, "pypy_version_info"):
version = sys.pypy_version_info
prefix = "pypy"
elif isinstance(version, tuple):
major, minor, micro, t, b = version
else:
major = version.major
minor = version.minor
micro = version.micro
req_name = "requirements"
extensions = [
"generic",
"txt",
"{0}_{1}".format(prefix, major),
"{0}_{1}{2}".format(prefix, major, minor),
"{0}_{1}{2}{3}".format(prefix, major, minor, micro)
]
pip_path = os.path.join(ve_target, 'bin', 'pip')
for ext in extensions:
filename = "{0}.{1}".format(req_name, ext)
if os.path.exists(filename):
sys.stderr.write("Installing {0}...".format(filename))
call_args = [pip_path, 'install', '-r', filename]
if upgrade:
call_args.append('--upgrade')
try:
if subprocess.call(call_args):
_err("Failed to install requirements")
except OSError:
_err("Something went wrong during installation requirements: " + \
" ".join(call_args))
def pass_control_to_doit(ve_target):
"""Passing further control to doit"""
try:
import dodo
except ImportError:
return
if hasattr(dodo, 'task_bootstrap'):
doit = os.path.join(ve_target, 'bin', 'doit')
subprocess.call([doit, 'bootstrap'])
def do(func, *args, **kwargs):
"""Announce func.__doc__ and run func with provided arguments"""
doc = getattr(func, '__doc__')
if doc is None:
doc = func.__name__
func_args = ', '.join(str(a) for a in args)
func_kwargs = ', '.join("%s=%s" % (k, str(kwargs.get(k)))
for k in kwargs.keys())
msg = "%s... %s %s\n" % (doc, func_args, func_kwargs)
sys.stderr.write(msg)
return func(*args, **kwargs)
def bootstrap(pre_req_txt, ve_target, no_site=True,
upgrade=False, interpreter=None):
ve_target = os.path.normpath(os.path.abspath(ve_target))
os.environ['BOOTSTRAP_VIRTUALENV_TARGET'] = ve_target
for pre_req in do(get_pre_reqs, pre_req_txt):
do(check_pre_req, pre_req)
do(provide_virtualenv, ve_target, no_site=no_site, interpreter=interpreter)
do(install_pip_requirements, ve_target, upgrade=upgrade)
do(pass_control_to_doit, ve_target)
def main(args):
parser = optparse.OptionParser()
parser.add_option("-p", "--pre-requirements", dest="pre_requirements",
default="pre-reqs.txt", action="store", type="string",
help="File with list of pre-reqs")
parser.add_option("-E", "--virtualenv", dest="virtualenv",
default='ve', action="store", type="string",
help="Path to virtualenv to use")
parser.add_option("-P", "--python", dest="interpreter",
default=None, action="store", type="string",
help="Path to Python Interpreter to use")
parser.add_option("-s", "--no-site", dest="no_site",
default=False, action="store_true",
help="Don't use global site-packages on create virtualenv")
parser.add_option("-u", "--upgrade", dest="upgrade",
default=False, action="store_true",
help="Upgrade packages")
options, args = parser.parse_args(args)
bootstrap(
pre_req_txt=options.pre_requirements,
ve_target=options.virtualenv,
no_site=options.no_site,
interpreter=options.interpreter,
upgrade=options.upgrade)
if __name__ == '__main__':
main(sys.argv)
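# Example invocation (a sketch; these are this script's own defaults):
#   python bootstrap.py -E ve -p pre-reqs.txt --upgrade
# checks the executables listed in pre-reqs.txt, creates ./ve if missing,
# and installs any matching requirements.* files into the virtualenv.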
``` |
{
"source": "joymax/django-debug-toolbar",
"score": 2
} |
#### File: debug_toolbar/panels/state.py
```python
import re
from django.db import models
from django.db.models.signals import post_save, post_delete
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from debug_toolbar.panels import DebugPanel
class StateDebugPanel(DebugPanel):
name = "State"
has_content = True
ADD = 1
UPDATE = 2
DELETE = 3
objs_created = 0
objs_deleted = 0
objs_updated = 0
objects_state = {}
prev_objects_state = {}
log_data = []
keys = {ADD: 'created', UPDATE: 'updated', DELETE: 'deleted'}
def nav_title(self):
return _("State")
def nav_subtitle(self):
return mark_safe(_("%(created)d created, "\
"%(updated)d changed, %(deleted)d deleted") % {
'created': self.objs_created,
'deleted': self.objs_deleted,
'updated': self.objs_updated,
})
def title(self):
return _("Objects State")
def url(self):
return ''
def _track_save(cls, *args, **kwargs):
"""Method to track every post_save signal and increase amount of
create/change action for corresponding object"""
is_created = False
action = cls.UPDATE
if 'created' in kwargs and kwargs['created']:
cls.objs_created += 1
is_created = True
action = cls.ADD
else:
cls.objs_updated += 1
cls.log_data.append({
'action': action,
'sender': kwargs['sender'],
'pk': kwargs['instance'].pk,
'is_created': is_created,
})
track_save = classmethod(_track_save)
def _track_delete(cls, *args, **kwargs):
cls.objs_deleted += 1
cls.log_data.append({
'action': cls.DELETE,
'pk': kwargs['instance'].pk,
'sender': kwargs['sender'],
})
track_delete = classmethod(_track_delete)
def _connect(cls):
post_save.connect(cls.track_save)
post_delete.connect(cls.track_delete)
cls.update_objects_state()
connect = classmethod(_connect)
def _update_objects_state(cls):
for md in models.get_models():
model_name = cls.prepare_model_name(md)
try:
cls.objects_state[model_name] = md.objects.count()
except Exception:
pass
update_objects_state = classmethod(_update_objects_state)
def renew_state(self):
cls = self.__class__
cls.prev_objects_state = cls.objects_state.copy()
self.update_objects_state()
cls.objs_created = 0
cls.objs_updated = 0
cls.objs_deleted = 0
cls.log_data = []
def _prepare_model_name(cls, mdl):
return re.sub(r'(\<class|[>\']*)', '', str(mdl)).strip()
prepare_model_name = classmethod(_prepare_model_name)
def _statistic(self):
cls = self.__class__
data = self.log_data
stat = {}
for item in data:
sender = cls.prepare_model_name(item['sender'])
action = item['action']
if not sender in stat:
stat[sender] = dict((key, 0) for key in self.keys.values())
stat[sender][self.keys[action]] += 1
return stat
statistic = property(_statistic)
def merge_states(self, stat, cur, prev):
rv = []
keys = self.keys.values()
for md, cur in cur.iteritems():
prev_amt = prev.get(md, -1)
md_stat = stat.get(md, None)
md_data = {
'prev': prev_amt,
'cur': cur,
'model': md,
}
if md_stat:
[md_data.update({
'have_%s' % key: True,
'%s_amount' % key: md_stat[key],
}) for key in keys if md_stat[key] > 0]
rv.append(md_data)
# sort by C/U/D
[rv.sort(reverse=True, key=lambda obj: obj.get(key, 0)) \
for key in ["%s_amount" % c_key for c_key in keys]]
return rv
def content(self):
context = self.context.copy()
statistic = self.statistic.copy()
context.update({
'objs_created': self.objs_created,
'objs_updated': self.objs_updated,
'objs_deleted': self.objs_deleted,
'stat': statistic,
'objects_state': self.merge_states(statistic, self.objects_state, \
self.prev_objects_state),
})
# we should do it because we save state to
# class, not to particular object instance
self.renew_state()
return render_to_string('debug_toolbar/panels/state.html', context)
# initialize tracking signals
StateDebugPanel.connect()
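# To show this panel, add it to DEBUG_TOOLBAR_PANELS in settings.py
# (a sketch; the dotted path depends on where this module is installed):
#   DEBUG_TOOLBAR_PANELS = (
#       'debug_toolbar.panels.state.StateDebugPanel',
#   )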
``` |
{
"source": "joymaxnascimento/python",
"score": 4
} |
#### File: python/hackerrank/find-a-string.py
```python
def count_substring(string, sub_string):
count = 0
string2 = string[string.find(sub_string):len(string)+1]
while sub_string in string2:
count += 1
string2 = string2[string2.find(sub_string)+1:len(string2)]
return count
```
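HackerRank's sample case for this problem, showing that overlapping matches are counted:
```python
print(count_substring("ABCDCDC", "CDC"))  # -> 2
```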
#### File: python/hackerrank/text-wrap.py
```python
import textwrap
def wrap(string, max_width):
return textwrap.fill(string, max_width)
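# Sample usage (HackerRank's sample case):
#   print(wrap("ABCDEFGHIJKLIMNOQRSTUVWXYZ", 4))
# prints the string four characters per line.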
``` |
{
"source": "JoyMbugua/arifa",
"score": 3
} |
#### File: arifa/markets/models.py
```python
from django.db import models
from django.template.defaultfilters import slugify
# Create your models here.
class Market(models.Model):
name = models.CharField(max_length=30)
slug = models.SlugField(unique=True)
def __str__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(Market, self).save(*args, **kwargs)
def get_experts(self):
return self.experts.all()
```
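The ``save()`` override derives the slug from the name; the underlying helper can be exercised on its own (the market name here is hypothetical, and the full model needs a configured Django project):
```python
from django.utils.text import slugify  # the helper behind the template filter

assert slugify("Real Estate & REITs") == "real-estate-reits"
```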
#### File: arifa/markets/tests.py
```python
from django.test import TestCase
from .models import Market
# Create your tests here.
class MarketModelTests(TestCase):
def setUp(self):
self.new_market = Market(name='stocks', slug='stocks')
def test_instance(self):
self.assertTrue(isinstance(self.new_market, Market))
```
#### File: arifa/markets/views.py
```python
from django.shortcuts import render
from django.views.generic import ListView
from .models import Market
from profiles.models import Profile
# Create your views here.
def experts_List(request, market_name):
market = Market.objects.get(slug=market_name)
experts = market.get_experts()
print('EXPERTS', experts)
return render(request, 'markets/experts.html', {"market": market, "experts": experts})
```
#### File: arifa/posts/models.py
```python
from django.db import models
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.contrib.contenttypes.fields import GenericRelation
from comment.models import Comment
class Post(models.Model):
author = models.ForeignKey(get_user_model(),on_delete=models.CASCADE, related_name='posts')
body = models.TextField()
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
comments = GenericRelation(Comment)
likes = models.ManyToManyField(get_user_model(), related_name='posts_liked', blank=True)
def __str__(self):
return self.body[:20]
def get_absolute_url(self):
return reverse('post_detail', args=[str(self.id)])
def get_likes(self):
return self.likes.all().count()
class BlogPost(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=100)
body = models.TextField()
link = models.URLField()
image_url = models.URLField()
pub_time = models.DateTimeField()
def __str__(self):
return self.title
```
#### File: arifa/profiles/views.py
```python
from django.http.response import HttpResponseRedirect
from django.shortcuts import render, redirect
from django.urls import reverse_lazy
from django.views.generic import DetailView, UpdateView, ListView, CreateView
from .models import Profile, Message, Client
from .forms import ReviewForm, UserProfileForm, MessageForm, ReplyForm
from django.contrib.auth.mixins import LoginRequiredMixin
from users.email import send_client_email
from django.contrib.auth import get_user_model
from django.core.mail import send_mail
from django.contrib.auth import get_user_model
from users.decorators import expert_required
from django.http import JsonResponse
from django.views.decorators.http import require_POST
from django.views.decorators.csrf import csrf_exempt
class ProfileDetailView(LoginRequiredMixin, DetailView):
model = Profile
template_name = 'profiles/profile_detail.html'
def get_context_data(self, **kwargs):
kwargs['form'] = ReviewForm()
return super().get_context_data(**kwargs)
class ProfileUpdateView(LoginRequiredMixin, UpdateView):
model = Profile
fields = ('bio', 'photo', 'market')
template_name = 'profiles/profile_edit.html'
class ProfileListView(LoginRequiredMixin, ListView):
model = Profile
context_object_name = 'profiles_list'
template_name = 'profiles/profiles_list.html'
class ProfileCreateView(LoginRequiredMixin, CreateView):
form_class = UserProfileForm
model = Profile
template_name = 'profiles/profile_reg.html'
def form_valid(self, form):
profile = form.save(commit=False)
profile.user = self.request.user
profile.save()
return super().form_valid(form)
@expert_required
def create_profile(request):
form = UserProfileForm()
if request.method == 'POST':
form = UserProfileForm(request.POST, request.FILES)
user = get_user_model().objects.order_by('-id')[:1]
profile = form.save(commit=False)
for item in user:
user = item
profile.user = user
profile.save()
return redirect('profile', profile.slug)
return render(request, 'profiles/profile_reg.html', {"form": form})
class CompleteProfileView(CreateView):
pass
def ask_expert(request, slug):
form = MessageForm()
profile = Profile.objects.get(slug=slug)
if request.method == 'POST':
form = MessageForm(request.POST)
if form.is_valid():
client = Client(name=request.user.username, email=request.user.email, client_of=profile)
client.save()
f_rom = request.user
to = profile
subject = form.cleaned_data['subject']
message = form.cleaned_data['message']
new_message = Message(to=to, subject=subject, message=message, f_rom=f_rom)
new_message.save()
# send_welcome_email(name, expert.user, expert.user.email)
return redirect('home')
return render(request, 'profiles/ask.html', {"form": form})
# for both or just the expert?
def retrieve_messages(request, slug):
profile = Profile.objects.get(slug=slug)
messages = profile.get_messages()
print("Profile",profile)
print("Messages",messages)
return render(request, 'profiles/dms.html', {"messages": messages, "profile": profile})
def MessageView(request, msg_id):
form = ReplyForm()
message = Message.objects.get(pk=msg_id)
if request.method == 'POST':
print("RESPONSE FROM EXPERT",request.user.is_expert)
form = ReplyForm(request.POST)
if form.is_valid():
reply = form.save(commit=False)
reply.sender = request.user
reply.message = message
reply.save()
return redirect('message', message.id)
return render(request, 'messages/message_details.html', {"message": message, "form": form})
# for both?
def reply_msg(request, msg_id):
form = ReplyForm()
message = Message.objects.get(id=msg_id)
if request.method == 'POST':
form = ReplyForm(request.POST)
if form.is_valid():
sender = request.user.email
name = message.f_rom.name
receiver = message.f_rom.email
subject = message.subject
response = form.cleaned_data['response']
send_client_email(subject, sender, response, name, receiver)
send_mail(subject, response, sender, [receiver],fail_silently=False,)
return redirect('message', message.id)
return render(request, 'email/reply.html', {"form": form, "message": message})
@require_POST
@csrf_exempt
def user_follow(request):
profile_id = request.POST.get('id')
action = request.POST.get('action')
profile = Profile.objects.get(id=profile_id)
if profile_id and action:
try:
to_follow = profile
followr = request.user
print(f"{followr} WANTS TO FOLLOW {to_follow}")
if action == 'follow':
profile.followers.add(followr)
else:
profile.followers.remove(followr)
return JsonResponse({"status": "ok"})
except get_user_model().DoesNotExist:
return JsonResponse({"status": "error"})
return JsonResponse({"status": "error"})
def review(request, slug):
form = ReviewForm()
profile = Profile.objects.get(slug=slug)
if request.method == 'POST':
form = ReviewForm(request.POST)
if request.user in profile.clients.all():
print('REPEAT!!')
return redirect('profile', slug)
if form.is_valid():
print("IT IS VALID")
review = form.save(commit=False)
review.user = request.user
review.reviewed = profile
review.save()
print("REVIEW",review.user, review.reviewed)
return redirect('profile', slug)
return render(request, 'profiles/review.html', {"form": form})
def show_followers(request, slug):
profile = Profile.objects.get(slug=slug)
profile_followers = profile.followers.all()
print("FOLLOWERS",profile_followers)
return render(request, 'profiles/followers.html', {"profile_followers": profile_followers})
def client_messages(request):
user = request.user
messages = Message.objects.filter(f_rom=user).all()
return render(request, 'messages/client_messages.html', {"messages": messages})
def search_profile(request):
search_term = request.GET.get('searchword')
results = Profile.search_profile(search_term)
return render(request, 'profiles/search.html', {"results": results, "term": search_term})
```
#### File: arifa/users/email.py
```python
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
from django.conf import settings
from profiles.models import Profile
def send_client_email(subject, sender, response, name, receiver):
# Creating message subject and sender
subject = subject
sender = sender
body = response
# passing in the context variables
text_content = render_to_string('email/clientemail.txt',{"name": name, "body": body})
html_content = render_to_string('email/clientemail.html',{"name": name, "body": body })
msg = EmailMultiAlternatives(subject,text_content,sender,[receiver])
msg.attach_alternative(html_content,'text/html')
msg.send()
```
#### File: arifa/users/views.py
```python
from django.shortcuts import render, redirect
from django.views.generic import CreateView, View
from django.urls import reverse_lazy
from .forms import CustomUserCreationForm, ExpertSignUpForm, NewsLetterForm, InvestorSignUpForm
from .models import CustomUser
from django.http import JsonResponse
from django.contrib.auth import login
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.http import HttpResponse
from posts.forms import PostCreateForm
from posts.models import Post
from profiles.models import Profile
class SignUpView(CreateView):
form_class = CustomUserCreationForm
success_url = reverse_lazy('create_profile')
template_name = 'signup/signup.html'
def form_valid(self, form):
name = form.cleaned_data['username']
email = form.cleaned_data['email']
# send_welcome_email(name, email)
return super().form_valid(form)
class InvestorSignUpView(CreateView):
model = CustomUser
form_class = InvestorSignUpForm
template_name = 'signup/registration_form.html'
def get_context_data(self, **kwargs):
kwargs['user_type'] = 'investor'
return super().get_context_data(**kwargs)
def form_valid(self, form):
user = form.save()
login(self.request, user)
return redirect('home')
class ExpertSignUpView(CreateView):
model = CustomUser
form_class = ExpertSignUpForm
template_name = 'signup/registration_form.html'
def get_context_data(self, **kwargs):
kwargs['user_type'] = 'expert'
return super().get_context_data(**kwargs)
def form_valid(self, form):
user = form.save()
login(self.request, user)
return redirect('create_profile')
@login_required
def home(request):
form = PostCreateForm()
user = request.user
profiles = Profile.objects.all()
p = ''
val = False
if user.is_expert:
for profile in profiles:
if profile.user == user:
val = True
p = profile
print(val)
print(p)
posts = Post.objects.all().order_by('-pk')
context = {"form": form, "posts": posts, "profiles": profiles, "val": val, "p": p}
return render(request, 'home.html', context)
def landing_page(request):
form = NewsLetterForm()
return render(request, 'landing.html', {"form": form})
``` |
{
"source": "JoyMbugua/auth-app",
"score": 2
} |
#### File: auth-app/authusers/views.py
```python
from django.shortcuts import render
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status, permissions
import pyotp
import base64
from rest_framework_simplejwt.tokens import RefreshToken
from .serializers import CustomUserSerializer
from .models import CustomUser
from .models import MagicLink
from .utils import send_operations
class UserLogin(APIView):
"""
view for handling login post requests
"""
def post(self, request):
# check if a user with that email exists
email = request.data.get('email')
phone = request.data.get('phone_number')
user = None
try:
if email:
user = CustomUser.objects.get(email=email)
if phone:
user = CustomUser.objects.get(phone_number=phone)
except CustomUser.DoesNotExist:
return Response(status=status.HTTP_400_BAD_REQUEST)
send_operations(request, user)
return Response({'status':201, 'userdata': user.username})
class CustomUserCreate(APIView):
"""
Creates a user
"""
permission_classes = (permissions.AllowAny, )
def post(self, request):
serializer = CustomUserSerializer(data=request.data)
if serializer.is_valid():
user = serializer.save()
if user:
user.counter += 1
user.save()
send_operations(request, user)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(status=status.HTTP_400_BAD_REQUEST)
class DashboardView(APIView):
"""
a protected view
"""
permission_classes = (permissions.IsAuthenticated,)
def get(self, request):
return Response(data={"message": "welcome home"}, status=status.HTTP_200_OK)
class VerifyOTPView(APIView):
"""
verifies entered otp and manually generates a jwt token for a user
"""
def get_tokens_for_user(self, user, otp):
"""
generates jwt with otp code
"""
refresh = RefreshToken.for_user(user)
refresh['otp'] = otp
return {
'refresh': str(refresh),
'access': str(refresh.access_token),
}
def post(self, request):
username = request.data.get('username')
print("username",username)
user = CustomUser.objects.get(username=username)
if user is not None:
key = base64.b32encode(user.username.encode())
otp = pyotp.HOTP(key)
if otp.verify(request.data['otpCode'], user.counter):
user.isVerified = True
user.code = otp.at(user.counter)
user.save()
token = self.get_tokens_for_user(user, user.code)
return Response({'status': 200, 'message': 'otp verified', 'token': token})
else:
return Response({'status': 400, 'message': 'wrong otp code'})
return Response({'status': 400, 'message': 'user does not exist'})
class LoginUserFromEmail(APIView):
"""
creates a jwt from url associated with user
"""
def post(self,request):
user = CustomUser.objects.last()
if user is not None:
magic_link = MagicLink.objects.get(user=user)
magic_link_token = magic_link.get_tokens_for_user(user)
return Response({'status': 200, 'message': 'magiclink ok', 'token': magic_link_token})
return Response({'status': 400, 'message': 'user does not exist'})
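# A sketch of the counter-based OTP scheme used above, runnable on its own
# (username and counter values are hypothetical):
#   import base64, pyotp
#   key = base64.b32encode("alice".encode())
#   hotp = pyotp.HOTP(key)
#   code = hotp.at(1)        # code generated for counter == 1
#   hotp.verify(code, 1)     # -> True; any other counter -> False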
``` |
{
"source": "JoyMbugua/code.ly",
"score": 2
} |
#### File: code.ly/profiles/views.py
```python
from rest_framework import serializers
from django.contrib.auth.mixins import LoginRequiredMixin
from django.shortcuts import render
from django.views.generic import ListView, DetailView, UpdateView
from rest_framework.response import Response
from rest_framework.views import APIView
from django.contrib.auth.decorators import login_required
from .models import Profile
from .serializer import ProfileSerializer
class ProfileDetailView(LoginRequiredMixin, DetailView):
model = Profile
template_name = 'profiles/profile_details.html'
class ProfileEditView(UpdateView):
model = Profile
fields = '__all__'
template_name = 'profiles/profile_edit.html'
class ProfileListView(LoginRequiredMixin, ListView):
model = Profile
template_name = 'profiles/profile_list.html'
context_object_name = 'profiles'
def get_queryset(self):
return Profile.objects.all().exclude(user=self.request.user)
@login_required
def my_profile(request, username):
username = request.user.username
profile = Profile.objects.filter(user__username=username)
print("**********************")
print('PROFILE:',profile)
return render(request, 'profiles/user_profile.html', {'user_profile': profile})
class ProfileList(APIView):
def get(self, request):
profiles = Profile.objects.all()
serializers = ProfileSerializer(profiles, many=True)
return Response(serializers.data)
```
#### File: code.ly/projcts/views.py
```python
from django.shortcuts import render
from django.http import JsonResponse
from django.contrib.auth.mixins import LoginRequiredMixin
from django.views.generic import ListView, DetailView
from django.views.generic.edit import DeleteView, UpdateView, CreateView
from django.urls import reverse_lazy
from rest_framework.response import Response
from rest_framework.views import APIView
from .models import Project, Review
from .serializer import ProjectSerializer
class ProjectCreateView(LoginRequiredMixin, CreateView):
model = Project
template_name = 'projcts/new_project.html'
fields = ('title', 'description', 'image', 'link',)
def form_valid(self, form):
form.instance.creator = self.request.user
return super().form_valid(form)
class ReviewCreateView(LoginRequiredMixin, CreateView):
model = Review
fields = ('project', 'design', 'usability', 'content',)
template_name = 'projcts/newreview.html'
def form_valid(self, form):
form.instance.reviewer = self.request.user
return super().form_valid(form)
class ProjectListView(ListView):
model = Project
template_name = 'projcts/projects_list.html'
class ProjectDetailView(DetailView):
model = Project
context_object_name = 'project'
template_name = 'projcts/project_details.html'
class ProjectEditView(UpdateView):
model = Project
fields = ('title', 'description',)
template_name = 'projcts/project_edit.html'
class ProjectDeleteView(DeleteView):
model = Project
template_name = 'projcts/project_delete.html'
success_url = reverse_lazy('project_list')
def project_search(request):
keyword = request.GET.get('searchword')
results = Project.search_project(keyword)
message = f"{keyword}".capitalize()
return render(request, 'projcts/search.html', {"message": message, "results": results})
class ProjectsList(APIView):
def get(self, request):
projects = Project.objects.all()
serializers = ProjectSerializer(projects, many=True)
return Response(serializers.data)
``` |
{
"source": "JoyMbugua/csv_project",
"score": 2
} |
#### File: csv_project/orders/models.py
```python
from django.db import models
from postgres_copy import CopyManager
# Create your models here.
class Order(models.Model):
invoice_no = models.CharField(max_length=15, null=True)
stock_code = models.CharField(max_length=15, null=True)
description = models.CharField(max_length=100, null=True)
quantity = models.IntegerField()
invoice_date = models.DateTimeField(auto_now_add=True, null=True)
customer_id = models.IntegerField(blank=True, null=True)
unit_price = models.DecimalField(max_digits=15, decimal_places=2, null=True)
country = models.CharField(max_length=100, null=True)
objects = CopyManager()
def __str__(self):
return self.description
class Data(models.Model):
file_name = models.FileField(upload_to='csvs')
uploaded = models.DateTimeField(auto_now_add=True)
entered = models.BooleanField(default=False)
class Meta:
verbose_name_plural = "Data"
def __str__(self):
return f"File: {self.pk}"
``` |
{
"source": "JoyMbugua/Fitness-api",
"score": 3
} |
#### File: v1/model/exercise.py
```python
class ExerciseModel:
exercises = []
def __init__(self, name,period, description):
self.exercise_id = len(ExerciseModel.exercises) + 1
self.name = name
self.period = period
self.description = description
def save(self):
data = dict(
exercise_id = self.exercise_id,
name = self.name,
period = self.period,
description = self.description
)
ExerciseModel.exercises.append(data)
return ExerciseModel.exercises
@classmethod
def get_exercise_by_id(cls, id):
for exercise in ExerciseModel.exercises:
if exercise['exercise_id'] == id:
return exercise
# # for a keywords search/search endpoint? # return a boolean
# @classmethod
# def get_exercise_by_name(cls, name):
# for exercise in ExerciseModel.exercises:
# if exercise['name'] == name:
# return exercise
@classmethod
def get_all_exercises(cls):
return ExerciseModel.exercises
def search_exercise(self, name):
search_results = []
for exercise in ExerciseModel.exercises:
if exercise['name'] == name:
search_results.append(exercise)
if search_results:
return search_results
return {
"status": "Ok",
"search_results": len(search_results)
}
def delete_ex(self):
# exercises holds plain dicts, so remove the saved dict rather than self
exercise = ExerciseModel.get_exercise_by_id(self.exercise_id)
if exercise:
ExerciseModel.exercises.remove(exercise)
```
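The in-memory model above can be exercised directly:
```python
ex = ExerciseModel("push ups", "morning", "3 sets of 12 reps")
ex.save()
print(ExerciseModel.get_exercise_by_id(1)["name"])   # -> push ups
print(len(ExerciseModel.get_all_exercises()))        # -> 1
```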
#### File: v1/model/workout_routine.py
```python
from .users import UserModels
class WorkoutRoutineModel:
workout_routines = []
def __init__(self, workout, sets, duration_in_mins, complete):
self.id = len(WorkoutRoutineModel.workout_routines) + 1
self.workout = workout
self.sets = sets
self.duration_in_mins = duration_in_mins
self.complete = complete
# self.username = username
def save_workout_routine(self):
data = dict(
id = self.id,
workout = self.workout,
sets = self.sets,
duration_in_mins = self.duration_in_mins,
complete = self.complete,
# username = self.username
)
WorkoutRoutineModel.workout_routines.append(data)
return self.workout_routines
@classmethod
def get_workout_routine_by_id(cls, id):
for workout_routine in WorkoutRoutineModel.workout_routines:
if workout_routine['id'] == id:
return workout_routine
def workout_complete(self, id):
workout = WorkoutRoutineModel.get_workout_routine_by_id(id)
workout['complete'] = True
return workout
def workout_delete(self):
return WorkoutRoutineModel.workout_routines.clear()
def delete_workout(self):
# workout_routines holds plain dicts, so remove the saved dict rather than self
routine = WorkoutRoutineModel.get_workout_routine_by_id(self.id)
if routine:
WorkoutRoutineModel.workout_routines.remove(routine)
# def get_all_workout_routines(self, user_id):
# user_workout_routine = []
#
# for workout_routine in WorkoutRoutineModel.workout_routines:
# if workout_routine['user_id'] == self.user_id:
# user_workout_routine.append(workout_routine)
# return user_workout_routine
def get_workout_plans(self):
return WorkoutRoutineModel.workout_routines
```
#### File: Fitness-api/app/__init__.py
```python
from flask import Flask
from config import config_options
from flask_sqlalchemy import SQLAlchemy
from flask_restful import Resource, Api
db = SQLAlchemy()
api = Api()
def create_app(config_name):
app = Flask(__name__)
app.config.from_object(config_options[config_name])
app.url_map.strict_slashes = False
api.init_app(app)
# blueprint
from .auth.v1 import version1 as version1_blueprint
app.register_blueprint(version1_blueprint, url_prefix='/api/v1', strict_slashes=False)
db.init_app(app)
return app
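# Typical usage (a sketch; the name must be a key in config_options):
#   app = create_app('development')
#   app.run()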
``` |
{
"source": "JoyMbugua/instaclone",
"score": 2
} |
#### File: instaclone/users/email.py
```python
from django.core.mail import EmailMultiAlternatives
from django.template.loader import render_to_string
def send_confirm_email(name,receiver):
# Creating message subject and sender
subject = 'Email Confirmation'
sender = '<EMAIL>'
# passing in the context variables
text_content = render_to_string('email/confirm.txt',{"name": name})
html_content = render_to_string('email/confirm.html',{"name": name})
msg = EmailMultiAlternatives(subject,text_content,sender,[receiver])
msg.attach_alternative(html_content,'text/html')
msg.send()
```
#### File: instaclone/users/tests.py
```python
from django.test import TestCase
from .models import Image, Profile, Comment, User
class ImageModelTest(TestCase):
def setUp(self):
self.image_name = 'vacay'
self.user = User(username='sam')
self.user.save()
self.profile = Profile(bio="Never say never", user=self.user)
self.new_img = Image(id=40, caption="tembea kenya", profile=self.profile)
def test_instance(self):
self.assertTrue(isinstance(self.new_img, Image))
def test_delete_method(self):
self.new_img.delete_image()
self.assertTrue(self.new_img.DoesNotExist)
def test_update_method(self):
new_caption = 'testing'
self.new_img.update_image(new_caption)
self.assertEqual(self.new_img.caption, 'testing')
class ProfileModelTest(TestCase):
def setUp(self):
bio = 'here to stay'
user = User(username='sam')
user.save()
self.profile = Profile(bio=bio, user=user)
def test_instance(self):
self.assertTrue(isinstance(self.profile, Profile))
def test_profile_images(self):
self.assertTrue(self.profile.profile_images() is not None)
class CommentModelTest(TestCase):
def setUp(self):
user = User(username='sam')
body = 'nice!!'
self.comment = Comment(user=user, body=body)
def test_instance(self):
self.assertTrue(isinstance(self.comment, Comment))
```
#### File: instaclone/users/views.py
```python
from django.http.response import Http404, HttpResponseRedirect
from django.shortcuts import render, redirect
from .forms import ProfileUpdateForm, NewImageForm, CommentModelForm
from .models import Profile, Image, Like
from django.contrib.auth.decorators import login_required
from django.views.generic import ListView, DetailView
from .email import send_confirm_email
@login_required
def home(request):
profile = Profile.objects.get(user=request.user)
suggestions = Profile.objects.all()
# check who we are following
users = [user for user in profile.following.all()]
images = []
# get the posts of people we are following
for u in users:
p = Profile.objects.get(user=u)
p_images = p.images.all()
images.append(p_images)
my_posts = profile.profile_images()
images.append(my_posts)
return render(request, 'insta/home.html', {'profile': profile, 'images': images, 'suggestions': suggestions})
@login_required
def profile(request):
profile = Profile.objects.get(user=request.user)
return render(request, 'insta/profile.html', {"profile": profile})
@login_required
def update_profile(request):
form = ProfileUpdateForm(request.POST, request.FILES)
return render(request, 'insta/update_profile.html', {"form": form})
@login_required
def upload(request):
current_user = request.user
if request.method == 'POST':
form = NewImageForm(request.POST, request.FILES)
if form.is_valid():
image = form.save(commit=False)
image.profile = current_user.profile
image.save()
return redirect('home')
else:
form = NewImageForm()
return render(request, 'insta/upload.html', {"form": form})
@login_required
def show_image(request, id):
image = Image.objects.get(id=id)
profile = Profile.objects.get(user=request.user)
if request.method == 'POST':
form = CommentModelForm(request.POST)
if form.is_valid():
comment = form.save(commit=False)
comment.user = request.user
image_id = request.POST.get('image_id')
comment.image = Image.objects.get(id=image_id)
comment.save()
form = CommentModelForm()
return redirect('show_image', image.id)
else:
form = CommentModelForm()
return render(request, 'insta/image_details.html', {"image": image, "profile": profile, "form": form})
@login_required
def image_like(request):
user = request.user
if request.method == 'POST':
image_id = request.POST.get('image_id')
image_obj = Image.objects.get(id=image_id)
# profile = Profile.objects.get(user=user)
if user in image_obj.Likes.all():
image_obj.Likes.remove(user)
else:
image_obj.Likes.add(user)
like, created = Like.objects.get_or_create(user=user, image_id=image_id)
if not created:
if like.value == 'Like':
like.value = 'Unlike'
else:
like.value = 'Like'
else:
like.value = 'Like'
image_obj.save()
like.save()
return redirect('home')
@login_required
def search_results(request):
keyword = request.GET.get('image')
images = Image.search_by_term(keyword)
message = f"{keyword}".capitalize()
return render(request, 'insta/search.html', {"message": message, "images": images})
@login_required
def follow_unfollow(request):
if request.method == 'POST':
my_profile = Profile.objects.get(user=request.user)
pk = request.POST.get('profile_pk')
obj = Profile.objects.get(pk=pk)
if obj.user in my_profile.following.all():
my_profile.following.remove(obj.user)
else:
my_profile.following.add(obj.user)
return redirect(request.META.get('HTTP_REFERER'))
return redirect('newprofile')
class ProfileListView(ListView):
model = Profile
template_name = 'profiles/peoplelist.html'
context_object_name = 'profiles'
def get_queryset(self):
return Profile.objects.all().exclude(user=self.request.user)
class ProfileDetailView(DetailView):
model = Profile
template_name = 'profiles/detail.html'
def get_object(self, **kwargs):
pk = self.kwargs.get('pk')
view_profile = Profile.objects.get(pk=pk)
return view_profile
def get_context_data(self, **kwargs):
context = super().get_context_data(**kwargs)
view_profile = self.get_object()
my_profile = Profile.objects.get(user=self.request.user)
if view_profile.user in my_profile.following.all():
follow = True
else:
follow = False
context['follow'] = follow
return context
def confirmation(request):
user = request.user
email = user.email
send_confirm_email(user.username, email)
return HttpResponseRedirect('login')
``` |
{
"source": "JoyMbugua/jirani-app",
"score": 3
} |
#### File: jirani-app/profiles/tests.py
```python
from django.test import TestCase
from .models import Profile
from django.contrib.auth import get_user_model
from locations.models import Neighborhood, Admin
class ProfileModelTest(TestCase):
def setUp(self):
self.user1 = get_user_model().objects.create_user(username = 'testuser', email = '<EMAIL>', password='<PASSWORD>')
self.admin1 = Admin(person=self.user1)
self.admin1.save()
self.area1 = Neighborhood.objects.create(name='area51', location='nairobi', admin=self.admin1)
self.area1.save()
self.new_profile = Profile(user = self.user1, bio='here to test', neighborhood=self.area1)
def test_profile_instance(self):
self.assertTrue(isinstance(self.new_profile, Profile))
def test_string_method(self):
self.assertEqual(str(self.new_profile), self.user1.username)
``` |
{
"source": "joy-mdy-geo/ufc",
"score": 3
} |
#### File: joy-mdy-geo/ufc/main.py
```python
from flask import Flask, render_template, jsonify, request
import controller
import pyperclip
app = Flask(__name__)
@app.route('/')
@app.route('/index')
def index():
return render_template('index.html')
@app.route('/detect')
def detect():
return render_template('detect.html')
@app.route('/about')
def about():
return render_template('about.html')
@app.route('/faq')
def faq():
return render_template('faq.html')
@app.route('/_convert', methods=["POST"])
def convert():
from_text = request.form['from_text']
from_encoding = request.form['from_encoding']
to_encoding = request.form['to_encoding']
res = controller.convert(from_encoding, to_encoding, from_text)
return jsonify(result=res)
@app.route('/_copy', methods=["POST"])
def copy():
txt = request.form['to_text']
pyperclip.copy(txt)
return jsonify(result=txt)
@app.route('/_detect', methods=["POST"])
def detecttt():
txt = request.form['text']
txt = controller.detect(txt)
return jsonify(result=txt)
if __name__ == "__main__":
app.run(debug=True)
``` |
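With the app running locally (`python main.py`), the conversion endpoint can be driven with form-encoded POSTs. A sketch, assuming Flask's default port and made-up encoding names (`controller.convert` defines the real ones):
```python
import requests

resp = requests.post(
    "http://127.0.0.1:5000/_convert",
    data={"from_text": "abc", "from_encoding": "unicode", "to_encoding": "zawgyi"},
)
print(resp.json()["result"])
```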
{
"source": "JoyMonteiro/climt-1",
"score": 2
} |
#### File: _components/dcmip/component.py
```python
from sympl import (
DiagnosticComponent, initialize_numpy_arrays_with_properties,
get_constant)
import logging
import numpy as np
try:
from . import _dcmip
except ImportError as error:
logging.warning(
'Import Failed. DCMIP initial conditions will not be available.')
print(error)
class DcmipInitialConditions(DiagnosticComponent):
"""
Climt interface to the DCMIP initial conditions.
Currently only provides interfaces to tests 4 and 5.
"""
input_properties = {
'latitude': {
'dims': ['*'],
'units': 'degrees_north',
},
'longitude': {
'dims': ['*'],
'units': 'degrees_east',
},
'air_pressure': {
'dims': ['mid_levels', '*'],
'units': 'Pa',
},
'atmosphere_hybrid_sigma_pressure_a_coordinate_on_interface_levels': {
'dims': ['interface_levels', '*'],
'units': 'dimensionless',
'alias': 'ak'
},
'atmosphere_hybrid_sigma_pressure_b_coordinate_on_interface_levels': {
'dims': ['interface_levels', '*'],
'units': 'dimensionless',
'alias': 'bk'
},
}
diagnostic_properties = {
'eastward_wind': {
'dims': ['mid_levels', '*'],
'units': 'm s^-1',
},
'northward_wind': {
'dims': ['mid_levels', '*'],
'units': 'm s^-1',
},
'air_temperature': {
'dims': ['mid_levels', '*'],
'units': 'degK',
},
'surface_geopotential': {
'dims': ['*'],
'units': 'm^2 s^-2',
},
'surface_air_pressure': {
'dims': ['*'],
'units': 'Pa',
},
'specific_humidity': {
'dims': ['mid_levels', '*'],
'units': 'g/g',
},
'air_pressure': {
'dims': ['mid_levels', '*'],
'units': 'Pa',
},
'air_pressure_on_interface_levels': {
'dims': ['interface_levels', '*'],
'units': 'Pa',
},
}
def __init__(self,
condition_type='baroclinic_wave',
add_perturbation=True,
moist=False,
**kwargs):
"""
Initialize the DCMIP module.
Args:
condition_type (str, optional):
The type of initial conditions desired. Can be
one of :code:`'baroclinic_wave'` or
:code:`'tropical_cyclone'`.
add_perturbation (bool, optional):
Whether a perturbation must be added. Only applies
to the baroclinic wave test.
moist (bool, optional):
    Whether the initial conditions include moisture;
    must be True for the tropical cyclone test.
"""
if condition_type not in ['baroclinic_wave', 'tropical_cyclone']:
    raise ValueError("condition_type has to be one "
                     "of 'baroclinic_wave' or 'tropical_cyclone'")
if condition_type == 'tropical_cyclone' and not moist:
    raise ValueError("moist must be True for the tropical cyclone test")
self._condition_type = condition_type
self._add_perturbation = add_perturbation
self._moist = moist
super(DcmipInitialConditions, self).__init__(**kwargs)
def array_call(self, state):
toa_pressure = get_constant('top_of_model_pressure', 'Pa')
rd = get_constant('gas_constant_of_dry_air', 'J kg^-1 K^-1')
cpd = get_constant('heat_capacity_of_dry_air_at_constant_pressure', 'J kg^-1 K^-1')
longitude = np.radians(state['longitude'])
latitude = np.radians(state['latitude'])
diagnostics = initialize_numpy_arrays_with_properties(
self.diagnostic_properties, state, self.input_properties
)
if self._condition_type == 'baroclinic_wave':
u, v, t, q, p_surface, phi_surface = _dcmip.get_baroclinic_wave_ics(
state['air_pressure'],
longitude, latitude,
perturb=self._add_perturbation,
moist_sim=self._moist)
elif self._condition_type == 'tropical_cyclone':
u, v, t, q, p_surface, phi_surface = _dcmip.get_tropical_cyclone_ics(
state['air_pressure'],
longitude, latitude,
perturb=self._add_perturbation,
moist_sim=self._moist)
diagnostics['eastward_wind'][:] = u
diagnostics['northward_wind'][:] = v
diagnostics['air_temperature'][:] = t
diagnostics['surface_geopotential'][:] = phi_surface
diagnostics['specific_humidity'][:] = q
diagnostics['surface_air_pressure'][:] = p_surface
p_interface = (
state['ak'] + state['bk']*(p_surface - toa_pressure))
delta_p = p_interface[1:, :] - p_interface[:-1, :]
rk = rd/cpd
diagnostics['air_pressure_on_interface_levels'][:] = p_interface
diagnostics['air_pressure'][:] = (
(p_interface[1:, :]**(rk+1) - p_interface[:-1, :]**(rk+1)) / (
(rk+1) * delta_p
)
) ** (1./rk)
return diagnostics
``` |
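Building a full sympl model state is involved, so the sketch below only shows how the component itself is configured and invoked; `state` is assumed to satisfy `input_properties` above:
```python
# Hypothetical invocation; `state` must contain the quantities listed in
# input_properties (latitude, longitude, air_pressure, ak, bk).
dcmip = DcmipInitialConditions(condition_type='tropical_cyclone', moist=True)
diagnostics = dcmip(state)  # returns the fields listed in diagnostic_properties
```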
{
"source": "JoyMonteiro/MetPy",
"score": 2
} |
#### File: metpy/plots/cartopy_utils.py
```python
import cartopy.feature as cfeat
import cartopy.io.shapereader as shpreader
from ..cbook import get_test_data
class USCountiesFeature(cfeat.NaturalEarthFeature):
"""A simple interface to US County shapefiles."""
def __init__(self, scale, **kwargs):
"""Create USCountiesFeature instance."""
super(USCountiesFeature, self).__init__('', 'us_counties', scale, **kwargs)
def geometries(self):
"""Return an iterator of (shapely) geometries for this feature."""
# Ensure that the associated files are in the cache
fname = 'us_counties_{}'.format(self.scale)
for extension in ['.dbf', '.shx']:
get_test_data(fname + extension)
path = get_test_data(fname + '.shp', as_file_obj=False)
return iter(tuple(shpreader.Reader(path).geometries()))
def with_scale(self, new_scale):
"""
Return a copy of the feature with a new scale.
Parameters
----------
new_scale
The new dataset scale, i.e. one of '500k', '5m', or '20m'.
Corresponding to 1:500,000, 1:5,000,000, and 1:20,000,000
respectively.
"""
return USCountiesFeature(new_scale, **self.kwargs)
USCOUNTIES = USCountiesFeature('20m', facecolor='None')
``` |
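A short sketch of how the feature is typically added to a cartopy map; the first call fetches the county shapefiles into MetPy's test-data cache:
```python
import cartopy.crs as ccrs
import matplotlib.pyplot as plt

from metpy.plots import USCOUNTIES

ax = plt.axes(projection=ccrs.LambertConformal())
ax.set_extent([-105, -93, 35, 43], crs=ccrs.PlateCarree())  # illustrative extent
ax.add_feature(USCOUNTIES.with_scale('5m'), edgecolor='gray')
plt.show()
```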
{
"source": "JoyMonteiro/parSPectral",
"score": 3
} |
#### File: parSPectral/pspec/forcing.py
```python
from numpy import arange, pi, zeros, exp
#from numpy.fft.helper import ifftshift
from scipy import *
from spectralTransform import specTrans2d;
class specForcing(object):
# Forcing is between kmin and kmax
# correlation function = 0(white noise) and 1(infinitely correlated)
# default value = 0.5 (Ref. Maltrud (1990)).
def __init__(self, numPointsX, numPointsY,kmin=20.,kmax=30.,
magnitude=1e4, correlation=0.5, length = 2*pi,
xType='Fourier', yType='Fourier'):
self.xn = numPointsX;
self.yn = numPointsY;
self.xType = xType;
self.yType = yType;
self.kmin = kmin;
self.kmax = kmax;
self.magnitude = magnitude;
self.corr = correlation;
self.trans = specTrans2d(numPointsX, numPointsY, xType, yType);
#Prepare the wavenumber arrays
self.kxx = (2*pi/length)*concatenate((arange(0,numPointsX/2),arange(-numPointsX/2,0)));
self.kyy = (2*pi/length)*concatenate((arange(0,numPointsY/2),arange(-numPointsY/2,0)));
# Forcing is defined in wavenumber space and later transformed to real space
def forcingFn(self,F0):
[kx,ky]=meshgrid(self.kxx,self.kyy);
# Forcing defined as a shell in wavenumber space
A = zeros((self.yn,self.xn));
A[sqrt(kx**2+ky**2) < self.kmax] = 1.0;
A[sqrt(kx**2+ky**2) < self.kmin] = 0.0;
signal = self.magnitude * A * exp(rand(self.yn,self.xn)*1j*2*pi);
# Markovian forcing
F = (sqrt(1-self.corr**2))*signal + self.corr*F0
self.trans.invTrans(F);
return self.trans.outArr.real.copy();
```
#### File: parSPectral/pspec/inversion.py
```python
import numpy as np;
from numpy import arange, pi, zeros, exp;
from spectralTransform import specTrans2d;
from pylab import *;
class specInv(object):
def __init__(self, numPointsX, numPointsY, length=2*pi,
xType='Fourier', yType='Fourier'):
self.xn = numPointsX;
self.yn = numPointsY;
self.trans = specTrans2d(numPointsX, numPointsY, xType, yType);
#Prepare the wavenumber arrays
kxx = 2*pi*(concatenate(( arange(0,numPointsX/2),\
arange(-numPointsX/2,0)))) /length;
kyy = 2*pi*(concatenate((arange(0,numPointsY/2),\
arange(-numPointsY/2,0)))) /length;
[self.kx,self.ky] = meshgrid(kxx,kyy);
def laplacian(self, field):
self.trans.fwdTrans(field);
temp = self.trans.intArr;
self.temp1 = self.trans.intArr.copy();
delsq = -(self.kx**2+self.ky**2);
#delsq[0,0] = 1;
# Filter
temp[sqrt(self.kx**2+self.ky**2) > min(self.xn, self.yn)/2.5] = 0;
temp *= delsq;
self.trans.invTrans();
return self.trans.outArr.real.copy();
def invLaplacian(self, field):
self.trans.fwdTrans(field);
temp = self.trans.intArr;
delsq = -(self.kx**2+self.ky**2);
delsq[0,0] = 1;
temp /= delsq;
# Filter
temp[sqrt(self.kx**2+self.ky**2) > min(self.xn, self.yn)/2.5] = 0;
self.trans.invTrans();
return self.trans.outArr.real.copy();
```
#### File: parSPectral/pspec/sphTrans.py
```python
import numpy as np
import shtns
class Spharmt(object):
"""
wrapper class for commonly used spectral transform operations in
atmospheric models. Provides an interface to shtns compatible
with pyspharm (pyspharm.googlecode.com).
"""
def __init__(self,nlons,nlats,ntrunc,rsphere,gridtype='gaussian'):
"""initialize
nlons: number of longitudes
nlats: number of latitudes"""
self._shtns = shtns.sht(ntrunc, ntrunc, 1, \
shtns.sht_orthonormal+shtns.SHT_NO_CS_PHASE)
if gridtype == 'gaussian':
#self._shtns.set_grid(nlats,nlons,shtns.sht_gauss_fly|shtns.SHT_PHI_CONTIGUOUS,1.e-10)
self._shtns.set_grid(nlats,nlons,shtns.sht_quick_init|shtns.SHT_PHI_CONTIGUOUS,1.e-10)
elif gridtype == 'regular':
self._shtns.set_grid(nlats,nlons,shtns.sht_reg_dct|shtns.SHT_PHI_CONTIGUOUS,1.e-10)
self.lats = np.arcsin(self._shtns.cos_theta)
self.lons = (2.*np.pi/nlons)*np.arange(nlons)
self.nlons = nlons
self.nlats = nlats
self.ntrunc = ntrunc
self.nlm = self._shtns.nlm
self.degree = self._shtns.l
self.lap = -self.degree*(self.degree+1.0).astype(np.complex)
self.invlap = np.zeros(self.lap.shape, self.lap.dtype)
self.invlap[1:] = 1./self.lap[1:]
self.rsphere = rsphere
self.lap = self.lap/self.rsphere**2
self.invlap = self.invlap*self.rsphere**2
def grdtospec(self,data):
"""compute spectral coefficients from gridded data"""
return self._shtns.analys(data)
def spectogrd(self,dataspec):
"""compute gridded data from spectral coefficients"""
return self._shtns.synth(dataspec)
def getuv(self,vrtspec,divspec):
"""compute wind vector from spectral coeffs of vorticity and divergence"""
return self._shtns.synth((self.invlap/self.rsphere)*vrtspec, (self.invlap/self.rsphere)*divspec)
def getvrtdivspec(self,u,v):
"""compute spectral coeffs of vorticity and divergence from wind vector"""
vrtspec, divspec = self._shtns.analys(u, v)
return self.lap*self.rsphere*vrtspec, self.lap*self.rsphere*divspec
def getgrad(self,divspec):
"""compute gradient vector from spectral coeffs"""
vrtspec = np.zeros(divspec.shape, dtype=np.complex)
u,v = self._shtns.synth(vrtspec,divspec)
return u/self.rsphere, v/self.rsphere
```
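A minimal round-trip sketch using only the wrapper's own methods; grid size, truncation, and the random winds are illustrative:
```python
import numpy as np

sht = Spharmt(nlons=128, nlats=64, ntrunc=42, rsphere=6.37122e6)
u = np.random.randn(64, 128)                  # (nlats, nlons)
v = np.random.randn(64, 128)
vrtspec, divspec = sht.getvrtdivspec(u, v)    # winds -> vorticity/divergence coeffs
vrt_grid = sht.spectogrd(vrtspec)             # back to grid space
u2, v2 = sht.getuv(vrtspec, divspec)          # reconstructed winds
```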
#### File: parSPectral/pspec/test2DTurb.py
```python
from pylab import *;
import diffusion;
import forcing;
import pSpectral;
import RungeKutta;
import inversion;
ion();
Nx = 256*4;
Ny = 256*4;
xx = linspace(0,2*pi-2*pi/Nx,Nx);
yy = linspace(0,2*pi-2*pi/Ny,Ny);
[x,y]= meshgrid(xx,yy);
a = sin(x)+ cos(y)+ sin(2*x)* cos(2*y);
#a = sin(50*x)+cos(40*y)*sin(50*x);
diff = diffusion.specDiffusion(Nx,Ny, alpha=0, nu=1e-15, order = 8.);
p = pSpectral.parSpectral(Nx,Ny);
inv = inversion.specInv(Nx,Ny);
def dfdt(t,f, args=None):
omega = p.laplacian(f);
rhs = -p.jacobian(f,omega) ;
out = inv.invLaplacian(rhs);
return out;
def diffusion(dt, f):
#omega = p.laplacian(f);
out = diff.diffusionFn(dt, f);
#print amax(abs(omega - out));
#return inv.invLaplacian(out);
return (out);
delta = 2*pi/max(Nx,Ny);
stepfwd = RungeKutta.RungeKutta4(delta,dfdt, diffusion ,1);
t=0;
f=a;
dt=0.02;
while (t<50):
tnew,fnew = stepfwd.integrate(t,f,dt);
t = tnew;
f = fnew;
imshow(p.laplacian(fnew));
colorbar();
pause(1e-3);
clf();
```
#### File: parSPectral/pspec/testGillDim.py
```python
from pylab import *
import diffusion
import forcing
import pSpectral
import RungeKutta
import inversion
ion()
Nx = 256
Ny = 128
lengthX = 10*pi
lengthY = 4.
tx = linspace(-pi,pi-2*pi/Nx,Nx)
ty = linspace(0,pi,Ny)
xx = (lengthX)*(tx)/(2*pi)
yy = (lengthY/2.)*cos(ty)
[x,y] = meshgrid(xx,yy)
diff = diffusion.specDiffusion(Nx,Ny, alpha=0, nu=1e-10);
p = pSpectral.parSpectral(Nx,Ny, lengthX, lengthY, 'Fourier', 'Cosine')
def dfdt(t,f, args=None):
[u,v,n] = f
u1 = -g*p.partialX(n) +f0*v -epsilon*u
v1 = -g*p.ChebMatY(n) -f0*u -epsilon*v
n1 = -n0 -(H+n)*( p.partialX(u) + p.ChebMatY(v)) - epsilon*n
return u1,v1,n1
def diffusion(dt,f):
[u,v,n] = f
u1 = diff.diffusionFn(dt,u)
v1 = diff.diffusionFn(dt,v)
n1 = diff.diffusionFn(dt,n)
return u1,v1,n1
delta = min(2*pi/Nx, 1./Ny)
epsilon = 0.005
stepfwd = RungeKutta.RungeKutta4(delta,dfdt, diffusion ,1)
t=0
u0 = zeros((Ny,Nx))
v0 = zeros((Ny,Nx))
#n0 = 0.001*exp(- (x**2/0.1 + y**2/0.1))
L=2.
F = cos(pi/2./L * x)
F[abs(x)>L]=0
n0 =0.001* F * exp(-10*y**2/4.)
# n is the height perturbation
u = u0
v = v0
n = n0
f0 = y
g=1
H=0.01
c = sqrt(g*H)
dt=0.5*delta/c
ii = 0
while (t<1000):
tnew,[unew,vnew,nnew] = stepfwd.integrate(t,[u,v,n],dt)
t = tnew
[u,v,n] = [unew,vnew,nnew]
ii = ii+1
if mod(ii,10)==0:
clf();
#pcolormesh(x,y,u)
#pcolormesh(x,y,v)
contourf(x,y,n)
#contour(x,y,n,10,colors='k')
#xlim(-pi,pi)
#ylim(-pi,pi)
colorbar()
pause(1e-3)
```
#### File: parSPectral/pspec/testGillNonDim.py
```python
from pylab import *
import diffusion
import forcing
import pSpectral
import RungeKutta
import inversion
ion()
Nx = 256
Ny = 128
tx = linspace(-pi,pi-2*pi/Nx,Nx)
ty = linspace(0,pi,Ny)
lengthX = 10*pi
lengthY = 8.
xx = (lengthX)*(tx)/(2*pi)
yy = (lengthY/2.)*cos(ty)
[x,y] = meshgrid(xx,yy)
diff = diffusion.specDiffusion(Nx,Ny, alpha=0, nu=1e-6, order=8., length =
min(lengthX,lengthY));
p = pSpectral.parSpectral(Nx,Ny, lengthX, lengthY,'Fourier', 'Cosine')
def dfdt(t,f, args=None):
[u,v,pr] = f
u1 = -p.partialX(pr) + y*v - epsilon*u
v1 = -p.ChebMatY(pr) - y*u - epsilon*v
pr1 = (-Q - p.partialX(u) -p.ChebMatY(v))*tau -epsilon*pr
return u1,v1,pr1
def diffusion(dt,f):
[u,v,pr] = f
u1 = diff.diffusionFn(dt,u)
v1 = diff.diffusionFn(dt,v)
pr1 = diff.diffusionFn(dt,pr)
return u1, v1, pr1
delta = min(lengthX/Nx, lengthY/Ny)
stepfwd = RungeKutta.RungeKutta4(delta,dfdt, diffusion ,1)
L = 2.0
epsilon = 0.1
tau = 1.
t=0
F = cos(pi/2./L * x)
F[abs(x)>L]=0
Q = -F * exp(-10*y**2/4.)
#Q = exp(-10*y**2/4.-1*x**2/4.)
u0 = zeros((Ny,Nx))
v0 = zeros((Ny,Nx))
u = u0
v = v0
pr = -Q
c=1.
dt=0.5*delta/c
dt=0.003
ii = -1
while (t<10000):
tnew,[unew,vnew,prnew] = stepfwd.integrate(t,[u,v,pr],dt)
t = tnew
[u,v,pr] = [unew,vnew,prnew]
ii = ii+1
if mod(ii,100)==0:
clf();
#pcolormesh(x,y,u)
#pcolormesh(x,y,v)
contourf(x,y,pr,10)
colorbar()
pause(1e-3)
```
#### File: parSPectral/pspec/testIntegrator.py
```python
from pylab import *;
import RungeKutta;
def dummy(t, f, args):
return zeros(f.shape);
def dummyVel(f, args):
return 1.3;
u = zeros((10,10));
v = zeros((10,10));
z = zeros((10,10));
delta = 0.1;
stepfwd = RungeKutta.RungeKutta4(delta, dummy, dummy, dummyVel);
tnew, fnew = stepfwd.integrate(0, [u,v,z],0.1);
print(tnew);
``` |
{
"source": "JoyMonteiro/sympl",
"score": 3
} |
#### File: sympl/_core/combine_properties.py
```python
from .exceptions import InvalidPropertyDictError
from .tracers import get_tracer_input_properties
from .units import units_are_compatible
def combine_dims(dims1, dims2):
"""
Takes in two dims specifications and returns a single specification that
satisfies both, if possible. Raises an InvalidPropertyDictError if not.
Parameters
----------
dims1 : iterable of str
dims2 : iterable of str
Returns
-------
dims : iterable of str
Raises
------
InvalidPropertyDictError
If the two dims specifications cannot be combined
"""
if dims1 == dims2:
return dims1
dims_out = []
dims1 = set(dims1)
dims2 = set(dims2)
dims1_wildcard = '*' in dims1
dims1.discard('*')
dims2_wildcard = '*' in dims2
dims2.discard('*')
unmatched_dims = set(dims1).union(dims2).difference(dims_out)
shared_dims = set(dims1).intersection(dims2)
if dims1_wildcard and dims2_wildcard:
dims_out.insert(0, '*') # either dim can match anything
dims_out.extend(unmatched_dims)
elif not dims1_wildcard and not dims2_wildcard:
if shared_dims != set(dims1) or shared_dims != set(dims2):
raise InvalidPropertyDictError(
'dims {} and {} are incompatible'.format(dims1, dims2))
dims_out.extend(unmatched_dims)
elif dims1_wildcard:
if shared_dims != set(dims2):
raise InvalidPropertyDictError(
'dims {} and {} are incompatible'.format(dims1, dims2))
dims_out.extend(unmatched_dims)
elif dims2_wildcard:
if shared_dims != set(dims1):
raise InvalidPropertyDictError(
'dims {} and {} are incompatible'.format(dims1, dims2))
dims_out.extend(unmatched_dims)
return dims_out
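# Worked example (illustrative): two specs that both carry a wildcard combine
# into a single wildcard plus the union of their explicit dims,
#     combine_dims(['*', 'mid_levels'], ['*', 'tracer'])
#     # -> ['*', 'mid_levels', 'tracer']  (explicit dims in set order)
# while two fully-explicit specs must name exactly the same dims,
#     combine_dims(['lat', 'lon'], ['lat'])  # raises InvalidPropertyDictError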
def combine_component_properties(component_list, property_name, input_properties=None):
property_list = []
for component in component_list:
property_list.append(getattr(component, property_name))
if property_name == 'input_properties' and getattr(component, 'uses_tracers', False):
tracer_dims = list(component.tracer_dims)
if 'tracer' not in tracer_dims:
raise InvalidPropertyDictError(
"tracer_dims must include a 'tracer' dimension indicating "
"tracer number"
)
tracer_dims.remove('tracer')
property_list.append(get_tracer_input_properties(getattr(component, 'prepend_tracers', ()), tracer_dims))
return combine_properties(property_list, input_properties)
def combine_properties(property_list, input_properties=None):
if input_properties is None:
input_properties = {}
return_dict = {}
for property_dict in property_list:
for name, properties in property_dict.items():
if name not in return_dict:
return_dict[name] = {}
return_dict[name].update(properties)
if 'dims' not in properties.keys():
if name in input_properties.keys() and 'dims' in input_properties[name].keys():
return_dict[name]['dims'] = input_properties[name]['dims']
else:
raise InvalidPropertyDictError()
elif not units_are_compatible(
properties['units'], return_dict[name]['units']):
raise InvalidPropertyDictError(
'Cannot combine components with incompatible units '
'{} and {} for quantity {}'.format(
return_dict[name]['units'],
properties['units'], name))
else:
if 'dims' in properties.keys():
new_dims = properties['dims']
elif name in input_properties.keys() and 'dims' in input_properties[name].keys():
new_dims = input_properties[name]['dims']
else:
raise InvalidPropertyDictError()
try:
dims = combine_dims(return_dict[name]['dims'], new_dims)
return_dict[name]['dims'] = dims
except InvalidPropertyDictError as err:
raise InvalidPropertyDictError(
'Incompatibility between dims of quantity {}: {}'.format(
name, err.args[0]))
return return_dict
```
#### File: sympl/_core/get_np_arrays.py
```python
import numpy as np
from .exceptions import InvalidStateError
from .wildcard import get_wildcard_matches_and_dim_lengths, flatten_wildcard_dims
def get_numpy_arrays_with_properties(state, property_dictionary):
out_dict = {}
wildcard_names, dim_lengths = get_wildcard_matches_and_dim_lengths(
state, property_dictionary)
# Now we actually retrieve output arrays since we know the precise out dims
for name, properties in property_dictionary.items():
ensure_quantity_has_units(state[name], name)
try:
quantity = state[name].to_units(properties['units'])
except ValueError:
raise InvalidStateError(
'Could not convert quantity {} from units {} to units {}'.format(
name, state[name].attrs['units'], properties['units']
)
)
out_dims = []
out_dims.extend(properties['dims'])
has_wildcard = '*' in out_dims
if has_wildcard:
i_wildcard = out_dims.index('*')
out_dims[i_wildcard:i_wildcard+1] = wildcard_names
out_array = get_numpy_array(
quantity, out_dims=out_dims, dim_lengths=dim_lengths)
if has_wildcard:
out_array = flatten_wildcard_dims(
out_array, i_wildcard, i_wildcard + len(wildcard_names))
if 'alias' in properties.keys():
out_name = properties['alias']
else:
out_name = name
out_dict[out_name] = out_array
return out_dict
def get_numpy_array(data_array, out_dims, dim_lengths):
"""
Gets a numpy array from the data_array with the desired out_dims, and a
dict of dim_lengths that will give the length of any missing dims in the
data_array.
"""
if len(data_array.values.shape) == 0 and len(out_dims) == 0:
return data_array.values # special case, 0-dimensional scalar array
else:
missing_dims = [dim for dim in out_dims if dim not in data_array.dims]
for dim in missing_dims:
data_array = data_array.expand_dims(dim)
numpy_array = data_array.transpose(*out_dims).values
if len(missing_dims) == 0:
out_array = numpy_array
else: # expand out missing dims which are currently length 1.
out_shape = [dim_lengths.get(name, 1) for name in out_dims]
if out_shape == list(numpy_array.shape):
out_array = numpy_array
else:
out_array = np.empty(out_shape, dtype=numpy_array.dtype)
out_array[:] = numpy_array
return out_array
def ensure_quantity_has_units(quantity, quantity_name):
if 'units' not in quantity.attrs:
raise InvalidStateError(
'quantity {} is missing units attribute'.format(quantity_name))
```
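To illustrate the dim-expansion path in `get_numpy_array`, a small sketch; it assumes the module path above and that xarray is available (sympl state quantities are DataArrays):
```python
import numpy as np
import xarray as xr

from sympl._core.get_np_arrays import get_numpy_array

da = xr.DataArray(np.arange(3.0), dims=['lat'], attrs={'units': 'degK'})

# 'lon' is absent from the DataArray, so it is added as a length-1 dim and
# then broadcast out to the length given in dim_lengths.
arr = get_numpy_array(da, out_dims=['lat', 'lon'], dim_lengths={'lon': 4})
print(arr.shape)  # (3, 4)
```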
#### File: sympl/_core/wrappers.py
```python
from .._core.base_components import (
TendencyComponent, DiagnosticComponent, ImplicitTendencyComponent, Stepper
)
class ScalingWrapper(object):
"""
Wraps any component and scales either inputs, outputs or tendencies
by a floating point value.
Example
-------
This is how the ScalingWrapper can be used to wrap a TendencyComponent.
>>> scaled_component = ScalingWrapper(
>>>     RRTMRadiation(),
>>>     input_scale_factors={'specific_humidity': 0.2},
>>>     tendency_scale_factors={'air_temperature': 1.5})
"""
def __init__(self,
component,
input_scale_factors=None,
output_scale_factors=None,
tendency_scale_factors=None,
diagnostic_scale_factors=None):
"""
Initializes the ScalingWrapper object.
Args
----
component : TendencyComponent, Stepper, DiagnosticComponent, ImplicitTendencyComponent
The component to be wrapped.
input_scale_factors : dict
a dictionary whose keys are the inputs that will be scaled
and values are floating point scaling factors.
output_scale_factors : dict
a dictionary whose keys are the outputs that will be scaled
and values are floating point scaling factors.
tendency_scale_factors : dict
a dictionary whose keys are the tendencies that will be scaled
and values are floating point scaling factors.
diagnostic_scale_factors : dict
a dictionary whose keys are the diagnostics that will be scaled
and values are floating point scaling factors.
Returns
-------
scaled_component : ScalingWrapper
the scaled version of the component
Raises
------
TypeError
The component is not a DiagnosticComponent, TendencyComponent, ImplicitTendencyComponent, or Stepper.
ValueError
The keys in the scale factors do not correspond to valid
input/output/tendency for this component.
"""
if not any(
isinstance(component, t) for t in [
DiagnosticComponent, TendencyComponent, ImplicitTendencyComponent, Stepper]):
raise TypeError(
'component must be a component type (DiagnosticComponent, TendencyComponent, '
'ImplicitTendencyComponent, or Stepper)'
)
self._component = component
self._input_scale_factors = dict()
if input_scale_factors is not None:
for input_field in input_scale_factors.keys():
if input_field not in component.input_properties.keys():
raise ValueError(
"{} is not a valid input quantity.".format(input_field))
self._input_scale_factors = input_scale_factors
self._diagnostic_scale_factors = dict()
if diagnostic_scale_factors is not None:
if not hasattr(component, 'diagnostic_properties'):
raise TypeError(
'Cannot apply diagnostic scale factors to component without '
'diagnostic output.')
self._ensure_fields_have_properties(
diagnostic_scale_factors, component.diagnostic_properties, 'diagnostic')
self._diagnostic_scale_factors = diagnostic_scale_factors
self._output_scale_factors = dict()
if output_scale_factors is not None:
if not hasattr(component, 'output_properties'):
raise TypeError(
'Cannot apply output scale factors to component without '
'output_properties.')
self._ensure_fields_have_properties(
output_scale_factors, component.output_properties, 'output')
self._output_scale_factors = output_scale_factors
self._tendency_scale_factors = dict()
if tendency_scale_factors is not None:
if not hasattr(component, 'tendency_properties'):
raise TypeError(
'Cannot apply tendency scale factors to component that does '
'not output tendencies.')
self._ensure_fields_have_properties(
tendency_scale_factors, component.tendency_properties, 'tendency')
self._tendency_scale_factors = tendency_scale_factors
def _ensure_fields_have_properties(
self, scale_factors, properties, properties_name):
for field in scale_factors.keys():
if field not in properties.keys():
raise ValueError(
"{} is not a {} quantity in the given component"
", but was given a scale factor.".format(field, properties_name))
def __getattr__(self, item):
return getattr(self._component, item)
def __call__(self, state, timestep=None):
"""
Call the underlying component, applying scaling.
Parameters
----------
state : dict
A model state dictionary.
timestep : timedelta, optional
A time step. If the underlying component does not use a timestep,
this will be discarded. If it does, this argument is required.
Returns
-------
*args
The return values of the underlying component.
"""
scaled_state = {}
if 'time' in state:
scaled_state['time'] = state['time']
for input_field in self.input_properties.keys():
if input_field in self._input_scale_factors:
scale_factor = self._input_scale_factors[input_field]
scaled_state[input_field] = state[input_field]*float(scale_factor)
scaled_state[input_field].attrs = state[input_field].attrs
else:
scaled_state[input_field] = state[input_field]
if isinstance(self._component, Stepper):
if timestep is None:
raise TypeError('Must give timestep to call Stepper.')
diagnostics, new_state = self._component(scaled_state, timestep)
for name in self._output_scale_factors.keys():
scale_factor = self._output_scale_factors[name]
new_state[name] *= float(scale_factor)
for name in self._diagnostic_scale_factors.keys():
scale_factor = self._diagnostic_scale_factors[name]
diagnostics[name] *= float(scale_factor)
return diagnostics, new_state
elif isinstance(self._component, TendencyComponent):
tendencies, diagnostics = self._component(scaled_state)
for tend_field in self._tendency_scale_factors.keys():
scale_factor = self._tendency_scale_factors[tend_field]
tendencies[tend_field] *= float(scale_factor)
for name in self._diagnostic_scale_factors.keys():
scale_factor = self._diagnostic_scale_factors[name]
diagnostics[name] *= float(scale_factor)
return tendencies, diagnostics
elif isinstance(self._component, ImplicitTendencyComponent):
if timestep is None:
raise TypeError('Must give timestep to call ImplicitTendencyComponent.')
tendencies, diagnostics = self._component(scaled_state, timestep)
for tend_field in self._tendency_scale_factors.keys():
scale_factor = self._tendency_scale_factors[tend_field]
tendencies[tend_field] *= float(scale_factor)
for name in self._diagnostic_scale_factors.keys():
scale_factor = self._diagnostic_scale_factors[name]
diagnostics[name] *= float(scale_factor)
return tendencies, diagnostics
elif isinstance(self._component, DiagnosticComponent):
diagnostics = self._component(scaled_state)
for name in self._diagnostic_scale_factors.keys():
scale_factor = self._diagnostic_scale_factors[name]
diagnostics[name] *= float(scale_factor)
return diagnostics
else: # Should never reach this
raise RuntimeError(
'Unknown component type, seems to be a bug in ScalingWrapper')
class UpdateFrequencyWrapper(object):
"""
Wraps a component object so that when it is called, it only computes new
output if sufficient time has passed, and otherwise returns its last
computed output.
Example
-------
This how the wrapper should be used on a fictional TendencyComponent class
called MyPrognostic.
>>> from datetime import timedelta
>>> prognostic = UpdateFrequencyWrapper(MyPrognostic(), timedelta(hours=1))
"""
def __init__(self, component, update_timedelta):
"""
Initialize the UpdateFrequencyWrapper object.
Args
----
component : TendencyComponent, Stepper, DiagnosticComponent, ImplicitTendencyComponent
The component to be wrapped.
update_timedelta : timedelta
The amount that state['time'] must differ from when output
was cached before new output is computed.
"""
self.component = component
self._update_timedelta = update_timedelta
self._cached_output = None
self._last_update_time = None
def __call__(self, state, timestep=None, **kwargs):
"""
Call the underlying component, or return cached values instead if
insufficient time has passed since the last time cached values were
stored.
Parameters
----------
state : dict
A model state dictionary.
timestep : timedelta, optional
A time step. If the underlying component does not use a timestep,
this will be discarded. If it does, this argument is required.
Returns
-------
*args
The return values of the underlying component.
"""
if ((self._last_update_time is None) or
(state['time'] >= self._last_update_time +
self._update_timedelta)):
if timestep is not None:
try:
self._cached_output = self.component(state, timestep, **kwargs)
except TypeError:
self._cached_output = self.component(state, **kwargs)
else:
self._cached_output = self.component(state, **kwargs)
self._last_update_time = state['time']
return self._cached_output
def __getattr__(self, item):
return getattr(self.component, item)
``` |
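A minimal sketch of the caching behaviour, wrapping a hypothetical `MyDiagnostic` component; repeated calls within the hour return the cached output:
```python
from datetime import timedelta

# MyDiagnostic is a placeholder for any DiagnosticComponent.
slow_component = UpdateFrequencyWrapper(MyDiagnostic(), timedelta(hours=1))
diagnostics = slow_component(state)  # computed and cached
diagnostics = slow_component(state)  # cached until state['time'] advances an hour
```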
{
"source": "joynahid/cfbotalpha",
"score": 2
} |
#### File: cfbotalpha/controllers/facebook_api.py
```python
import requests, os
TOKEN = os.environ['TOKEN']
class facebookApi:
def send(self, request_body):
resp = requests.post('https://graph.facebook.com/v5.0/me/messages?access_token='+TOKEN, json=request_body, headers = {'Content-type': 'application/json'})
def send_message(self, msg=None,recipient_id=None):
try:
if msg:
request_body = {
'recipient': {
'id': recipient_id
},
'message': {
"text": msg
}
}
self.send(request_body)
except:
pass
def send_list_item(self, List, recipient_id):
try:
if List:
request_body = {
'recipient': {
'id' : recipient_id
},
'message' : {
'attachment': {
'type':'template',
'payload': {
"template_type": "generic",
"elements": List
}
}
}
}
self.send(request_body)
except:
pass
facebook = facebookApi()
```
#### File: cfbotalpha/controllers/rating_calculator.py
```python
from dataclasses import dataclass
import numpy as np
from numpy.fft import fft, ifft
def intdiv(x, y):
return -(-x // y) if x < 0 else x // y
@dataclass
class Contestant:
party: str
points: float
penalty: int
rating: int
need_rating: int = 0
delta: int = 0
rank: float = 0.0
seed: float = 0.0
class CodeforcesRatingCalculator:
def __init__(self, standings):
"""Calculate Codeforces rating changes and seeds given contest and user information."""
self.contestants = []
for i in standings:
self.contestants.append(Contestant(i['handle'], i['points'], i['penalty'], i['rating']))
self._precalc_seed()
self._reassign_ranks()
self._process()
self._update_delta()
def calculate_rating_changes(self):
"""Return a mapping between contestants and their corresponding delta."""
return {contestant.party: contestant.delta for contestant in self.contestants}
def get_seed(self, rating, me=None):
"""Get seed given a rating and user."""
seed = self.seed[rating]
if me:
seed -= self.elo_win_prob[rating - me.rating]
return seed
def _precalc_seed(self):
MAX = 6144
# Precompute the ELO win probability for all possible rating differences.
self.elo_win_prob = np.roll(1 / (1 + pow(10, np.arange(-MAX, MAX) / 400)), -MAX)
# Compute the rating histogram.
count = np.zeros(2 * MAX)
for a in self.contestants:
count[a.rating] += 1
# Precompute the seed for all possible ratings using FFT.
self.seed = 1 + ifft(fft(count) * fft(self.elo_win_prob)).real
def _reassign_ranks(self):
"""Find the rank of each contestant."""
contestants = self.contestants
contestants.sort(key=lambda o: (-o.points, o.penalty))
points = penalty = rank = None
for i in reversed(range(len(contestants))):
if contestants[i].points != points or contestants[i].penalty != penalty:
rank = i + 1
points = contestants[i].points
penalty = contestants[i].penalty
contestants[i].rank = rank
def _process(self):
"""Process and assign approximate delta for each contestant."""
for a in self.contestants:
a.seed = self.get_seed(a.rating, a)
mid_rank = (a.rank * a.seed) ** 0.5
a.need_rating = self._rank_to_rating(mid_rank, a)
a.delta = intdiv(a.need_rating - a.rating, 2)
def _rank_to_rating(self, rank, me):
"""Binary Search to find the performance rating for a given rank."""
left, right = 1, 8000
while right - left > 1:
mid = (left + right) // 2
if self.get_seed(mid, me) < rank:
right = mid
else:
left = mid
return left
def _update_delta(self):
"""Update the delta of each contestant."""
contestants = self.contestants
n = len(contestants)
contestants.sort(key=lambda o: -o.rating)
correction = intdiv(-sum(c.delta for c in contestants), n) - 1
for contestant in contestants:
contestant.delta += correction
zero_sum_count = min(4 * round(n ** 0.5), n)
delta_sum = -sum(contestants[i].delta for i in range(zero_sum_count))
correction = min(0, max(-10, intdiv(delta_sum, zero_sum_count)))
for contestant in contestants:
contestant.delta += correction
```
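A minimal sketch of driving the calculator directly; the standings rows are made up, but the fields match what the constructor reads (`handle`, `points`, `penalty`, `rating`):
```python
standings = [
    {'handle': 'alice', 'points': 3.0, 'penalty': 120, 'rating': 1800},
    {'handle': 'bob',   'points': 2.0, 'penalty': 95,  'rating': 1500},
    {'handle': 'carol', 'points': 2.0, 'penalty': 140, 'rating': 1600},
]
calc = CodeforcesRatingCalculator(standings)
print(calc.calculate_rating_changes())  # {'alice': ..., 'bob': ..., 'carol': ...}
```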
#### File: cfbotalpha/controllers/rating_change_controller.py
```python
import requests, time, asyncio
from controllers.rating_calculator import CodeforcesRatingCalculator
from controllers.network.api_urls import contest_api, user_api
from controllers.network.fetch_data import async_request
CF_CONTEST_URL = 'https://codeforces.com/api/contest.standings'
class ratingChangeControl:
def __init__(self, username, contest_id, sender, db):
self.db = db
if not contest_id: self.save_state = asyncio.create_task(self.get_latest_contestid())
self.contest_id = contest_id
if sender: self.sender = sender
self.error = ratingChangeError()
if username: self.username = str(username).lower()
else: self.get_cf_handle()
# Message Generator
async def fetch_rating_change_message(self):
try:
if self.contest_id == None:
await self.save_state
if self.contest_id == None:
return 'Couldn\'t fetch last rated contest. Codeforces didn\'t respond :/'
print(self.contest_id, self.username)
if self.username == None:
return self.error.usernameNotFound()['error']
data = await asyncio.create_task(self.generate_rating_change())
if 'error' in data: return data['error']
if 'official' in data:
oldRating, delta, contest_name = data['official']
msg = '{} ' + str(oldRating) + ' to ' + str(oldRating+delta) + ' [{}' + str(delta) + ']'
build_msg = contest_name, msg.format('Rating changed from', '+' if delta>=0 else '')
return build_msg
if 'prediction' in data:
oldRating, delta, contest_name = data['prediction']
msg = '{} ' + str(oldRating) + ' to ' + str(oldRating+delta) + ' [{}' + str(delta) + ']'
build_msg = contest_name, msg.format('Predicting rating change from', '+' if delta>=0 else '')
return build_msg
except Exception as e:
print('Fetching Rating Error: ', e)
pass
return None
# Last Contest if Contest ID was not given
async def get_latest_contestid(self):
try:
url = contest_api.list()
lis = await asyncio.create_task(async_request.unit_call(url))
for i in lis['result']:
if i['phase'] != 'BEFORE' and 'unrated' not in i['name'].lower():
self.contest_id = str(i['id'])
break
except Exception as e:
self.contest_id = None
print("Couldn't fecth contest", e)
# Get CF Handle if not given
def get_cf_handle(self):
try:
self.username = self.db.collection('profiles').document(self.sender).get().to_dict()['username']
except Exception as e:
print('Database error',e)
self.username = None
# Generate Rating Change
async def generate_rating_change(self):
try:
rating_change_url = contest_api.ratingChanges(self.contest_id)
async_request.clear_urls()
async_request.add_url(rating_change_url)
rating_changed = await asyncio.create_task(async_request.call())
rating_changed = rating_changed[0]
if rating_changed['status'] == 'OK' and rating_changed['result']:
for user in rating_changed['result']:
if user['handle'].lower() == self.username:
res = {'official' : (user['oldRating'], user['newRating']-user['oldRating'], user['contestName'])}
return res
return self.error.userNotRated(self.username) #Didn't Participate or rated
else:
if 'comment' in rating_changed and 'finished yet' not in rating_changed['comment']:
return self.error.invalidContestID() # Invalid Contest
rated_userlist_url = user_api.ratedList(self.contest_id,'true')
current_ranklist_url = contest_api.standings(self.contest_id)
async_request.clear_urls()
async_request.add_url(rated_userlist_url)
async_request.add_url(current_ranklist_url)
rated_userlist, current_ranklist = await asyncio.create_task(async_request.call())
current_rating = {}
for user in rated_userlist['result']:
current_rating[user['handle'].lower()] = user['rating']
data = []
for user in current_ranklist['result']['rows']:
handle = user['party']['members'][0]['handle'].lower()
if handle not in current_rating:
current_rating[handle] = 1500
if current_rating[handle]>=2100 and 'Educational' in current_ranklist['result']['contest']['name']: continue
data.append({
'handle': handle,
'points': float(user['points']),
'penalty': int(user['penalty']),
'rating': int(current_rating[handle])
})
calculate = CodeforcesRatingCalculator(data)
predicted_rating_change = calculate.calculate_rating_changes()
contest_name = current_ranklist['result']['contest']['name']
if self.username in predicted_rating_change:
res = {'prediction' : (current_rating[self.username], predicted_rating_change[self.username], contest_name)}
return res
else: return self.error.userNotRated(self.username) # Not rated or didn't participate
except Exception as e:
print('Rating Change Error', e)
pass
return None
# rating = makeRatingChangeMessage(1408, 'joynahiid')
# print(rating.fetch_rating_change())
class ratingChangeError:
def usernameNotFound(self):
text = {'error' : ('Codeforces Handle was not found.', 'Please send \'Remember <YOURCFHANDLE>\' to perform this query')}
return text
def invalidContestID(self):
text = {'error' : 'Invalid Contest ID. Please send contest ID from the contest URL/ Link'}
return text
def userNotRated(self, username):
text = {'error' : '{} was not rated or didn\'t participate'.format(username)}
return text
# loop = asyncio.get_event_loop()
# test = ratingChangeControl('joynahiid','1421','','')
# got = loop.run_until_complete(test.generate_rating_change())
# print(got)
``` |
{
"source": "joynahid/pyvjudge",
"score": 2
} |
#### File: pyvjudge/pyvjudge/main.py
```python
import datetime
import logging
import re
from typing import Union, List, Dict
from pyvjudge.models.contest import Submission, Verdict, Standing
from pyvjudge.models.user import User
from pyvjudge.vjudge.api_client import VjudgeClient
from pyvjudge.vjudge.contest_scraper import ContestScraper
logging.basicConfig()
LOGGER = logging.getLogger("PyVjudge")
class PyVjudge:
def __init__(self):
self.client = VjudgeClient()
self.contest_scraper = ContestScraper(self.client)
def get_standing(self, contest_id: Union[str, int], contest_password: str = None) -> Standing:
if not (contest_password is None):
LOGGER.info("Password was provided, registering the contest...")
self.client.register_contest(contest_id, contest_password)
LOGGER.info(f"Scraping contest info")
contest_info = self.contest_scraper.scrape_contest_info(contest_id)
LOGGER.info(f"Fetching standing ({contest_id})")
single_ranklist = self.client.get_single_ranklist(contest_id)
submissions = self.build_submissions(contest_info, single_ranklist)
return Standing(contest=contest_info, submissions=submissions)
def __del__(self):
self.client.close()
@staticmethod
def build_submissions(contest_info, single_ranklist) -> List[Submission]:
problem_hashtable = dict()
for p in contest_info.problems:
problem_hashtable[p.index] = p
users_hashtable: Dict[str, List[str]] = single_ranklist.participants
submissions: List[Submission] = []
for s in single_ranklist.submissions:
cur_user = users_hashtable[str(s[0])]
sub = Submission(
contest=contest_info,
problem=problem_hashtable[s[1]],
verdict=Verdict.ACCEPTED if s[2] else Verdict.REJECTED,
submitted_at=contest_info.started_at + datetime.timedelta(seconds=s[3]),
submitted_by=User(id=s[0], username=cur_user[0], nickname=cur_user[1], avatar=cur_user[2])
)
submissions.append(sub)
return submissions
@staticmethod
def find_contest_id(url: str) -> int:
"""Finds Contest ID from vjudge contest url"""
m = re.match(r'.*/contest/(\d+).+$', url, re.I)
groups = m.groups()
if len(groups) > 0:
return int(groups[0])
```
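A minimal sketch mirroring the project's own test: resolve the contest id from a vjudge URL, then fetch and build the standings:
```python
from pyvjudge import PyVjudge

pv = PyVjudge()
contest_id = PyVjudge.find_contest_id("https://vjudge.net/contest/458956#overview")
standing = pv.get_standing(contest_id)  # pass contest_password= for private contests
```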
#### File: pyvjudge/vjudge/contest_scraper.py
```python
import datetime
import json
from typing import Dict, List
from bs4 import BeautifulSoup
from pyvjudge.models.contest import ContestInfo, Problem, ProblemProperty, Description
from pyvjudge.models.user import User
from pyvjudge.vjudge.api_client import VjudgeClient
CONTEST_URL: str = "/contest/{contest_id}"
class ContestScraper:
def __init__(
self, client: VjudgeClient
) -> None:
"""
This scraper will scrape contest from the contest url. It leverages the dataJson hidden inside the html
Args:
client (VjudgeClient): Vjudge APIClient
"""
self.client = client
def scrape_contest_info(self, contest_id) -> ContestInfo:
req_url = CONTEST_URL.format(contest_id=contest_id)
resp = self.client.get(req_url)
if resp.status_code != 200:
raise BadVjudgeResponse()
html = resp.text
soup = BeautifulSoup(html, "html.parser")
data = self.find_data_json(soup)
self.is_contest_accessible(data.get("openness"))
return ContestInfo(
id=data['id'],
title=data['title'],
url=req_url,
started_at=datetime.datetime.fromtimestamp(data['begin'] // 1000),
ended_at=datetime.datetime.fromtimestamp(data['end'] // 1000),
created_at=datetime.datetime.fromtimestamp(data['createTime'] // 1000),
announcement=data['announcement'],
description=Description(**data['description']),
penalty_second=data['penalty'],
manager=User(username=data['managerName'], id=data["managerId"]),
problems=list(self.parse_problems_from_data_json(data['problems'], contest_id))
)
@staticmethod
def is_contest_accessible(openness: int) -> bool:
if openness == 0:
return True
raise ContestNotAccessible()
@staticmethod
def parse_problems_from_data_json(problems: List, contest_id: int) -> List[Problem]:
problem_uri = CONTEST_URL.format(contest_id=contest_id) + "#problem/{}"
for p in problems:
yield Problem(
id=p['pid'],
num=p['num'],
title=p['title'],
oj=p['oj'],
prob_num=p.get('probNum'),
weight=p['weight'],
languages=p.get('languages'),
properties=[ProblemProperty(**x) for x in p['properties']],
url=problem_uri.format(p["num"])
)
@staticmethod
def find_data_json(soup: BeautifulSoup) -> Dict:
"""Extracts json data from html returned by vjudge contest_url
Args:
soup (BeautifulSoup): Html soup
"""
assert isinstance(soup, BeautifulSoup)
data = soup.find("textarea", {"name": "dataJson"})
if not data:
raise ValueError("HTML doesn't contain any \"dataJson\" named attribute")
return json.loads(data.text)
class BadVjudgeResponse(Exception):
pass
class ContestNotAccessible(Exception):
def __init__(self):
super().__init__(
"Contest isn't directly accessible. "
"Make sure the account has access to the contest directly. "
"If necessary, login or register the contest first with valid password."
)
```
#### File: pyvjudge/tests/conftest.py
```python
import os
import pytest
ENV_VARS = {
"VJUDGE_BASE_URL": "https://vjudge.net",
"VJUDGE_USERNAME": "vjudge",
"VJUDGE_PASSWORD": "password",
}
@pytest.fixture(scope="session", autouse=True)
def tests_setup_and_teardown():
# Will be executed before the first test
old_environ = dict(os.environ)
os.environ.update(ENV_VARS)
yield
# Will be executed after the last test
os.environ.clear()
os.environ.update(old_environ)
```
#### File: pyvjudge/tests/test_pyvjudge.py
```python
import unittest
from pyvjudge import PyVjudge
from pyvjudge.models.contest import Standing
class TestPyVjudge(unittest.TestCase):
def __init__(self, *args) -> None:
self.pv = PyVjudge()
super().__init__(*args)
def test_get_standing(self):
standing = self.pv.get_standing(PyVjudge.find_contest_id("https://vjudge.net/contest/458956#overview"))
assert isinstance(standing, Standing)
def test_find_contest_id(self):
assert PyVjudge.find_contest_id("https://vjudge.net/contest/458956#overview") == 458956
``` |
{
"source": "Joy-nath/telegram-pdf-bot",
"score": 3
} |
#### File: pdf_bot/files/ocr.py
```python
import os
import tempfile
import ocrmypdf
from ocrmypdf.exceptions import PriorOcrFoundError
from telegram import ReplyKeyboardRemove
from telegram.ext import ConversationHandler
from pdf_bot.analytics import TaskType
from pdf_bot.consts import PDF_INFO
from pdf_bot.language import set_lang
from pdf_bot.utils import check_user_data, send_result_file
def add_ocr_to_pdf(update, context):
if not check_user_data(update, context, PDF_INFO):
return ConversationHandler.END
_ = set_lang(update, context)
update.effective_message.reply_text(
_("Adding an OCR text layer to your PDF file"),
reply_markup=ReplyKeyboardRemove(),
)
with tempfile.NamedTemporaryFile() as tf:
user_data = context.user_data
file_id, file_name = user_data[PDF_INFO]
pdf_file = context.bot.get_file(file_id)
pdf_file.download(custom_path=tf.name)
with tempfile.TemporaryDirectory() as dir_name:
out_fn = os.path.join(dir_name, f"OCR_{os.path.splitext(file_name)[0]}.pdf")
try:
# logging.getLogger("ocrmypdf").setLevel(logging.WARNING)
ocrmypdf.ocr(tf.name, out_fn, deskew=True, progress_bar=False)
send_result_file(update, context, out_fn, TaskType.ocr_pdf)
except PriorOcrFoundError:
update.effective_message.reply_text(
_("Your PDF file already has a text layer")
)
# Clean up memory
if user_data[PDF_INFO] == file_id:
del user_data[PDF_INFO]
return ConversationHandler.END
```
#### File: telegram-pdf-bot/pdf_bot/__init__.py
```python
import os
from http import HTTPStatus
from dotenv import load_dotenv
from flask import Flask, Response, request
from telegram import Update
from telegram.ext import messagequeue as mq
from telegram.ext.dispatcher import Dispatcher
from telegram.utils.request import Request
import pdf_bot.dispatcher as dp
import pdf_bot.logging as log
from pdf_bot.mq_bot import MQBot
load_dotenv()
TELEGRAM_TOKEN = os.environ.get("TELEGRAM_TOKEN")
def create_app():
log.setup_logging()
# Create and configure the app
app = Flask(__name__, instance_relative_config=True)
# Ensure the instance folder exists
try:
os.makedirs(app.instance_path)
except OSError:
pass
q = mq.MessageQueue(all_burst_limit=3, all_time_limit_ms=3000)
req = Request(con_pool_size=8, connect_timeout=10, read_timeout=10)
bot = MQBot(TELEGRAM_TOKEN, request=req, mqueue=q)
dispatcher = Dispatcher(bot=bot, update_queue=None, workers=0)
dp.setup_dispatcher(dispatcher)
@app.route("/", methods=["POST"])
def index() -> Response:
dispatcher.process_update(Update.de_json(request.get_json(force=True), bot))
return Response("", HTTPStatus.NO_CONTENT)
return app
```
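The Flask route above only receives updates once Telegram knows the webhook URL; a one-off registration sketch using the Bot API's `setWebhook` method (the URL is a placeholder):
```python
import os

import requests

token = os.environ["TELEGRAM_TOKEN"]
requests.get(
    f"https://api.telegram.org/bot{token}/setWebhook",
    params={"url": "https://example.com/"},  # placeholder public HTTPS endpoint
)
```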
#### File: telegram-pdf-bot/pdf_bot/language.py
```python
import gettext
from google.cloud import datastore
from telegram import CallbackQuery, InlineKeyboardButton, InlineKeyboardMarkup, Update
from telegram.chataction import ChatAction
from telegram.ext import CallbackContext
from pdf_bot.consts import LANGUAGE, LANGUAGES, USER
from pdf_bot.store import client
def send_lang(update: Update, context: CallbackContext, query: CallbackQuery = None):
update.effective_message.reply_chat_action(ChatAction.TYPING)
lang = get_lang(update, context, query)
langs = [
InlineKeyboardButton(key, callback_data=key)
for key, value in sorted(LANGUAGES.items(), key=lambda x: x[1])
if value != lang
]
keyboard_size = 2
keyboard = [
langs[i : i + keyboard_size] for i in range(0, len(langs), keyboard_size)
]
reply_markup = InlineKeyboardMarkup(keyboard)
_ = set_lang(update, context)
update.effective_message.reply_text(
_("Select your language"), reply_markup=reply_markup
)
def get_lang(update: Update, context: CallbackContext, query: CallbackQuery = None):
if context.user_data is not None and LANGUAGE in context.user_data:
lang = context.user_data[LANGUAGE]
else:
if query is None:
sender = update.effective_message.from_user or update.effective_chat
user_id = sender.id
else:
user_id = query.from_user.id
user_key = client.key(USER, user_id)
user = client.get(key=user_key)
if user is None or LANGUAGE not in user:
lang = "en_GB"
else:
lang = user[LANGUAGE]
if lang == "en":
lang = "en_GB"
if context.user_data is None:
context.user_data = {LANGUAGE: lang}
else:
context.user_data[LANGUAGE] = lang
return lang
def store_lang(update, context, query):
lang_code = LANGUAGES[query.data]
with client.transaction():
user_key = client.key(USER, query.from_user.id)
user = client.get(key=user_key)
if user is None:
user = datastore.Entity(user_key)
user[LANGUAGE] = lang_code
client.put(user)
context.user_data[LANGUAGE] = lang_code
_ = set_lang(update, context)
query.message.edit_text(
_("Your language has been set to {language}").format(language=query.data)
)
def set_lang(update, context, query=None):
lang = get_lang(update, context, query)
t = gettext.translation("pdf_bot", localedir="locale", languages=[lang])
return t.gettext
```
#### File: telegram-pdf-bot/pdf_bot/utils.py
```python
import os
import tempfile
from threading import Lock
from PyPDF2 import PdfFileReader, PdfFileWriter
from PyPDF2.utils import PdfReadError
from telegram import (
ChatAction,
InlineKeyboardButton,
InlineKeyboardMarkup,
ReplyKeyboardMarkup,
ReplyKeyboardRemove,
Update,
)
from telegram.constants import MAX_FILESIZE_DOWNLOAD, MAX_FILESIZE_UPLOAD
from telegram.ext import CallbackContext, ConversationHandler
from pdf_bot.analytics import EventAction, TaskType, send_event
from pdf_bot.consts import (
CANCEL,
CHANNEL_NAME,
PAYMENT,
PDF_INFO,
PDF_INVALID_FORMAT,
PDF_OK,
PDF_TOO_LARGE,
)
from pdf_bot.language import set_lang
def cancel(update, context):
_ = set_lang(update, context)
update.effective_message.reply_text(
_("Action cancelled"), reply_markup=ReplyKeyboardRemove()
)
return ConversationHandler.END
def reply_with_cancel_btn(update: Update, context: CallbackContext, text: str):
_ = set_lang(update, context)
reply_markup = ReplyKeyboardMarkup(
[[_(CANCEL)]], resize_keyboard=True, one_time_keyboard=True
)
update.effective_message.reply_text(text, reply_markup=reply_markup)
def check_pdf(update, context, send_msg=True):
"""
Validate the PDF file
Args:
update: the update object
context: the context object
send_msg: the bool indicating to send a message or not
Returns:
The variable indicating the validation result
"""
pdf_status = PDF_OK
message = update.effective_message
pdf_file = message.document
_ = set_lang(update, context)
if not pdf_file.mime_type.endswith("pdf"):
pdf_status = PDF_INVALID_FORMAT
if send_msg:
message.reply_text(_("Your file is not a PDF file, please try again"))
elif pdf_file.file_size >= MAX_FILESIZE_DOWNLOAD:
pdf_status = PDF_TOO_LARGE
if send_msg:
message.reply_text(
"{desc_1}\n\n{desc_2}".format(
desc_1=_("Your file is too large for me to download and process"),
desc_2=_(
"Note that this is a Telegram Bot limitation and there's "
"nothing I can do unless Telegram changes this limit"
),
)
)
return pdf_status
def check_user_data(
update: Update, context: CallbackContext, key: str, lock: Lock = None
) -> bool:
"""
Check if the specified key exists in user_data
Args:
update: the update object
context: the context object
        key: the string of key
        lock: the optional lock to hold while reading user_data
Returns:
The boolean indicating if the key exists or not
"""
data_ok = True
if lock is not None:
lock.acquire()
if key not in context.user_data:
data_ok = False
_ = set_lang(update, context)
update.effective_message.reply_text(
_("Something went wrong, please start over again")
)
if lock is not None:
lock.release()
return data_ok
def process_pdf(
update,
context,
task_type: TaskType,
encrypt_pw=None,
rotate_degree=None,
scale_by=None,
scale_to=None,
):
with tempfile.NamedTemporaryFile() as tf:
user_data = context.user_data
file_id, file_name = user_data[PDF_INFO]
if encrypt_pw is not None:
pdf_reader = open_pdf(update, context, file_id, tf.name, task_type)
else:
pdf_reader = open_pdf(update, context, file_id, tf.name)
if pdf_reader is not None:
pdf_writer = PdfFileWriter()
for page in pdf_reader.pages:
if rotate_degree is not None:
pdf_writer.addPage(page.rotateClockwise(rotate_degree))
elif scale_by is not None:
page.scale(scale_by[0], scale_by[1])
pdf_writer.addPage(page)
elif scale_to is not None:
page.scaleTo(scale_to[0], scale_to[1])
pdf_writer.addPage(page)
else:
pdf_writer.addPage(page)
if encrypt_pw is not None:
pdf_writer.encrypt(encrypt_pw)
# Send result file
write_send_pdf(update, context, pdf_writer, file_name, task_type)
# Clean up memory
if user_data[PDF_INFO] == file_id:
del user_data[PDF_INFO]
def open_pdf(update, context, file_id, file_name, task_type=None):
"""
Download, open and validate PDF file
Args:
update: the update object
context: the context object
file_id: the string of the file ID
file_name: the string of the file name
        task_type: the optional task type used to tailor the error message for encrypted files
Returns:
The PdfFileReader object or None
"""
_ = set_lang(update, context)
pdf_file = context.bot.get_file(file_id)
pdf_file.download(custom_path=file_name)
pdf_reader = None
try:
pdf_reader = PdfFileReader(open(file_name, "rb"))
except PdfReadError:
update.effective_message.reply_text(
_("Your file is invalid and I couldn't open and process it")
)
if pdf_reader is not None and pdf_reader.isEncrypted:
if task_type is not None:
if task_type == TaskType.encrypt_pdf:
text = _("Your PDF file is already encrypted")
else:
text = _(
"Your PDF file is encrypted and you'll have to decrypt it first"
)
else:
text = _("Your PDF file is encrypted and you'll have to decrypt it first")
pdf_reader = None
update.effective_message.reply_text(text)
return pdf_reader
def send_file_names(update, context, file_names, file_type):
"""
Send a list of file names to user
Args:
update: the update object
context: the context object
file_names: the list of file names
file_type: the string of file type
Returns:
None
"""
_ = set_lang(update, context)
text = "{desc}\n".format(
desc=_("You've sent me these {file_type} so far:").format(file_type=file_type)
)
for i, filename in enumerate(file_names):
text += f"{i + 1}: {filename}\n"
update.effective_message.reply_text(text)
def write_send_pdf(update, context, pdf_writer, file_name, task_type: TaskType):
with tempfile.TemporaryDirectory() as dir_name:
new_fn = f"{task_type.value.title()}_{file_name}"
out_fn = os.path.join(dir_name, new_fn)
with open(out_fn, "wb") as f:
pdf_writer.write(f)
send_result_file(update, context, out_fn, task_type)
def send_result_file(
update: Update, context: CallbackContext, output_filename: str, task: TaskType
):
_ = set_lang(update, context)
message = update.effective_message
reply_markup = get_support_markup(update, context)
if os.path.getsize(output_filename) >= MAX_FILESIZE_UPLOAD:
message.reply_text(
"{desc_1}\n\n{desc_2}".format(
desc_1=_("The result file is too large for me to send to you"),
desc_2=_(
"Note that this is a Telegram Bot limitation and there's "
"nothing I can do unless Telegram changes this limit"
),
),
reply_markup=reply_markup,
)
else:
if output_filename.endswith(".png"):
message.chat.send_action(ChatAction.UPLOAD_PHOTO)
message.reply_photo(
open(output_filename, "rb"),
caption=_("Here is your result file"),
reply_markup=reply_markup,
)
else:
message.chat.send_action(ChatAction.UPLOAD_DOCUMENT)
message.reply_document(
document=open(output_filename, "rb"),
caption=_("Here is your result file"),
reply_markup=reply_markup,
)
send_event(update, context, task, EventAction.complete)
def get_support_markup(update, context):
"""
Create the reply markup
Returns:
The reply markup object
"""
_ = set_lang(update, context)
keyboard = [
[
InlineKeyboardButton(_("Join Channel"), f"https://t.me/{CHANNEL_NAME}"),
InlineKeyboardButton(_("Support PDF Bot"), callback_data=PAYMENT),
]
]
reply_markup = InlineKeyboardMarkup(keyboard)
return reply_markup
``` |
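process_pdf() above drives PyPDF2's reader/writer pair through a temp file. A minimal standalone sketch of the same rotate-and-save flow, with placeholder file names (PyPDF2 1.x API, where rotateClockwise() mutates the page and returns it):
```python
from PyPDF2 import PdfFileReader, PdfFileWriter

# Standalone sketch of the rotation branch in process_pdf() above.
# "input.pdf" and "output.pdf" are placeholder paths.
reader = PdfFileReader(open("input.pdf", "rb"))
writer = PdfFileWriter()
for page in reader.pages:
    writer.addPage(page.rotateClockwise(90))  # rotate each page 90 degrees
with open("output.pdf", "wb") as f:
    writer.write(f)
```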
{
"source": "Joynice/PicGo_Web",
"score": 2
} |
#### File: PicGo_Web/front/views.py
```python
__author__ = 'Joynice'
from flask import (
Blueprint,
views,
render_template,
request
)
import os
from .models import Images
from utils import field
from utils.utils import *
from utils.GithubApi import GithubTools
from exts import db
front_bp = Blueprint('front', __name__, url_prefix='/')
class IndexView(views.MethodView):
'''
    Home page view
'''
def get(self):
images = Images.query.order_by(Images.create_time.desc()).all()
context = {
'images': images
}
return render_template('front/front_index.html', **context)
def post(self):
        return field.params_error(message='Method not supported')
class ImageView(views.MethodView):
'''
    Upload an image file
'''
def get(self):
        return field.params_error(message='Method not supported')
def post(self):
        file = request.files.get('file')
        if not file:
            return field.params_error(message='No file received')
        filename = file.filename
        content = file.read()
        if not allowed_file(file.filename, ALLOWED_EXTENSIONS=config.ALLOWED_PIC_EXTENSIONS):
            return field.params_error(message='Invalid image format')
        if not file_len(content):
            return field.params_error(message='Image size exceeds {}MB'.format(int(config.ALLOWED_PIC_LEN / 1024 / 1024)))
new_name = rename(filename)
        if config.STORE_TYPE == 'github':  # GitHub storage
git = GithubTools()
code, link = git.create_file('{}{}'.format(config.PATH, '/' + new_name), content)
if code:
image = Images(name=new_name, link=link, type='github')
db.session.add(image)
db.session.commit()
                return field.layui_success(message='Upload successful, copy the link to use it', data={'link': link, 'id': image.id})
else:
return field.params_error(message=link)
        elif config.STORE_TYPE == 'server':  # local storage
            with open(os.path.join(config.LOCAL_STORAGE_PATH, new_name), 'wb') as img:
                img.write(content)
link = request.url_root + 'static/images/' + new_name
image = Images(name=new_name, link=link, type='server')
db.session.add(image)
db.session.commit()
            return field.layui_success(message='Upload successful, copy the link to use it', data={'link': link, 'id': image.id})
else:
            return field.params_error('STORE_TYPE in the config file is set incorrectly')
class AboutView(views.MethodView):
'''
    About page view
'''
def get(self):
return render_template('front/front_about.html')
def post(self):
        return field.params_error(message='Method not supported')
class DeleteImageView(views.MethodView):
'''
    Delete an image
'''
def get(self):
        return field.params_error(message='Method not supported')
def post(self):
id = request.form.get('id')
if not id:
            return field.params_error(message='Missing parameter')
image = Images.query.get(id)
if not image:
            return field.params_error(message='Image not found')
filename = image.name
        if image.type == 'github':
            git = GithubTools()
            code, message = git.delete_file('{}{}'.format(config.PATH, '/' + filename))
            if not code:
                return field.params_error(message=message)
            db.session.delete(image)
            db.session.commit()
            return field.success(message=message)
elif image.type == 'server':
path = os.path.join(config.LOCAL_STORAGE_PATH, filename)
if os.path.exists(path):
os.remove(path)
db.session.delete(image)
db.session.commit()
                return field.success(message='Deleted successfully')
else:
                return field.params_error(message='Image not found')
else:
            return field.params_error('STORE_TYPE in the config file is set incorrectly')
front_bp.add_url_rule('', view_func=IndexView.as_view('index'))
front_bp.add_url_rule('about/', view_func=AboutView.as_view('about'))
front_bp.add_url_rule('images/', view_func=ImageView.as_view('images'))
front_bp.add_url_rule('delete/', view_func=DeleteImageView.as_view('delete'))
```
#### File: Joynice/PicGo_Web/run.py
```python
__author__ = 'Joynice'
from flask import Flask
from flask_wtf.csrf import CSRFProtect
from front import front_bp
import config
from exts import db
def create_app():
app = Flask(__name__)
csrf = CSRFProtect()
csrf.init_app(app=app)
app.config.from_object(config.config)
app.register_blueprint(front_bp)
db.init_app(app=app)
return app
app = create_app()
if __name__ == '__main__':
app.run()
``` |
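run.py wires the blueprint, CSRF protection and the database together with the application-factory pattern, so the same app can be constructed in tests. A sketch of exercising the upload endpoint with Flask's test client, assuming the PicGo_Web package and a valid config module are importable:
```python
# Sketch: driving the /images/ upload endpoint from a test.
# Assumes PicGo_Web (and a valid config module) is on the path.
import io
from run import create_app

app = create_app()
app.config["WTF_CSRF_ENABLED"] = False  # disable CSRF for this local test

with app.test_client() as client:
    data = {"file": (io.BytesIO(b"fake image bytes"), "demo.png")}
    resp = client.post("/images/", data=data, content_type="multipart/form-data")
    print(resp.status_code, resp.get_json())
```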
{
"source": "Joyoe/Magisk_jojo_build23013",
"score": 3
} |
#### File: jni/zygisk/gen_jni_hooks.py
```python
primitives = ['jint', 'jboolean', 'jlong']
class JType:
def __init__(self, cpp, jni):
self.cpp = cpp
self.jni = jni
class JArray(JType):
def __init__(self, type):
if type.cpp in primitives:
name = type.cpp + 'Array'
else:
name = 'jobjectArray'
super().__init__(name, '[' + type.jni)
class Argument:
def __init__(self, name, type, set_arg = False):
self.name = name
self.type = type
self.set_arg = set_arg
def cpp(self):
return f'{self.type.cpp} {self.name}'
# Args we don't care about are given an auto-generated name
class Anon(Argument):
cnt = 0
def __init__(self, type):
super().__init__(f'_{Anon.cnt}', type)
Anon.cnt += 1
class Return:
def __init__(self, value, type):
self.value = value
self.type = type
class Method:
def __init__(self, name, ret, args):
self.name = name
self.ret = ret
self.args = args
def cpp(self):
return ', '.join(map(lambda x: x.cpp(), self.args))
def name_list(self):
return ', '.join(map(lambda x: x.name, self.args))
def jni(self):
args = ''.join(map(lambda x: x.type.jni, self.args))
return f'({args}){self.ret.type.jni}'
def body(self):
return ''
class JNIHook(Method):
def __init__(self, ver, ret, args):
name = f'{self.base_name()}_{ver}'
super().__init__(name, ret, args)
def base_name(self):
return ''
def orig_method(self):
return f'reinterpret_cast<decltype(&{self.name})>({self.base_name()}_orig)'
def ind(i):
return '\n' + ' ' * i
# Common types
jint = JType('jint', 'I')
jintArray = JArray(jint)
jstring = JType('jstring', 'Ljava/lang/String;')
jboolean = JType('jboolean', 'Z')
jlong = JType('jlong', 'J')
void = JType('void', 'V')
class ForkAndSpec(JNIHook):
def __init__(self, ver, args):
super().__init__(ver, Return('ctx.pid', jint), args)
def base_name(self):
return 'nativeForkAndSpecialize'
def init_args(self):
return 'AppSpecializeArgsImpl args(uid, gid, gids, runtime_flags, mount_external, se_info, nice_name, instruction_set, app_data_dir);'
def body(self):
decl = ''
decl += ind(1) + self.init_args()
for a in self.args:
if a.set_arg:
decl += ind(1) + f'args.{a.name} = &{a.name};'
decl += ind(1) + 'HookContext ctx;'
decl += ind(1) + 'ctx.env = env;'
decl += ind(1) + 'ctx.raw_args = &args;'
decl += ind(1) + f'ctx.{self.base_name()}_pre();'
decl += ind(1) + self.orig_method() + '('
decl += ind(2) + f'env, clazz, {self.name_list()}'
decl += ind(1) + ');'
decl += ind(1) + f'ctx.{self.base_name()}_post();'
return decl
class SpecApp(ForkAndSpec):
def __init__(self, ver, args):
super().__init__(ver, args)
self.ret = Return('', void)
def base_name(self):
return 'nativeSpecializeAppProcess'
class ForkServer(ForkAndSpec):
def base_name(self):
return 'nativeForkSystemServer'
def init_args(self):
return 'ServerSpecializeArgsImpl args(uid, gid, gids, runtime_flags, permitted_capabilities, effective_capabilities);'
# Common args
uid = Argument('uid', jint)
gid = Argument('gid', jint)
gids = Argument('gids', jintArray)
runtime_flags = Argument('runtime_flags', jint)
rlimits = Argument('rlimits', JArray(jintArray))
mount_external = Argument('mount_external', jint)
se_info = Argument('se_info', jstring)
nice_name = Argument('nice_name', jstring)
fds_to_close = Argument('fds_to_close', jintArray)
instruction_set = Argument('instruction_set', jstring)
app_data_dir = Argument('app_data_dir', jstring)
# o
fds_to_ignore = Argument('fds_to_ignore', jintArray)
# p
is_child_zygote = Argument('is_child_zygote', jboolean, True)
# q_alt
is_top_app = Argument('is_top_app', jboolean, True)
# r
pkg_data_info_list = Argument('pkg_data_info_list', JArray(jstring), True)
whitelisted_data_info_list = Argument('whitelisted_data_info_list', JArray(jstring), True)
mount_data_dirs = Argument('mount_data_dirs', jboolean, True)
mount_storage_dirs = Argument('mount_storage_dirs', jboolean, True)
# server
permitted_capabilities = Argument('permitted_capabilities', jlong)
effective_capabilities = Argument('effective_capabilities', jlong)
# Method definitions
fas_l = ForkAndSpec('l', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, nice_name, fds_to_close, instruction_set, app_data_dir])
fas_o = ForkAndSpec('o', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, nice_name, fds_to_close, fds_to_ignore, instruction_set, app_data_dir])
fas_p = ForkAndSpec('p', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir])
fas_q_alt = ForkAndSpec('q_alt', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app])
fas_r = ForkAndSpec('r', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app,
pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs])
fas_samsung_m = ForkAndSpec('samsung_m', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, fds_to_close, instruction_set, app_data_dir])
fas_samsung_n = ForkAndSpec('samsung_n', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, fds_to_close, instruction_set, app_data_dir, Anon(jint)])
fas_samsung_o = ForkAndSpec('samsung_o', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, fds_to_close, fds_to_ignore, instruction_set, app_data_dir])
fas_samsung_p = ForkAndSpec('samsung_p', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, fds_to_close, fds_to_ignore, is_child_zygote,
instruction_set, app_data_dir])
spec_q = SpecApp('q', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, is_child_zygote, instruction_set, app_data_dir])
spec_q_alt = SpecApp('q_alt', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, is_child_zygote, instruction_set, app_data_dir, is_top_app])
spec_r = SpecApp('r', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name,
is_child_zygote, instruction_set, app_data_dir, is_top_app, pkg_data_info_list,
whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs])
spec_samsung_q = SpecApp('samsung_q', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, Anon(jint), Anon(jint), nice_name, is_child_zygote, instruction_set, app_data_dir])
server_l = ForkServer('l', [uid, gid, gids, runtime_flags, rlimits,
permitted_capabilities, effective_capabilities])
server_samsung_q = ForkServer('samsung_q', [uid, gid, gids, runtime_flags, Anon(jint), Anon(jint), rlimits,
permitted_capabilities, effective_capabilities])
hook_map = {}
def gen_jni_def(clz, methods):
if clz not in hook_map:
hook_map[clz] = []
decl = ''
for m in methods:
decl += ind(0) + f'{m.ret.type.cpp} {m.name}(JNIEnv *env, jclass clazz, {m.cpp()}) {{'
decl += m.body()
if m.ret.value:
decl += ind(1) + f'return {m.ret.value};'
decl += ind(0) + '}'
decl += ind(0) + f'const JNINativeMethod {m.base_name()}_methods[] = {{'
for m in methods:
decl += ind(1) + '{'
decl += ind(2) + f'"{m.base_name()}",'
decl += ind(2) + f'"{m.jni()}",'
decl += ind(2) + f'(void *) &{m.name}'
decl += ind(1) + '},'
decl += ind(0) + '};'
decl = ind(0) + f'void *{m.base_name()}_orig = nullptr;' + decl
decl += ind(0) + f'constexpr int {m.base_name()}_methods_num = std::size({m.base_name()}_methods);'
decl += ind(0)
hook_map[clz].append(m.base_name())
return decl
def gen_jni_hook():
decl = ''
decl += ind(0) + 'unique_ptr<JNINativeMethod[]> hookAndSaveJNIMethods(const char *className, const JNINativeMethod *methods, int numMethods) {'
decl += ind(1) + 'unique_ptr<JNINativeMethod[]> newMethods;'
decl += ind(1) + 'int clz_id = -1;'
decl += ind(1) + 'int hook_cnt = 0;'
decl += ind(1) + 'do {'
for index, (clz, methods) in enumerate(hook_map.items()):
decl += ind(2) + f'if (className == "{clz}"sv) {{'
decl += ind(3) + f'clz_id = {index};'
decl += ind(3) + f'hook_cnt = {len(methods)};'
decl += ind(3) + 'break;'
decl += ind(2) + '}'
decl += ind(1) + '} while (false);'
decl += ind(1) + 'if (hook_cnt) {'
decl += ind(2) + 'newMethods = make_unique<JNINativeMethod[]>(numMethods);'
decl += ind(2) + 'memcpy(newMethods.get(), methods, sizeof(JNINativeMethod) * numMethods);'
decl += ind(1) + '}'
decl += ind(1) + 'auto &class_map = (*jni_method_map)[className];'
decl += ind(1) + 'for (int i = 0; i < numMethods; ++i) {'
for index, methods in enumerate(hook_map.values()):
decl += ind(2) + f'if (hook_cnt && clz_id == {index}) {{'
for m in methods:
decl += ind(3) + f'HOOK_JNI({m})'
decl += ind(2) + '}'
decl += ind(2) + 'class_map[methods[i].name][methods[i].signature] = methods[i].fnPtr;'
decl += ind(1) + '}'
decl += ind(1) + 'return newMethods;'
decl += ind(0) + '}'
return decl
with open('jni_hooks.hpp', 'w') as f:
f.write('// Generated by gen_jni_hooks.py\n')
zygote = 'com/android/internal/os/Zygote'
methods = [fas_l, fas_o, fas_p, fas_q_alt, fas_r, fas_samsung_m, fas_samsung_n, fas_samsung_o, fas_samsung_p]
f.write(gen_jni_def(zygote, methods))
methods = [spec_q, spec_q_alt, spec_r, spec_samsung_q]
f.write(gen_jni_def(zygote, methods))
methods = [server_l, server_samsung_q]
f.write(gen_jni_def(zygote, methods))
f.write(gen_jni_hook())
f.write('\n')
``` |
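The generator's core idea is that a JNI method signature is just the concatenated type descriptors of the arguments wrapped in parentheses, followed by the return descriptor. A self-contained mini-version of what Method.jni() above computes:
```python
# Mini-version of Method.jni(): JNI signatures are "(<arg descriptors>)<ret>".
# Descriptors for jint uid and jstring nice_name, returning jint (the pid).
arg_descriptors = ["I", "Ljava/lang/String;"]
ret_descriptor = "I"
signature = "({}){}".format("".join(arg_descriptors), ret_descriptor)
print(signature)  # -> (ILjava/lang/String;)I
```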
{
"source": "Joyoe/Magisk-nosbin_magisk-nohide",
"score": 2
} |
#### File: selinux/gui/loginsPage.py
```python
import sys
try:
from subprocess import getstatusoutput
except ImportError:
from commands import getstatusoutput
from gi.repository import GObject, Gtk
import seobject
from semanagePage import *
##
## I18N
##
PROGNAME = "policycoreutils"
try:
import gettext
kwargs = {}
if sys.version_info < (3,):
kwargs['unicode'] = True
gettext.install(PROGNAME,
localedir="/usr/share/locale",
codeset='utf-8',
**kwargs)
except:
try:
import builtins
builtins.__dict__['_'] = str
except ImportError:
import __builtin__
__builtin__.__dict__['_'] = unicode
class loginsPage(semanagePage):
def __init__(self, xml):
self.firstTime = False
semanagePage.__init__(self, xml, "logins", _("User Mapping"))
self.store = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING, GObject.TYPE_STRING)
self.view.set_model(self.store)
self.store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Login\nName"), Gtk.CellRendererText(), text=0)
col.set_sort_column_id(0)
col.set_resizable(True)
self.view.append_column(col)
col = Gtk.TreeViewColumn(_("SELinux\nUser"), Gtk.CellRendererText(), text=1)
col.set_resizable(True)
self.view.append_column(col)
col = Gtk.TreeViewColumn(_("MLS/\nMCS Range"), Gtk.CellRendererText(), text=2)
col.set_resizable(True)
self.view.append_column(col)
self.load()
self.loginsNameEntry = xml.get_object("loginsNameEntry")
self.loginsSelinuxUserCombo = xml.get_object("loginsSelinuxUserCombo")
self.loginsMLSEntry = xml.get_object("loginsMLSEntry")
def load(self, filter=""):
self.filter = filter
self.login = seobject.loginRecords()
dict = self.login.get_all(0)
self.store.clear()
for k in sorted(dict.keys()):
range = seobject.translate(dict[k][1])
if not (self.match(k, filter) or self.match(dict[k][0], filter) or self.match(range, filter)):
continue
iter = self.store.append()
self.store.set_value(iter, 0, k)
self.store.set_value(iter, 1, dict[k][0])
self.store.set_value(iter, 2, range)
self.view.get_selection().select_path((0,))
def __dialogSetup(self):
if self.firstTime:
return
self.firstTime = True
liststore = Gtk.ListStore(GObject.TYPE_STRING)
self.loginsSelinuxUserCombo.set_model(liststore)
cell = Gtk.CellRendererText()
self.loginsSelinuxUserCombo.pack_start(cell, True)
self.loginsSelinuxUserCombo.add_attribute(cell, 'text', 0)
selusers = seobject.seluserRecords().get_all(0)
for k in sorted(selusers.keys()):
if k != "system_u":
self.loginsSelinuxUserCombo.append_text(k)
iter = liststore.get_iter_first()
while liststore.get_value(iter, 0) != "user_u":
iter = liststore.iter_next(iter)
self.loginsSelinuxUserCombo.set_active_iter(iter)
def dialogInit(self):
self.__dialogSetup()
store, iter = self.view.get_selection().get_selected()
self.loginsNameEntry.set_text(store.get_value(iter, 0))
self.loginsNameEntry.set_sensitive(False)
self.loginsMLSEntry.set_text(store.get_value(iter, 2))
seuser = store.get_value(iter, 1)
liststore = self.loginsSelinuxUserCombo.get_model()
iter = liststore.get_iter_first()
while iter != None and liststore.get_value(iter, 0) != seuser:
iter = liststore.iter_next(iter)
if iter != None:
self.loginsSelinuxUserCombo.set_active_iter(iter)
def dialogClear(self):
self.__dialogSetup()
self.loginsNameEntry.set_text("")
self.loginsNameEntry.set_sensitive(True)
self.loginsMLSEntry.set_text("s0")
def delete(self):
store, iter = self.view.get_selection().get_selected()
try:
login = store.get_value(iter, 0)
if login == "root" or login == "__default__":
raise ValueError(_("Login '%s' is required") % login)
self.wait()
(rc, out) = getstatusoutput("semanage login -d %s" % login)
self.ready()
if rc != 0:
self.error(out)
return False
store.remove(iter)
self.view.get_selection().select_path((0,))
except ValueError as e:
self.error(e.args[0])
def add(self):
target = self.loginsNameEntry.get_text().strip()
serange = self.loginsMLSEntry.get_text().strip()
if serange == "":
serange = "s0"
list_model = self.loginsSelinuxUserCombo.get_model()
iter = self.loginsSelinuxUserCombo.get_active_iter()
seuser = list_model.get_value(iter, 0)
self.wait()
(rc, out) = getstatusoutput("semanage login -a -s %s -r %s %s" % (seuser, serange, target))
self.ready()
if rc != 0:
self.error(out)
return False
iter = self.store.append()
self.store.set_value(iter, 0, target)
self.store.set_value(iter, 1, seuser)
self.store.set_value(iter, 2, seobject.translate(serange))
def modify(self):
target = self.loginsNameEntry.get_text().strip()
serange = self.loginsMLSEntry.get_text().strip()
if serange == "":
serange = "s0"
list_model = self.loginsSelinuxUserCombo.get_model()
iter = self.loginsSelinuxUserCombo.get_active_iter()
seuser = list_model.get_value(iter, 0)
self.wait()
(rc, out) = getstatusoutput("semanage login -m -s %s -r %s %s" % (seuser, serange, target))
self.ready()
if rc != 0:
self.error(out)
return False
store, iter = self.view.get_selection().get_selected()
self.store.set_value(iter, 0, target)
self.store.set_value(iter, 1, seuser)
self.store.set_value(iter, 2, seobject.translate(serange))
```
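loginsPage shells out to semanage for every mutation, so the page is effectively a GUI over three CLI calls. A sketch of the equivalent commands, with "alice" and "user_u" as placeholder values (requires root on an SELinux-enabled system):
```python
# Sketch of the semanage calls wrapped by add()/modify()/delete() above.
from subprocess import getstatusoutput

rc, out = getstatusoutput("semanage login -a -s user_u -r s0 alice")  # add
if rc != 0:
    print("add failed:", out)
rc, out = getstatusoutput("semanage login -m -s user_u -r s0 alice")  # modify
rc, out = getstatusoutput("semanage login -d alice")                  # delete
```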
#### File: selinux/gui/modulesPage.py
```python
import sys
from subprocess import Popen, PIPE
try:
from subprocess import getstatusoutput
except ImportError:
from commands import getstatusoutput
from gi.repository import GObject, Gtk
import selinux
from semanagePage import *
##
## I18N
##
PROGNAME = "policycoreutils"
try:
import gettext
kwargs = {}
if sys.version_info < (3,):
kwargs['unicode'] = True
gettext.install(PROGNAME,
localedir="/usr/share/locale",
codeset='utf-8',
**kwargs)
except:
try:
import builtins
builtins.__dict__['_'] = str
except ImportError:
import __builtin__
__builtin__.__dict__['_'] = unicode
class modulesPage(semanagePage):
def __init__(self, xml):
semanagePage.__init__(self, xml, "modules", _("Policy Module"))
self.module_filter = xml.get_object("modulesFilterEntry")
self.module_filter.connect("focus_out_event", self.filter_changed)
self.module_filter.connect("activate", self.filter_changed)
self.audit_enabled = False
self.store = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING,
GObject.TYPE_STRING)
self.view.set_model(self.store)
self.store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Module Name"), Gtk.CellRendererText(), text=0)
col.set_sort_column_id(0)
col.set_resizable(True)
self.view.append_column(col)
self.store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Priority"), Gtk.CellRendererText(), text=1)
self.enable_audit_button = xml.get_object("enableAuditButton")
self.enable_audit_button.connect("clicked", self.enable_audit)
self.new_button = xml.get_object("newModuleButton")
self.new_button.connect("clicked", self.new_module)
col.set_sort_column_id(1)
col.set_resizable(True)
self.view.append_column(col)
self.store.set_sort_column_id(2, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Kind"), Gtk.CellRendererText(), text=2)
col.set_sort_column_id(2)
col.set_resizable(True)
self.view.append_column(col)
self.store.set_sort_func(1, self.sort_int, "")
status, self.policy_type = selinux.selinux_getpolicytype()
self.load()
def sort_int(self, treemodel, iter1, iter2, user_data):
try:
p1 = int(treemodel.get_value(iter1, 1))
            p2 = int(treemodel.get_value(iter2, 1))
if p1 > p2:
return 1
if p1 == p2:
return 0
return -1
except:
return 0
def load(self, filter=""):
self.filter = filter
self.store.clear()
try:
fd = Popen("semodule -lfull", shell=True, stdout=PIPE).stdout
l = fd.readlines()
fd.close()
for i in l:
priority, module, kind = i.decode('utf-8').split()
if not (self.match(module, filter) or self.match(priority, filter)):
continue
iter = self.store.append()
self.store.set_value(iter, 0, module.strip())
self.store.set_value(iter, 1, priority.strip())
self.store.set_value(iter, 2, kind.strip())
except:
pass
self.view.get_selection().select_path((0,))
def new_module(self, args):
try:
Popen(["selinux-polgengui"])
except ValueError as e:
self.error(e.args[0])
def delete(self):
store, iter = self.view.get_selection().get_selected()
module = store.get_value(iter, 0)
priority = store.get_value(iter, 1)
try:
self.wait()
status, output = getstatusoutput("semodule -X %s -r %s" % (priority, module))
self.ready()
if status != 0:
self.error(output)
else:
store.remove(iter)
self.view.get_selection().select_path((0,))
except ValueError as e:
self.error(e.args[0])
def enable_audit(self, button):
self.audit_enabled = not self.audit_enabled
try:
self.wait()
if self.audit_enabled:
status, output = getstatusoutput("semodule -DB")
button.set_label(_("Disable Audit"))
else:
status, output = getstatusoutput("semodule -B")
button.set_label(_("Enable Audit"))
self.ready()
if status != 0:
self.error(output)
except ValueError as e:
self.error(e.args[0])
def disable_audit(self, button):
try:
self.wait()
status, output = getstatusoutput("semodule -B")
self.ready()
if status != 0:
self.error(output)
except ValueError as e:
self.error(e.args[0])
def propertiesDialog(self):
# Do nothing
return
def addDialog(self):
dialog = Gtk.FileChooserDialog(_("Load Policy Module"),
None,
Gtk.FileChooserAction.OPEN,
(Gtk.STOCK_CANCEL, Gtk.ResponseType.CANCEL,
Gtk.STOCK_OPEN, Gtk.ResponseType.OK))
dialog.set_default_response(Gtk.ResponseType.OK)
filter = Gtk.FileFilter()
filter.set_name("Policy Files")
filter.add_pattern("*.pp")
dialog.add_filter(filter)
response = dialog.run()
if response == Gtk.ResponseType.OK:
self.add(dialog.get_filename())
dialog.destroy()
def add(self, file):
try:
self.wait()
status, output = getstatusoutput("semodule -i %s" % file)
self.ready()
if status != 0:
self.error(output)
else:
self.load()
except ValueError as e:
self.error(e.args[0])
```
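modulesPage.load() parses the three-column output of `semodule -lfull` (priority, module name, kind). A sketch of the same parsing run against canned output instead of a live system:
```python
# Parsing sketch for `semodule -lfull` output, using canned bytes.
sample = b"100 alsa pp\n400 mymodule cil\n"
for line in sample.decode("utf-8").splitlines():
    priority, module, kind = line.split()
    print(module, "priority", priority, "kind", kind)
```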
#### File: selinux/gui/polgengui.py
```python
import signal
import string
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
import os
from gi.repository import GObject
import sys
try:
import sepolicy
except ValueError as e:
sys.stderr.write("%s: %s\n" % (e.__class__.__name__, str(e)))
sys.exit(1)
import sepolicy.generate
import sepolicy.interface
try:
from subprocess import getstatusoutput
except ImportError:
from commands import getstatusoutput
import re
def get_all_modules():
try:
all_modules = []
rc, output = getstatusoutput("semodule -l 2>/dev/null")
if rc == 0:
l = output.split("\n")
for i in l:
all_modules.append(i.split()[0])
except:
pass
return all_modules
##
## I18N
##
PROGNAME = "policycoreutils"
try:
import gettext
kwargs = {}
if sys.version_info < (3,):
kwargs['unicode'] = True
gettext.install(PROGNAME,
localedir="/usr/share/locale",
codeset='utf-8',
**kwargs)
except:
try:
import builtins
builtins.__dict__['_'] = str
except ImportError:
import __builtin__
__builtin__.__dict__['_'] = unicode
version = "1.0"
sys.path.append('/usr/share/system-config-selinux')
sys.path.append('.')
# From <NAME> http://www.daa.com.au/pipermail/pygtk/2003-February/004454.html
def foreach(model, path, iter, selected):
selected.append(model.get_value(iter, 0))
##
## Pull in the Glade file
##
xml = Gtk.Builder()
xml.set_translation_domain(PROGNAME)
if os.access("polgen.ui", os.F_OK):
xml.add_from_file("polgen.ui")
else:
xml.add_from_file("/usr/share/system-config-selinux/polgen.ui")
FILE = 1
DIR = 2
class childWindow:
START_PAGE = 0
SELECT_TYPE_PAGE = 0
APP_PAGE = 1
EXISTING_USER_PAGE = 2
TRANSITION_PAGE = 3
USER_TRANSITION_PAGE = 4
ADMIN_PAGE = 5
ROLE_PAGE = 6
IN_NET_PAGE = 7
OUT_NET_PAGE = 8
COMMON_APPS_PAGE = 9
FILES_PAGE = 10
BOOLEAN_PAGE = 11
SELECT_DIR_PAGE = 12
FINISH_PAGE = 12
def __init__(self):
self.xml = xml
self.notebook = xml.get_object("notebook")
self.label_dict = {}
self.tooltip_dict = {}
label = xml.get_object("select_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("select_user_roles_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("select_dir_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("select_domain_admin_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("select_in_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("select_out_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("select_common_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("select_manages_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("select_booleans_label")
self.label_dict[label] = label.get_text()
label = xml.get_object("existing_user_treeview")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("transition_treeview")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("in_tcp_all_checkbutton")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("in_tcp_reserved_checkbutton")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("in_tcp_unreserved_checkbutton")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("in_tcp_entry")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("in_udp_all_checkbutton")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("in_udp_reserved_checkbutton")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("in_udp_unreserved_checkbutton")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("in_udp_entry")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("out_tcp_entry")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("out_udp_entry")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("out_tcp_all_checkbutton")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("out_udp_all_checkbutton")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("boolean_treeview")
self.tooltip_dict[label] = label.get_tooltip_text()
label = xml.get_object("write_treeview")
self.tooltip_dict[label] = label.get_tooltip_text()
try:
self.all_types = sepolicy.generate.get_all_types()
self.all_modules = get_all_modules()
self.all_roles = sepolicy.generate.get_all_roles()
self.all_users = sepolicy.generate.get_all_users()
except RuntimeError as e:
self.all_types = []
self.all_modules = []
self.all_roles = []
self.all_users = []
self.error(str(e))
self.name = ""
handlers = {
"on_delete_clicked": self.delete,
"on_delete_boolean_clicked": self.delete_boolean,
"on_exec_select_clicked": self.exec_select,
"on_init_script_select_clicked": self.init_script_select,
"on_add_clicked": self.add,
"on_add_boolean_clicked": self.add_boolean,
"on_add_dir_clicked": self.add_dir,
"on_about_clicked": self.on_about_clicked
}
xml.connect_signals(handlers)
xml.get_object("cancel_button").connect("clicked", self.quit)
self.forward_button = xml.get_object("forward_button")
self.forward_button.connect("clicked", self.forward)
self.back_button = xml.get_object("back_button")
self.back_button.connect("clicked", self.back)
self.boolean_dialog = xml.get_object("boolean_dialog")
self.boolean_name_entry = xml.get_object("boolean_name_entry")
self.boolean_description_entry = xml.get_object("boolean_description_entry")
self.pages = {}
for i in sepolicy.generate.USERS:
self.pages[i] = [self.SELECT_TYPE_PAGE, self.APP_PAGE, self.TRANSITION_PAGE, self.ROLE_PAGE, self.IN_NET_PAGE, self.OUT_NET_PAGE, self.BOOLEAN_PAGE, self.SELECT_DIR_PAGE]
self.pages[sepolicy.generate.RUSER] = [self.SELECT_TYPE_PAGE, self.APP_PAGE, self.ADMIN_PAGE, self.USER_TRANSITION_PAGE, self.BOOLEAN_PAGE, self.SELECT_DIR_PAGE]
self.pages[sepolicy.generate.LUSER] = [self.SELECT_TYPE_PAGE, self.APP_PAGE, self.TRANSITION_PAGE, self.IN_NET_PAGE, self.OUT_NET_PAGE, self.BOOLEAN_PAGE, self.SELECT_DIR_PAGE]
self.pages[sepolicy.generate.SANDBOX] = [self.SELECT_TYPE_PAGE, self.APP_PAGE, self.IN_NET_PAGE, self.OUT_NET_PAGE, self.BOOLEAN_PAGE, self.SELECT_DIR_PAGE]
self.pages[sepolicy.generate.EUSER] = [self.SELECT_TYPE_PAGE, self.EXISTING_USER_PAGE, self.TRANSITION_PAGE, self.ROLE_PAGE, self.IN_NET_PAGE, self.OUT_NET_PAGE, self.BOOLEAN_PAGE, self.SELECT_DIR_PAGE]
for i in sepolicy.generate.APPLICATIONS:
self.pages[i] = [self.SELECT_TYPE_PAGE, self.APP_PAGE, self.IN_NET_PAGE, self.OUT_NET_PAGE, self.COMMON_APPS_PAGE, self.FILES_PAGE, self.BOOLEAN_PAGE, self.SELECT_DIR_PAGE]
self.pages[sepolicy.generate.USER] = [self.SELECT_TYPE_PAGE, self.APP_PAGE, self.USER_TRANSITION_PAGE, self.IN_NET_PAGE, self.OUT_NET_PAGE, self.COMMON_APPS_PAGE, self.FILES_PAGE, self.BOOLEAN_PAGE, self.SELECT_DIR_PAGE]
self.current_page = 0
self.back_button.set_sensitive(0)
self.network_buttons = {}
self.in_tcp_all_checkbutton = xml.get_object("in_tcp_all_checkbutton")
self.in_tcp_reserved_checkbutton = xml.get_object("in_tcp_reserved_checkbutton")
self.in_tcp_unreserved_checkbutton = xml.get_object("in_tcp_unreserved_checkbutton")
self.in_tcp_entry = self.xml.get_object("in_tcp_entry")
self.network_buttons[self.in_tcp_all_checkbutton] = [self.in_tcp_reserved_checkbutton, self.in_tcp_unreserved_checkbutton, self.in_tcp_entry]
self.out_tcp_all_checkbutton = xml.get_object("out_tcp_all_checkbutton")
self.out_tcp_reserved_checkbutton = xml.get_object("out_tcp_reserved_checkbutton")
self.out_tcp_unreserved_checkbutton = xml.get_object("out_tcp_unreserved_checkbutton")
self.out_tcp_entry = self.xml.get_object("out_tcp_entry")
self.network_buttons[self.out_tcp_all_checkbutton] = [self.out_tcp_entry]
self.in_udp_all_checkbutton = xml.get_object("in_udp_all_checkbutton")
self.in_udp_reserved_checkbutton = xml.get_object("in_udp_reserved_checkbutton")
self.in_udp_unreserved_checkbutton = xml.get_object("in_udp_unreserved_checkbutton")
self.in_udp_entry = self.xml.get_object("in_udp_entry")
self.network_buttons[self.in_udp_all_checkbutton] = [self.in_udp_reserved_checkbutton, self.in_udp_unreserved_checkbutton, self.in_udp_entry]
self.out_udp_all_checkbutton = xml.get_object("out_udp_all_checkbutton")
self.out_udp_entry = self.xml.get_object("out_udp_entry")
self.network_buttons[self.out_udp_all_checkbutton] = [self.out_udp_entry]
for b in self.network_buttons.keys():
b.connect("clicked", self.network_all_clicked)
self.boolean_treeview = self.xml.get_object("boolean_treeview")
self.boolean_store = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING)
self.boolean_treeview.set_model(self.boolean_store)
self.boolean_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Name"), Gtk.CellRendererText(), text=0)
self.boolean_treeview.append_column(col)
col = Gtk.TreeViewColumn(_("Description"), Gtk.CellRendererText(), text=1)
self.boolean_treeview.append_column(col)
self.role_treeview = self.xml.get_object("role_treeview")
self.role_store = Gtk.ListStore(GObject.TYPE_STRING)
self.role_treeview.set_model(self.role_store)
self.role_treeview.get_selection().set_mode(Gtk.SelectionMode.MULTIPLE)
self.role_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Role"), Gtk.CellRendererText(), text=0)
self.role_treeview.append_column(col)
self.existing_user_treeview = self.xml.get_object("existing_user_treeview")
self.existing_user_store = Gtk.ListStore(GObject.TYPE_STRING)
self.existing_user_treeview.set_model(self.existing_user_store)
self.existing_user_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Existing_User"), Gtk.CellRendererText(), text=0)
self.existing_user_treeview.append_column(col)
for i in self.all_roles:
iter = self.role_store.append()
self.role_store.set_value(iter, 0, i[:-2])
self.in_tcp_reserved_checkbutton = xml.get_object("in_tcp_reserved_checkbutton")
self.transition_treeview = self.xml.get_object("transition_treeview")
self.transition_store = Gtk.ListStore(GObject.TYPE_STRING)
self.transition_treeview.set_model(self.transition_store)
self.transition_treeview.get_selection().set_mode(Gtk.SelectionMode.MULTIPLE)
self.transition_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Application"), Gtk.CellRendererText(), text=0)
self.transition_treeview.append_column(col)
self.user_transition_treeview = self.xml.get_object("user_transition_treeview")
self.user_transition_store = Gtk.ListStore(GObject.TYPE_STRING)
self.user_transition_treeview.set_model(self.user_transition_store)
self.user_transition_treeview.get_selection().set_mode(Gtk.SelectionMode.MULTIPLE)
self.user_transition_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Application"), Gtk.CellRendererText(), text=0)
self.user_transition_treeview.append_column(col)
for i in self.all_users:
iter = self.user_transition_store.append()
self.user_transition_store.set_value(iter, 0, i[:-2])
iter = self.existing_user_store.append()
self.existing_user_store.set_value(iter, 0, i[:-2])
self.admin_treeview = self.xml.get_object("admin_treeview")
self.admin_store = Gtk.ListStore(GObject.TYPE_STRING)
self.admin_treeview.set_model(self.admin_store)
self.admin_treeview.get_selection().set_mode(Gtk.SelectionMode.MULTIPLE)
self.admin_store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Application"), Gtk.CellRendererText(), text=0)
self.admin_treeview.append_column(col)
try:
for u in sepolicy.interface.get_user():
iter = self.transition_store.append()
self.transition_store.set_value(iter, 0, u)
for a in sepolicy.interface.get_admin():
iter = self.admin_store.append()
self.admin_store.set_value(iter, 0, a)
except ValueError as e:
            self.error(e.args[0])
def confine_application(self):
return self.get_type() in sepolicy.generate.APPLICATIONS
def forward(self, arg):
type = self.get_type()
if self.current_page == self.START_PAGE:
self.back_button.set_sensitive(1)
if self.pages[type][self.current_page] == self.SELECT_TYPE_PAGE:
if self.on_select_type_page_next():
return
if self.pages[type][self.current_page] == self.IN_NET_PAGE:
if self.on_in_net_page_next():
return
if self.pages[type][self.current_page] == self.OUT_NET_PAGE:
if self.on_out_net_page_next():
return
if self.pages[type][self.current_page] == self.APP_PAGE:
if self.on_name_page_next():
return
if self.pages[type][self.current_page] == self.EXISTING_USER_PAGE:
if self.on_existing_user_page_next():
return
if self.pages[type][self.current_page] == self.SELECT_DIR_PAGE:
outputdir = self.output_entry.get_text()
if not os.path.isdir(outputdir):
self.error(_("%s must be a directory") % outputdir)
return False
if self.pages[type][self.current_page] == self.FINISH_PAGE:
self.generate_policy()
self.xml.get_object("cancel_button").set_label(Gtk.STOCK_CLOSE)
else:
self.current_page = self.current_page + 1
self.notebook.set_current_page(self.pages[type][self.current_page])
if self.pages[type][self.current_page] == self.FINISH_PAGE:
self.forward_button.set_label(Gtk.STOCK_APPLY)
def back(self, arg):
type = self.get_type()
if self.pages[type][self.current_page] == self.FINISH_PAGE:
self.forward_button.set_label(Gtk.STOCK_GO_FORWARD)
self.current_page = self.current_page - 1
self.notebook.set_current_page(self.pages[type][self.current_page])
if self.pages[type][self.current_page] == self.START_PAGE:
self.back_button.set_sensitive(0)
def network_all_clicked(self, button):
active = button.get_active()
for b in self.network_buttons[button]:
b.set_sensitive(not active)
def verify(self, message, title=""):
dlg = Gtk.MessageDialog(None, 0, Gtk.MessageType.INFO,
Gtk.ButtonsType.YES_NO,
message)
dlg.set_title(title)
dlg.set_position(Gtk.WindowPosition.MOUSE)
dlg.show_all()
rc = dlg.run()
dlg.destroy()
return rc
def info(self, message):
dlg = Gtk.MessageDialog(None, 0, Gtk.MessageType.INFO,
Gtk.ButtonsType.OK,
message)
dlg.set_position(Gtk.WindowPosition.MOUSE)
dlg.show_all()
dlg.run()
dlg.destroy()
def error(self, message):
dlg = Gtk.MessageDialog(None, 0, Gtk.MessageType.ERROR,
Gtk.ButtonsType.CLOSE,
message)
dlg.set_position(Gtk.WindowPosition.MOUSE)
dlg.show_all()
dlg.run()
dlg.destroy()
def get_name(self):
if self.existing_user_radiobutton.get_active():
store, iter = self.existing_user_treeview.get_selection().get_selected()
if iter == None:
raise ValueError(_("You must select a user"))
return store.get_value(iter, 0)
else:
return self.name_entry.get_text()
def get_type(self):
if self.sandbox_radiobutton.get_active():
return sepolicy.generate.SANDBOX
if self.cgi_radiobutton.get_active():
return sepolicy.generate.CGI
if self.user_radiobutton.get_active():
return sepolicy.generate.USER
if self.init_radiobutton.get_active():
return sepolicy.generate.DAEMON
if self.dbus_radiobutton.get_active():
return sepolicy.generate.DBUS
if self.inetd_radiobutton.get_active():
return sepolicy.generate.INETD
if self.login_user_radiobutton.get_active():
return sepolicy.generate.LUSER
if self.admin_user_radiobutton.get_active():
return sepolicy.generate.AUSER
if self.xwindows_user_radiobutton.get_active():
return sepolicy.generate.XUSER
if self.terminal_user_radiobutton.get_active():
return sepolicy.generate.TUSER
if self.root_user_radiobutton.get_active():
return sepolicy.generate.RUSER
if self.existing_user_radiobutton.get_active():
return sepolicy.generate.EUSER
def generate_policy(self, *args):
outputdir = self.output_entry.get_text()
try:
my_policy = sepolicy.generate.policy(self.get_name(), self.get_type())
iter = self.boolean_store.get_iter_first()
while(iter):
my_policy.add_boolean(self.boolean_store.get_value(iter, 0), self.boolean_store.get_value(iter, 1))
iter = self.boolean_store.iter_next(iter)
if self.get_type() in sepolicy.generate.APPLICATIONS:
my_policy.set_program(self.exec_entry.get_text())
my_policy.gen_symbols()
my_policy.set_use_syslog(self.syslog_checkbutton.get_active() == 1)
my_policy.set_use_tmp(self.tmp_checkbutton.get_active() == 1)
my_policy.set_use_uid(self.uid_checkbutton.get_active() == 1)
my_policy.set_use_pam(self.pam_checkbutton.get_active() == 1)
my_policy.set_use_dbus(self.dbus_checkbutton.get_active() == 1)
my_policy.set_use_audit(self.audit_checkbutton.get_active() == 1)
my_policy.set_use_terminal(self.terminal_checkbutton.get_active() == 1)
my_policy.set_use_mail(self.mail_checkbutton.get_active() == 1)
if self.get_type() is sepolicy.generate.DAEMON:
my_policy.set_init_script(self.init_script_entry.get_text())
if self.get_type() == sepolicy.generate.USER:
selected = []
self.user_transition_treeview.get_selection().selected_foreach(foreach, selected)
my_policy.set_transition_users(selected)
else:
if self.get_type() == sepolicy.generate.RUSER:
selected = []
self.admin_treeview.get_selection().selected_foreach(foreach, selected)
my_policy.set_admin_domains(selected)
selected = []
self.user_transition_treeview.get_selection().selected_foreach(foreach, selected)
my_policy.set_transition_users(selected)
else:
selected = []
self.transition_treeview.get_selection().selected_foreach(foreach, selected)
my_policy.set_transition_domains(selected)
selected = []
self.role_treeview.get_selection().selected_foreach(foreach, selected)
my_policy.set_admin_roles(selected)
my_policy.set_in_tcp(self.in_tcp_all_checkbutton.get_active(), self.in_tcp_reserved_checkbutton.get_active(), self.in_tcp_unreserved_checkbutton.get_active(), self.in_tcp_entry.get_text())
my_policy.set_in_udp(self.in_udp_all_checkbutton.get_active(), self.in_udp_reserved_checkbutton.get_active(), self.in_udp_unreserved_checkbutton.get_active(), self.in_udp_entry.get_text())
my_policy.set_out_tcp(self.out_tcp_all_checkbutton.get_active(), self.out_tcp_entry.get_text())
my_policy.set_out_udp(self.out_udp_all_checkbutton.get_active(), self.out_udp_entry.get_text())
iter = self.store.get_iter_first()
while(iter):
if self.store.get_value(iter, 1) == FILE:
my_policy.add_file(self.store.get_value(iter, 0))
else:
my_policy.add_dir(self.store.get_value(iter, 0))
iter = self.store.iter_next(iter)
self.info(my_policy.generate(outputdir))
return False
except ValueError as e:
            self.error(e.args[0])
def delete(self, args):
store, iter = self.view.get_selection().get_selected()
if iter != None:
store.remove(iter)
self.view.get_selection().select_path((0,))
def delete_boolean(self, args):
store, iter = self.boolean_treeview.get_selection().get_selected()
if iter != None:
store.remove(iter)
self.boolean_treeview.get_selection().select_path((0,))
def add_boolean(self, type):
self.boolean_name_entry.set_text("")
self.boolean_description_entry.set_text("")
rc = self.boolean_dialog.run()
self.boolean_dialog.hide()
if rc == Gtk.ResponseType.CANCEL:
return
iter = self.boolean_store.append()
self.boolean_store.set_value(iter, 0, self.boolean_name_entry.get_text())
self.boolean_store.set_value(iter, 1, self.boolean_description_entry.get_text())
def __add(self, type):
rc = self.file_dialog.run()
self.file_dialog.hide()
if rc == Gtk.ResponseType.CANCEL:
return
for i in self.file_dialog.get_filenames():
iter = self.store.append()
self.store.set_value(iter, 0, i)
self.store.set_value(iter, 1, type)
def exec_select(self, args):
self.file_dialog.set_select_multiple(0)
self.file_dialog.set_title(_("Select executable file to be confined."))
self.file_dialog.set_action(Gtk.FileChooserAction.OPEN)
self.file_dialog.set_current_folder("/usr/sbin")
rc = self.file_dialog.run()
self.file_dialog.hide()
if rc == Gtk.ResponseType.CANCEL:
return
self.exec_entry.set_text(self.file_dialog.get_filename())
def init_script_select(self, args):
self.file_dialog.set_select_multiple(0)
self.file_dialog.set_title(_("Select init script file to be confined."))
self.file_dialog.set_action(Gtk.FileChooserAction.OPEN)
self.file_dialog.set_current_folder("/etc/rc.d/init.d")
rc = self.file_dialog.run()
self.file_dialog.hide()
if rc == Gtk.ResponseType.CANCEL:
return
self.init_script_entry.set_text(self.file_dialog.get_filename())
def add(self, args):
self.file_dialog.set_title(_("Select file(s) that confined application creates or writes"))
self.file_dialog.set_current_folder("/")
self.file_dialog.set_action(Gtk.FileChooserAction.OPEN)
self.file_dialog.set_select_multiple(1)
self.__add(FILE)
def add_dir(self, args):
self.file_dialog.set_title(_("Select directory(s) that the confined application owns and writes into"))
self.file_dialog.set_current_folder("/")
self.file_dialog.set_select_multiple(1)
self.file_dialog.set_action(Gtk.FileChooserAction.SELECT_FOLDER)
self.__add(DIR)
def on_about_clicked(self, args):
dlg = xml.get_object("about_dialog")
dlg.run()
dlg.hide()
def quit(self, args):
Gtk.main_quit()
def setupScreen(self):
# Bring in widgets from glade file.
self.mainWindow = self.xml.get_object("main_window")
self.druid = self.xml.get_object("druid")
self.type = 0
self.name_entry = self.xml.get_object("name_entry")
self.name_entry.connect("insert_text", self.on_name_entry_changed)
self.name_entry.connect("focus_out_event", self.on_focus_out_event)
self.exec_entry = self.xml.get_object("exec_entry")
self.exec_button = self.xml.get_object("exec_button")
self.init_script_entry = self.xml.get_object("init_script_entry")
self.init_script_button = self.xml.get_object("init_script_button")
self.output_entry = self.xml.get_object("output_entry")
self.output_entry.set_text(os.getcwd())
self.xml.get_object("output_button").connect("clicked", self.output_button_clicked)
self.xwindows_user_radiobutton = self.xml.get_object("xwindows_user_radiobutton")
self.terminal_user_radiobutton = self.xml.get_object("terminal_user_radiobutton")
self.root_user_radiobutton = self.xml.get_object("root_user_radiobutton")
self.login_user_radiobutton = self.xml.get_object("login_user_radiobutton")
self.admin_user_radiobutton = self.xml.get_object("admin_user_radiobutton")
self.existing_user_radiobutton = self.xml.get_object("existing_user_radiobutton")
self.user_radiobutton = self.xml.get_object("user_radiobutton")
self.init_radiobutton = self.xml.get_object("init_radiobutton")
self.inetd_radiobutton = self.xml.get_object("inetd_radiobutton")
self.dbus_radiobutton = self.xml.get_object("dbus_radiobutton")
self.cgi_radiobutton = self.xml.get_object("cgi_radiobutton")
self.sandbox_radiobutton = self.xml.get_object("sandbox_radiobutton")
self.tmp_checkbutton = self.xml.get_object("tmp_checkbutton")
self.uid_checkbutton = self.xml.get_object("uid_checkbutton")
self.pam_checkbutton = self.xml.get_object("pam_checkbutton")
self.dbus_checkbutton = self.xml.get_object("dbus_checkbutton")
self.audit_checkbutton = self.xml.get_object("audit_checkbutton")
self.terminal_checkbutton = self.xml.get_object("terminal_checkbutton")
self.mail_checkbutton = self.xml.get_object("mail_checkbutton")
self.syslog_checkbutton = self.xml.get_object("syslog_checkbutton")
self.view = self.xml.get_object("write_treeview")
self.file_dialog = self.xml.get_object("filechooserdialog")
self.store = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_INT)
self.view.set_model(self.store)
col = Gtk.TreeViewColumn("", Gtk.CellRendererText(), text=0)
col.set_resizable(True)
self.view.append_column(col)
self.view.get_selection().select_path((0,))
def output_button_clicked(self, *args):
self.file_dialog.set_title(_("Select directory to generate policy files in"))
self.file_dialog.set_action(Gtk.FileChooserAction.SELECT_FOLDER)
self.file_dialog.set_select_multiple(0)
rc = self.file_dialog.run()
self.file_dialog.hide()
if rc == Gtk.ResponseType.CANCEL:
return
self.output_entry.set_text(self.file_dialog.get_filename())
def on_name_entry_changed(self, entry, text, size, position):
if text.find(" ") >= 0:
entry.stop_emission_by_name("insert-text")
def on_focus_out_event(self, entry, third):
name = entry.get_text()
if self.name != name:
if name in self.all_types:
if self.verify(_("Type %s_t already defined in current policy.\nDo you want to continue?") % name, _("Verify Name")) == Gtk.ResponseType.NO:
entry.set_text("")
return False
if name in self.all_modules:
if self.verify(_("Module %s already loaded in current policy.\nDo you want to continue?") % name, _("Verify Name")) == Gtk.ResponseType.NO:
entry.set_text("")
return False
file = "/etc/rc.d/init.d/" + name
if os.path.isfile(file) and self.init_script_entry.get_text() == "":
self.init_script_entry.set_text(file)
file = "/usr/sbin/" + name
if os.path.isfile(file) and self.exec_entry.get_text() == "":
self.exec_entry.set_text(file)
self.name = name
return False
def on_in_net_page_next(self, *args):
try:
sepolicy.generate.verify_ports(self.in_tcp_entry.get_text())
sepolicy.generate.verify_ports(self.in_udp_entry.get_text())
except ValueError as e:
            self.error(e.args[0])
return True
def on_out_net_page_next(self, *args):
try:
sepolicy.generate.verify_ports(self.out_tcp_entry.get_text())
sepolicy.generate.verify_ports(self.out_udp_entry.get_text())
except ValueError as e:
            self.error(e.args[0])
return True
def on_select_type_page_next(self, *args):
self.exec_entry.set_sensitive(self.confine_application())
self.exec_button.set_sensitive(self.confine_application())
self.init_script_entry.set_sensitive(self.init_radiobutton.get_active())
self.init_script_button.set_sensitive(self.init_radiobutton.get_active())
def on_existing_user_page_next(self, *args):
        store, iter = self.existing_user_treeview.get_selection().get_selected()
        if iter == None:
self.error(_("You must select a user"))
return True
def on_name_page_next(self, *args):
name = self.name_entry.get_text()
if not name.isalnum():
self.error(_("You must add a name made up of letters and numbers and containing no spaces."))
return True
for i in self.label_dict:
text = '<b>%s</b>' % (self.label_dict[i] % ("'" + name + "'"))
i.set_markup(text)
for i in self.tooltip_dict:
text = self.tooltip_dict[i] % ("'" + name + "'")
i.set_tooltip_text(text)
if self.confine_application():
exe = self.exec_entry.get_text()
if exe == "":
self.error(_("You must enter a executable"))
return True
policy = sepolicy.generate.policy(name, self.get_type())
policy.set_program(exe)
policy.gen_writeable()
policy.gen_symbols()
for f in policy.files.keys():
iter = self.store.append()
self.store.set_value(iter, 0, f)
self.store.set_value(iter, 1, FILE)
for f in policy.dirs.keys():
iter = self.store.append()
self.store.set_value(iter, 0, f)
self.store.set_value(iter, 1, DIR)
self.tmp_checkbutton.set_active(policy.use_tmp)
self.uid_checkbutton.set_active(policy.use_uid)
self.pam_checkbutton.set_active(policy.use_pam)
self.dbus_checkbutton.set_active(policy.use_dbus)
self.audit_checkbutton.set_active(policy.use_audit)
self.terminal_checkbutton.set_active(policy.use_terminal)
self.mail_checkbutton.set_active(policy.use_mail)
self.syslog_checkbutton.set_active(policy.use_syslog)
def stand_alone(self):
desktopName = _("Configure SELinux")
self.setupScreen()
self.mainWindow.connect("destroy", self.quit)
self.mainWindow.show_all()
Gtk.main()
if __name__ == "__main__":
signal.signal(signal.SIGINT, signal.SIG_DFL)
app = childWindow()
app.stand_alone()
```
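Behind the wizard, all policy generation goes through sepolicy.generate.policy. A sketch of the same calls the GUI issues in generate_policy(), with "mydaemon" and the paths as placeholders (requires the sepolicy bindings on an SELinux system):
```python
# Sketch of the sepolicy.generate flow driven by generate_policy() above.
import sepolicy.generate

pol = sepolicy.generate.policy("mydaemon", sepolicy.generate.DAEMON)
pol.set_program("/usr/sbin/mydaemon")             # executable to confine
pol.set_init_script("/etc/rc.d/init.d/mydaemon")  # daemons take an init script
pol.gen_writeable()   # guess writable files/dirs from the program
pol.gen_symbols()
print(pol.generate("/tmp"))  # writes the policy files and returns a summary
```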
#### File: selinux/gui/portsPage.py
```python
import sys
from gi.repository import GObject, Gtk
import seobject
TYPE_COL = 0
PROTOCOL_COL = 1
MLS_COL = 2
PORT_COL = 3
try:
from subprocess import getstatusoutput
except ImportError:
from commands import getstatusoutput
from semanagePage import *
##
## I18N
##
PROGNAME = "policycoreutils"
try:
import gettext
kwargs = {}
if sys.version_info < (3,):
kwargs['unicode'] = True
gettext.install(PROGNAME,
localedir="/usr/share/locale",
codeset='utf-8',
**kwargs)
except:
try:
import builtins
builtins.__dict__['_'] = str
except ImportError:
import __builtin__
__builtin__.__dict__['_'] = unicode
class portsPage(semanagePage):
def __init__(self, xml):
semanagePage.__init__(self, xml, "ports", _("Network Port"))
group_listview = xml.get_object("listViewButton")
group_listview.connect("clicked", self.on_group_clicked)
self.group = False
self.ports_filter = xml.get_object("portsFilterEntry")
self.ports_filter.connect("focus_out_event", self.filter_changed)
self.ports_filter.connect("activate", self.filter_changed)
self.ports_name_entry = xml.get_object("portsNameEntry")
self.ports_protocol_combo = xml.get_object("portsProtocolCombo")
self.ports_number_entry = xml.get_object("portsNumberEntry")
self.ports_mls_entry = xml.get_object("portsMLSEntry")
self.ports_add_button = xml.get_object("portsAddButton")
self.ports_properties_button = xml.get_object("portsPropertiesButton")
self.ports_delete_button = xml.get_object("portsDeleteButton")
liststore = self.ports_protocol_combo.get_model()
iter = liststore.get_iter_first()
self.ports_protocol_combo.set_active_iter(iter)
self.init_store()
self.edit = True
self.load()
def filter_changed(self, *arg):
filter = arg[0].get_text()
if filter != self.filter:
if self.edit:
self.load(filter)
else:
self.group_load(filter)
def init_store(self):
self.store = Gtk.ListStore(GObject.TYPE_STRING, GObject.TYPE_STRING, GObject.TYPE_STRING, GObject.TYPE_STRING)
self.view.set_model(self.store)
self.store.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.view.set_search_equal_func(self.search)
col = Gtk.TreeViewColumn(_("SELinux Port\nType"), Gtk.CellRendererText(), text=TYPE_COL)
col.set_sort_column_id(TYPE_COL)
col.set_resizable(True)
self.view.append_column(col)
self.store.set_sort_column_id(TYPE_COL, Gtk.SortType.ASCENDING)
col = Gtk.TreeViewColumn(_("Protocol"), Gtk.CellRendererText(), text=PROTOCOL_COL)
col.set_sort_column_id(PROTOCOL_COL)
col.set_resizable(True)
self.view.append_column(col)
self.mls_col = Gtk.TreeViewColumn(_("MLS/MCS\nLevel"), Gtk.CellRendererText(), text=MLS_COL)
self.mls_col.set_resizable(True)
self.mls_col.set_sort_column_id(MLS_COL)
self.view.append_column(self.mls_col)
col = Gtk.TreeViewColumn(_("Port"), Gtk.CellRendererText(), text=PORT_COL)
col.set_sort_column_id(PORT_COL)
col.set_resizable(True)
self.view.append_column(col)
self.store.set_sort_func(PORT_COL, self.sort_int, "")
def sort_int(self, treemodel, iter1, iter2, user_data):
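        # Sort numerically on the low end of a port spec, which may be a
        # single port ("80") or a range ("8000-8010"); treat unparseable
        # values as equal.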
try:
p1 = int(treemodel.get_value(iter1, PORT_COL).split('-')[0])
p2 = int(treemodel.get_value(iter2, PORT_COL).split('-')[0])
if p1 > p2:
return 1
if p1 == p2:
return 0
return -1
except:
return 0
def load(self, filter=""):
self.filter = filter
self.port = seobject.portRecords()
dict = self.port.get_all(self.local)
self.store.clear()
for k in sorted(dict.keys()):
            if not (self.match(str(k[0]), filter) or self.match(dict[k][0], filter) or self.match(k[2], filter) or self.match(dict[k][1], filter)):
continue
iter = self.store.append()
if k[0] == k[1]:
self.store.set_value(iter, PORT_COL, str(k[0]))
else:
rec = "%s-%s" % k[:2]
self.store.set_value(iter, PORT_COL, rec)
self.store.set_value(iter, TYPE_COL, dict[k][0])
self.store.set_value(iter, PROTOCOL_COL, k[2])
self.store.set_value(iter, MLS_COL, dict[k][1])
self.view.get_selection().select_path((0,))
def group_load(self, filter=""):
self.filter = filter
self.port = seobject.portRecords()
dict = self.port.get_all_by_type(self.local)
self.store.clear()
for k in sorted(dict.keys()):
ports_string = ", ".join(dict[k])
if not (self.match(ports_string, filter) or self.match(k[0], filter) or self.match(k[1], filter)):
continue
iter = self.store.append()
self.store.set_value(iter, TYPE_COL, k[0])
self.store.set_value(iter, PROTOCOL_COL, k[1])
self.store.set_value(iter, PORT_COL, ports_string)
self.store.set_value(iter, MLS_COL, "")
self.view.get_selection().select_path((0,))
def propertiesDialog(self):
if self.edit:
semanagePage.propertiesDialog(self)
def dialogInit(self):
store, iter = self.view.get_selection().get_selected()
self.ports_number_entry.set_text(store.get_value(iter, PORT_COL))
self.ports_number_entry.set_sensitive(False)
self.ports_protocol_combo.set_sensitive(False)
self.ports_name_entry.set_text(store.get_value(iter, TYPE_COL))
self.ports_mls_entry.set_text(store.get_value(iter, MLS_COL))
protocol = store.get_value(iter, PROTOCOL_COL)
liststore = self.ports_protocol_combo.get_model()
iter = liststore.get_iter_first()
while iter != None and liststore.get_value(iter, 0) != protocol:
iter = liststore.iter_next(iter)
if iter != None:
self.ports_protocol_combo.set_active_iter(iter)
def dialogClear(self):
self.ports_number_entry.set_text("")
self.ports_number_entry.set_sensitive(True)
self.ports_protocol_combo.set_sensitive(True)
self.ports_name_entry.set_text("")
self.ports_mls_entry.set_text("s0")
def delete(self):
store, iter = self.view.get_selection().get_selected()
port = store.get_value(iter, PORT_COL)
protocol = store.get_value(iter, 1)
try:
self.wait()
(rc, out) = getstatusoutput("semanage port -d -p %s %s" % (protocol, port))
self.ready()
if rc != 0:
return self.error(out)
store.remove(iter)
self.view.get_selection().select_path((0,))
except ValueError as e:
self.error(e.args[0])
def add(self):
target = self.ports_name_entry.get_text().strip()
mls = self.ports_mls_entry.get_text().strip()
port_number = self.ports_number_entry.get_text().strip()
if port_number == "":
port_number = "1"
for i in port_number.split("-"):
if not i.isdigit():
self.error(_("Port number \"%s\" is not valid. 0 < PORT_NUMBER < 65536 ") % port_number)
return False
list_model = self.ports_protocol_combo.get_model()
iter = self.ports_protocol_combo.get_active_iter()
protocol = list_model.get_value(iter, 0)
self.wait()
(rc, out) = getstatusoutput("semanage port -a -p %s -r %s -t %s %s" % (protocol, mls, target, port_number))
self.ready()
if rc != 0:
self.error(out)
return False
iter = self.store.append()
self.store.set_value(iter, TYPE_COL, target)
self.store.set_value(iter, PORT_COL, port_number)
self.store.set_value(iter, PROTOCOL_COL, protocol)
self.store.set_value(iter, MLS_COL, mls)
def modify(self):
target = self.ports_name_entry.get_text().strip()
mls = self.ports_mls_entry.get_text().strip()
port_number = self.ports_number_entry.get_text().strip()
list_model = self.ports_protocol_combo.get_model()
iter = self.ports_protocol_combo.get_active_iter()
protocol = list_model.get_value(iter, 0)
self.wait()
(rc, out) = getstatusoutput("semanage port -m -p %s -r %s -t %s %s" % (protocol, mls, target, port_number))
self.ready()
if rc != 0:
self.error(out)
return False
store, iter = self.view.get_selection().get_selected()
self.store.set_value(iter, TYPE_COL, target)
self.store.set_value(iter, PORT_COL, port_number)
self.store.set_value(iter, PROTOCOL_COL, protocol)
self.store.set_value(iter, MLS_COL, mls)
def on_group_clicked(self, button):
self.ports_add_button.set_sensitive(self.group)
self.ports_properties_button.set_sensitive(self.group)
self.ports_delete_button.set_sensitive(self.group)
self.mls_col.set_visible(self.group)
self.group = not self.group
if self.group:
button.set_label(_("List View"))
self.group_load(self.filter)
else:
button.set_label(_("Group View"))
self.load(self.filter)
return True
```
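The page above performs modifications by shelling out to the `semanage port` command line via `getstatusoutput`, while reads go through the `seobject` API. A minimal sketch of the same add/delete round trip outside the GUI; the port number, SELinux type, and MLS range are illustrative values, not anything this file prescribes:
```python
from subprocess import getstatusoutput
# Mirrors portsPage.add(); a non-zero rc means semanage failed and
# out carries its error text.
rc, out = getstatusoutput("semanage port -a -p tcp -r s0 -t http_port_t 8001")
if rc != 0:
    print(out)
# Mirrors portsPage.delete().
rc, out = getstatusoutput("semanage port -d -p tcp 8001")
```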
#### File: src/sepolgen/defaults.py
```python
import os
import re
# Select the correct location for the development files based on a
# path variable (optionally read from a configuration file)
class PathChooser(object):
def __init__(self, pathname):
self.config = dict()
if not os.path.exists(pathname):
self.config_pathname = "(defaults)"
self.config["SELINUX_DEVEL_PATH"] = "/usr/share/selinux/default:/usr/share/selinux/mls:/usr/share/selinux/devel"
return
self.config_pathname = pathname
ignore = re.compile(r"^\s*(?:#.+)?$")
consider = re.compile(r"^\s*(\w+)\s*=\s*(.+?)\s*$")
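        # e.g. a line this matches: SELINUX_DEVEL_PATH = /usr/share/selinux/devel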
with open(pathname, "r") as fd:
for lineno, line in enumerate(fd):
if ignore.match(line): continue
mo = consider.match(line)
if not mo:
raise ValueError("%s:%d: line is not in key = value format" % (pathname, lineno+1))
self.config[mo.group(1)] = mo.group(2)
# We're only exporting one useful function, so why not be a function
def __call__(self, testfilename, pathset="SELINUX_DEVEL_PATH"):
paths = self.config.get(pathset, None)
if paths is None:
raise ValueError("%s was not in %s" % (pathset, self.config_pathname))
paths = paths.split(":")
for p in paths:
target = os.path.join(p, testfilename)
if os.path.exists(target): return target
return os.path.join(paths[0], testfilename)
"""
Various default settings, including file and directory locations.
"""
def data_dir():
return "/var/lib/sepolgen"
def perm_map():
return data_dir() + "/perm_map"
def interface_info():
return data_dir() + "/interface_info"
def attribute_info():
return data_dir() + "/attribute_info"
def refpolicy_makefile():
chooser = PathChooser("/etc/selinux/sepolgen.conf")
result = chooser("Makefile")
if not os.path.exists(result):
result = chooser("include/Makefile")
return result
def headers():
chooser = PathChooser("/etc/selinux/sepolgen.conf")
return chooser("include")
```
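A short usage sketch of the helpers above: PathChooser returns the first existing candidate on the colon-separated search path (falling back to the first directory), which is exactly how refpolicy_makefile() and headers() locate the refpolicy development files. The import path assumes the in-tree sepolgen package layout:
```python
from sepolgen.defaults import PathChooser, refpolicy_makefile
# Same lookup refpolicy_makefile() performs internally:
chooser = PathChooser("/etc/selinux/sepolgen.conf")
print(chooser("Makefile"))    # first existing match on SELINUX_DEVEL_PATH
print(chooser("include"))     # falls back to paths[0]/include if none exist
print(refpolicy_makefile())   # also tries include/Makefile as a fallback
```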
#### File: src/sepolgen/policygen.py
```python
import itertools
import textwrap
import selinux.audit2why as audit2why
try:
from setools import *
except:
pass
from . import refpolicy
from . import objectmodel
from . import access
from . import interfaces
from . import matching
from . import util
# Constants for the level of explanation from the generation
# routines
NO_EXPLANATION = 0
SHORT_EXPLANATION = 1
LONG_EXPLANATION = 2
class PolicyGenerator:
"""Generate a reference policy module from access vectors.
PolicyGenerator generates a new reference policy module
or updates an existing module based on requested access
in the form of access vectors.
It generates allow rules and optionally module require
statements, reference policy interfaces, and extended
permission access vector rules. By default only allow rules
are generated. The methods .set_gen_refpol, .set_gen_requires
    and .set_gen_xperms turn on interface generation,
requires generation, and xperms rules generation respectively.
PolicyGenerator can also optionally add comments explaining
why a particular access was allowed based on the audit
messages that generated the access. The access vectors
passed in must have the .audit_msgs field set correctly
and .explain set to SHORT|LONG_EXPLANATION to enable this
feature.
The module created by PolicyGenerator can be passed to
output.ModuleWriter to output a text representation.
"""
def __init__(self, module=None):
"""Initialize a PolicyGenerator with an optional
existing module.
If the module parameter is not None then access
will be added to the passed in module. Otherwise
a new reference policy module will be created.
"""
self.ifgen = None
self.explain = NO_EXPLANATION
self.gen_requires = False
if module:
self.module = module
else:
self.module = refpolicy.Module()
self.dontaudit = False
self.xperms = False
self.domains = None
def set_gen_refpol(self, if_set=None, perm_maps=None):
"""Set whether reference policy interfaces are generated.
To turn on interface generation pass in an interface set
to use for interface generation. To turn off interface
generation pass in None.
If interface generation is enabled requires generation
will also be enabled.
"""
if if_set:
self.ifgen = InterfaceGenerator(if_set, perm_maps)
self.gen_requires = True
else:
self.ifgen = None
self.__set_module_style()
def set_gen_requires(self, status=True):
"""Set whether module requires are generated.
        Passing in True will turn on requires generation and
        False will disable it. If requires generation is
disabled interface generation will also be disabled and
can only be re-enabled via .set_gen_refpol.
"""
self.gen_requires = status
def set_gen_explain(self, explain=SHORT_EXPLANATION):
"""Set whether access is explained.
"""
self.explain = explain
def set_gen_dontaudit(self, dontaudit):
self.dontaudit = dontaudit
def set_gen_xperms(self, xperms):
"""Set whether extended permission access vector rules
are generated.
"""
self.xperms = xperms
def __set_module_style(self):
if self.ifgen:
refpolicy = True
else:
refpolicy = False
for mod in self.module.module_declarations():
mod.refpolicy = refpolicy
def set_module_name(self, name, version="1.0"):
"""Set the name of the module and optionally the version.
"""
# find an existing module declaration
m = None
for mod in self.module.module_declarations():
m = mod
if not m:
m = refpolicy.ModuleDeclaration()
self.module.children.insert(0, m)
m.name = name
m.version = version
if self.ifgen:
m.refpolicy = True
else:
m.refpolicy = False
    def get_module(self):
        """Return the generated module"""
        # Generate the requires
        if self.gen_requires:
            gen_requires(self.module)
        return self.module
def __add_av_rule(self, av):
"""Add access vector rule.
"""
rule = refpolicy.AVRule(av)
if self.dontaudit:
rule.rule_type = rule.DONTAUDIT
rule.comment = ""
if self.explain:
rule.comment = str(refpolicy.Comment(explain_access(av, verbosity=self.explain)))
if av.type == audit2why.ALLOW:
rule.comment += "\n#!!!! This avc is allowed in the current policy"
if av.xperms:
rule.comment += "\n#!!!! This av rule may have been overridden by an extended permission av rule"
if av.type == audit2why.DONTAUDIT:
rule.comment += "\n#!!!! This avc has a dontaudit rule in the current policy"
if av.type == audit2why.BOOLEAN:
if len(av.data) > 1:
rule.comment += "\n#!!!! This avc can be allowed using one of the these booleans:\n# %s" % ", ".join([x[0] for x in av.data])
else:
rule.comment += "\n#!!!! This avc can be allowed using the boolean '%s'" % av.data[0][0]
if av.type == audit2why.CONSTRAINT:
rule.comment += "\n#!!!! This avc is a constraint violation. You would need to modify the attributes of either the source or target types to allow this access."
rule.comment += "\n#Constraint rule: "
rule.comment += "\n#\t" + av.data[0]
for reason in av.data[1:]:
rule.comment += "\n#\tPossible cause is the source %s and target %s are different." % reason
try:
if ( av.type == audit2why.TERULE and
"write" in av.perms and
( "dir" in av.obj_class or "open" in av.perms )):
if not self.domains:
self.domains = seinfo(ATTRIBUTE, name="domain")[0]["types"]
types=[]
for i in [x[TCONTEXT] for x in sesearch([ALLOW], {SCONTEXT: av.src_type, CLASS: av.obj_class, PERMS: av.perms})]:
if i not in self.domains:
types.append(i)
if len(types) == 1:
rule.comment += "\n#!!!! The source type '%s' can write to a '%s' of the following type:\n# %s\n" % ( av.src_type, av.obj_class, ", ".join(types))
                elif len(types) > 1:
rule.comment += "\n#!!!! The source type '%s' can write to a '%s' of the following types:\n# %s\n" % ( av.src_type, av.obj_class, ", ".join(types))
except:
pass
self.module.children.append(rule)
def __add_ext_av_rules(self, av):
"""Add extended permission access vector rules.
"""
for op in av.xperms.keys():
extrule = refpolicy.AVExtRule(av, op)
if self.dontaudit:
extrule.rule_type = extrule.DONTAUDITXPERM
self.module.children.append(extrule)
def add_access(self, av_set):
"""Add the access from the access vector set to this
module.
"""
# Use the interface generator to split the access
# into raw allow rules and interfaces. After this
# a will contain a list of access that should be
# used as raw allow rules and the interfaces will
# be added to the module.
if self.ifgen:
raw_allow, ifcalls = self.ifgen.gen(av_set, self.explain)
self.module.children.extend(ifcalls)
else:
raw_allow = av_set
# Generate the raw allow rules from the filtered list
for av in raw_allow:
self.__add_av_rule(av)
if self.xperms and av.xperms:
self.__add_ext_av_rules(av)
def add_role_types(self, role_type_set):
for role_type in role_type_set:
self.module.children.append(role_type)
def explain_access(av, ml=None, verbosity=SHORT_EXPLANATION):
"""Explain why a policy statement was generated.
    Return a list of strings containing a text explanation of
    why a policy statement was generated. The strings are
    commented and wrapped and can be directly inserted
    into a policy.
    Params:
      av - access vector representing the access. Should
        have .audit_msgs set appropriately.
      ml - optional MatchList of matching interfaces; when set,
        the interface options are appended to the explanation.
      verbosity - the amount of explanation provided. Should
        be set to NO_EXPLANATION, SHORT_EXPLANATION, or
        LONG_EXPLANATION.
    Returns:
      list of strings - strings explaining the access, or an
      empty list if verbosity=NO_EXPLANATION or there is not
      sufficient information to provide an explanation.
"""
s = []
def explain_interfaces():
if not ml:
return
s.append(" Interface options:")
for match in ml.all():
ifcall = call_interface(match.interface, ml.av)
s.append(' %s # [%d]' % (ifcall.to_string(), match.dist))
# Format the raw audit data to explain why the
# access was requested - either long or short.
if verbosity == LONG_EXPLANATION:
for msg in av.audit_msgs:
s.append(' %s' % msg.header)
s.append(' scontext="%s" tcontext="%s"' %
(str(msg.scontext), str(msg.tcontext)))
s.append(' class="%s" perms="%s"' %
(msg.tclass, refpolicy.list_to_space_str(msg.accesses)))
s.append(' comm="%s" exe="%s" path="%s"' % (msg.comm, msg.exe, msg.path))
s.extend(textwrap.wrap('message="' + msg.message + '"', 80, initial_indent=" ",
subsequent_indent=" "))
explain_interfaces()
elif verbosity:
s.append(' src="%s" tgt="%s" class="%s", perms="%s"' %
(av.src_type, av.tgt_type, av.obj_class, av.perms.to_space_str()))
# For the short display we are only going to use the additional information
# from the first audit message. For the vast majority of cases this info
# will always be the same anyway.
if len(av.audit_msgs) > 0:
msg = av.audit_msgs[0]
s.append(' comm="%s" exe="%s" path="%s"' % (msg.comm, msg.exe, msg.path))
explain_interfaces()
return s
def call_interface(interface, av):
params = []
args = []
params.extend(interface.params.values())
params.sort(key=lambda param: param.num, reverse=True)
ifcall = refpolicy.InterfaceCall()
ifcall.ifname = interface.name
for i in range(len(params)):
if params[i].type == refpolicy.SRC_TYPE:
ifcall.args.append(av.src_type)
elif params[i].type == refpolicy.TGT_TYPE:
ifcall.args.append(av.tgt_type)
elif params[i].type == refpolicy.OBJ_CLASS:
ifcall.args.append(av.obj_class)
else:
print(params[i].type)
assert(0)
assert(len(ifcall.args) > 0)
return ifcall
class InterfaceGenerator:
def __init__(self, ifs, perm_maps=None):
self.ifs = ifs
self.hack_check_ifs(ifs)
self.matcher = matching.AccessMatcher(perm_maps)
self.calls = []
def hack_check_ifs(self, ifs):
# FIXME: Disable interfaces we can't call - this is a hack.
# Because we don't handle roles, multiple parameters, etc.,
# etc., we must make certain we can actually use a returned
# interface.
for x in ifs.interfaces.values():
params = []
params.extend(x.params.values())
params.sort(key=lambda param: param.num, reverse=True)
for i in range(len(params)):
# Check that the parameter position matches
# the number (e.g., $1 is the first arg). This
# will fail if the parser missed something.
if (i + 1) != params[i].num:
x.enabled = False
break
# Check that we can handle the param type (currently excludes
                # roles).
if params[i].type not in [refpolicy.SRC_TYPE, refpolicy.TGT_TYPE,
refpolicy.OBJ_CLASS]:
x.enabled = False
break
def gen(self, avs, verbosity):
raw_av = self.match(avs)
ifcalls = []
for ml in self.calls:
ifcall = call_interface(ml.best().interface, ml.av)
if verbosity:
ifcall.comment = refpolicy.Comment(explain_access(ml.av, ml, verbosity))
ifcalls.append((ifcall, ml))
d = []
for ifcall, ifs in ifcalls:
found = False
for o_ifcall in d:
if o_ifcall.matches(ifcall):
if o_ifcall.comment and ifcall.comment:
o_ifcall.comment.merge(ifcall.comment)
found = True
if not found:
d.append(ifcall)
return (raw_av, d)
def match(self, avs):
raw_av = []
for av in avs:
ans = matching.MatchList()
self.matcher.search_ifs(self.ifs, av, ans)
if len(ans):
self.calls.append(ans)
else:
raw_av.append(av)
return raw_av
def gen_requires(module):
"""Add require statements to the module.
"""
def collect_requires(node):
r = refpolicy.Require()
for avrule in node.avrules():
r.types.update(avrule.src_types)
r.types.update(avrule.tgt_types)
for obj in avrule.obj_classes:
r.add_obj_class(obj, avrule.perms)
for ifcall in node.interface_calls():
for arg in ifcall.args:
# FIXME - handle non-type arguments when we
# can actually figure those out.
r.types.add(arg)
for role_type in node.role_types():
r.roles.add(role_type.role)
r.types.update(role_type.types)
r.types.discard("self")
node.children.insert(0, r)
# FUTURE - this is untested on modules with any sort of
# nesting
for node in module.nodes():
collect_requires(node)
```
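A minimal end-to-end sketch of this module, assuming the AccessVectorSet.add() and ModuleWriter.write() signatures that audit2allow uses; the types and permissions are illustrative:
```python
import sys
from sepolgen import access, output, policygen
# Build an access vector set by hand (audit2allow normally derives
# this from parsed AVC messages).
avs = access.AccessVectorSet()
avs.add("httpd_t", "user_home_t", "file", ["read", "getattr"])
gen = policygen.PolicyGenerator()
gen.set_module_name("myhttpd")   # emits: module myhttpd 1.0;
gen.set_gen_requires(True)       # emit require { } blocks via gen_requires()
gen.add_access(avs)              # raw allow rules; no interface generation
output.ModuleWriter().write(gen.get_module(), sys.stdout)
```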
#### File: sepolicy/sepolicy/gui.py
```python
import gi
gi.require_version('Gtk', '3.0')
from gi.repository import Gtk
from gi.repository import Gdk
from gi.repository import GLib
from sepolicy.sedbus import SELinuxDBus
import sys
import sepolicy
import selinux
from selinux import DISABLED, PERMISSIVE, ENFORCING
import sepolicy.network
import sepolicy.manpage
import dbus
import os
import re
import unicodedata
PROGNAME = "policycoreutils"
try:
import gettext
kwargs = {}
if sys.version_info < (3,):
kwargs['unicode'] = True
gettext.install(PROGNAME,
localedir="/usr/share/locale",
codeset='utf-8',
**kwargs)
except:
try:
import builtins
builtins.__dict__['_'] = str
except ImportError:
import __builtin__
__builtin__.__dict__['_'] = unicode
reverse_file_type_str = {}
for f in sepolicy.file_type_str:
reverse_file_type_str[sepolicy.file_type_str[f]] = f
enabled = [_("No"), _("Yes")]
action = [_("Disable"), _("Enable")]
def cmp(a, b):
if a is None and b is None:
return 0
if a is None:
return -1
if b is None:
return 1
return (a > b) - (a < b)
import distutils.sysconfig
ADVANCED_LABEL = (_("Advanced >>"), _("Advanced <<"))
ADVANCED_SEARCH_LABEL = (_("Advanced Search >>"), _("Advanced Search <<"))
OUTBOUND_PAGE = 0
INBOUND_PAGE = 1
TRANSITIONS_FROM_PAGE = 0
TRANSITIONS_TO_PAGE = 1
TRANSITIONS_FILE_PAGE = 2
EXE_PAGE = 0
WRITABLE_PAGE = 1
APP_PAGE = 2
BOOLEANS_PAGE = 0
FILES_PAGE = 1
NETWORK_PAGE = 2
TRANSITIONS_PAGE = 3
LOGIN_PAGE = 4
USER_PAGE = 5
LOCKDOWN_PAGE = 6
SYSTEM_PAGE = 7
FILE_EQUIV_PAGE = 8
START_PAGE = 9
keys = ["boolean", "fcontext", "fcontext-equiv", "port", "login", "user", "module", "node", "interface"]
DISABLED_TEXT = _("""<small>
To change from Disabled to Enforcing mode
- Change the system mode from Disabled to Permissive
- Reboot, so that the system can relabel
- Once the system is working as planned
* Change the system mode to Enforcing</small>
""")
class SELinuxGui():
def __init__(self, app=None, test=False):
self.finish_init = False
self.advanced_init = True
self.opage = START_PAGE
self.dbus = SELinuxDBus()
try:
customized = self.dbus.customized()
except dbus.exceptions.DBusException as e:
print(e)
self.quit()
self.init_cur()
self.application = app
self.filter_txt = ""
builder = Gtk.Builder() # BUILDER OBJ
self.code_path = distutils.sysconfig.get_python_lib(plat_specific=False) + "/sepolicy/"
glade_file = self.code_path + "sepolicy.glade"
builder.add_from_file(glade_file)
self.outer_notebook = builder.get_object("outer_notebook")
self.window = builder.get_object("SELinux_window")
self.main_selection_window = builder.get_object("Main_selection_menu")
self.main_advanced_label = builder.get_object("main_advanced_label")
self.popup = 0
self.applications_selection_button = builder.get_object("applications_selection_button")
self.revert_button = builder.get_object("Revert_button")
self.busy_cursor = Gdk.Cursor(Gdk.CursorType.WATCH)
self.ready_cursor = Gdk.Cursor(Gdk.CursorType.LEFT_PTR)
self.initialtype = selinux.selinux_getpolicytype()[1]
self.current_popup = None
self.import_export = None
self.clear_entry = True
self.files_add = False
self.network_add = False
self.mislabeled_files = False
self.all_domains = []
self.installed_list = []
self.previously_modified = {}
# file dialog
self.file_dialog = builder.get_object("add_path_dialog")
# Error check ***************************************
self.error_check_window = builder.get_object("error_check_window")
self.error_check_label = builder.get_object("error_check_label")
self.invalid_entry = False
# Advanced search window ****************************
self.advanced_search_window = builder.get_object("advanced_search_window")
self.advanced_search_filter = builder.get_object("advanced_filter")
self.advanced_search_filter.set_visible_func(self.filter_the_data)
self.advanced_search_sort = builder.get_object("advanced_sort")
self.advanced_filter_entry = builder.get_object("advanced_filter_entry")
self.advanced_search_treeview = builder.get_object("advanced_search_treeview")
self.advanced_search = False
# Login Items **************************************
self.login_label = builder.get_object("Login_label")
self.login_seuser_combobox = builder.get_object("login_seuser_combobox")
self.login_seuser_combolist = builder.get_object("login_seuser_liststore")
self.login_name_entry = builder.get_object("login_name_entry")
self.login_mls_label = builder.get_object("login_mls_label")
self.login_mls_entry = builder.get_object("login_mls_entry")
self.login_radio_button = builder.get_object("Login_button")
self.login_treeview = builder.get_object("login_treeview")
self.login_liststore = builder.get_object("login_liststore")
self.login_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.login_filter = builder.get_object("login_filter")
self.login_filter.set_visible_func(self.filter_the_data)
self.login_popup_window = builder.get_object("login_popup_window")
self.login_delete_liststore = builder.get_object("login_delete_liststore")
self.login_delete_window = builder.get_object("login_delete_window")
# Users Items **************************************
self.user_popup_window = builder.get_object("user_popup_window")
self.user_radio_button = builder.get_object("User_button")
self.user_liststore = builder.get_object("user_liststore")
self.user_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.user_filter = builder.get_object("user_filter")
self.user_filter.set_visible_func(self.filter_the_data)
self.user_treeview = builder.get_object("user_treeview")
self.user_roles_combobox = builder.get_object("user_roles_combobox")
self.user_roles_combolist = builder.get_object("user_roles_liststore")
self.user_label = builder.get_object("User_label")
self.user_name_entry = builder.get_object("user_name_entry")
self.user_mls_label = builder.get_object("user_mls_label")
self.user_mls_level_entry = builder.get_object("user_mls_level_entry")
self.user_mls_entry = builder.get_object("user_mls_entry")
self.user_combobox = builder.get_object("selinux_user_combobox")
self.user_delete_liststore = builder.get_object("user_delete_liststore")
self.user_delete_window = builder.get_object("user_delete_window")
# File Equiv Items **************************************
self.file_equiv_label = builder.get_object("file_equiv_label")
self.file_equiv_source_entry = builder.get_object("file_equiv_source_entry")
self.file_equiv_dest_entry = builder.get_object("file_equiv_dest_entry")
self.file_equiv_radio_button = builder.get_object("file_equiv_button")
self.file_equiv_treeview = builder.get_object("file_equiv_treeview")
self.file_equiv_liststore = builder.get_object("file_equiv_liststore")
self.file_equiv_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.file_equiv_popup_window = builder.get_object("file_equiv_popup_window")
self.file_equiv_treefilter = builder.get_object("file_equiv_filter")
self.file_equiv_treefilter.set_visible_func(self.filter_the_data)
self.file_equiv_delete_liststore = builder.get_object("file_equiv_delete_liststore")
self.file_equiv_delete_window = builder.get_object("file_equiv_delete_window")
# System Items **************************************
self.app_system_button = builder.get_object("app_system_button")
self.system_radio_button = builder.get_object("System_button")
self.lockdown_radio_button = builder.get_object("Lockdown_button")
self.systems_box = builder.get_object("Systems_box")
self.relabel_button = builder.get_object("Relabel_button")
self.relabel_button_no = builder.get_object("Relabel_button_no")
self.advanced_system = builder.get_object("advanced_system")
self.outer_notebook_frame = builder.get_object("outer_notebook_frame")
self.system_policy_label = builder.get_object("system_policy_type_label")
# Browse Items **************************************
self.select_button_browse = builder.get_object("select_button_browse")
self.cancel_button_browse = builder.get_object("cancel_button_browse")
# More types window items ***************************
self.moreTypes_window_files = builder.get_object("moreTypes_window_files")
self.more_types_files_liststore = builder.get_object("more_types_file_liststore")
self.moreTypes_treeview = builder.get_object("moreTypes_treeview_files")
# System policy type ********************************
self.system_policy_type_liststore = builder.get_object("system_policy_type_liststore")
self.system_policy_type_combobox = builder.get_object("system_policy_type_combobox")
self.policy_list = []
if self.populate_system_policy() < 2:
self.advanced_system.set_visible(False)
self.system_policy_label.set_visible(False)
self.system_policy_type_combobox.set_visible(False)
self.enforcing_button_default = builder.get_object("Enforcing_button_default")
self.permissive_button_default = builder.get_object("Permissive_button_default")
self.disabled_button_default = builder.get_object("Disabled_button_default")
self.initialize_system_default_mode()
# Lockdown Window *********************************
self.enable_unconfined_button = builder.get_object("enable_unconfined")
self.disable_unconfined_button = builder.get_object("disable_unconfined")
self.enable_permissive_button = builder.get_object("enable_permissive")
self.disable_permissive_button = builder.get_object("disable_permissive")
self.enable_ptrace_button = builder.get_object("enable_ptrace")
self.disable_ptrace_button = builder.get_object("disable_ptrace")
# Help Window *********************************
self.help_window = builder.get_object("help_window")
self.help_text = builder.get_object("help_textv")
self.info_text = builder.get_object("info_text")
self.help_image = builder.get_object("help_image")
self.forward_button = builder.get_object("forward_button")
self.back_button = builder.get_object("back_button")
# Update menu items *********************************
self.update_window = builder.get_object("update_window")
self.update_treeview = builder.get_object("update_treeview")
self.update_treestore = builder.get_object("Update_treestore")
self.apply_button = builder.get_object("apply_button")
self.update_button = builder.get_object("Update_button")
# Add button objects ********************************
self.add_button = builder.get_object("Add_button")
self.delete_button = builder.get_object("Delete_button")
self.files_path_entry = builder.get_object("files_path_entry")
self.network_ports_entry = builder.get_object("network_ports_entry")
self.files_popup_window = builder.get_object("files_popup_window")
self.network_popup_window = builder.get_object("network_popup_window")
self.popup_network_label = builder.get_object("Network_label")
self.popup_files_label = builder.get_object("files_label")
self.recursive_path_toggle = builder.get_object("make_path_recursive")
self.files_type_combolist = builder.get_object("files_type_combo_store")
self.files_class_combolist = builder.get_object("files_class_combo_store")
self.files_type_combobox = builder.get_object("files_type_combobox")
self.files_class_combobox = builder.get_object("files_class_combobox")
self.files_mls_label = builder.get_object("files_mls_label")
self.files_mls_entry = builder.get_object("files_mls_entry")
self.advanced_text_files = builder.get_object("Advanced_text_files")
self.files_cancel_button = builder.get_object("cancel_delete_files")
self.network_tcp_button = builder.get_object("tcp_button")
self.network_udp_button = builder.get_object("udp_button")
self.network_port_type_combolist = builder.get_object("network_type_combo_store")
self.network_port_type_combobox = builder.get_object("network_type_combobox")
self.network_mls_label = builder.get_object("network_mls_label")
self.network_mls_entry = builder.get_object("network_mls_entry")
self.advanced_text_network = builder.get_object("Advanced_text_network")
self.network_cancel_button = builder.get_object("cancel_network_delete")
# Add button objects ********************************
# Modify items **************************************
self.show_mislabeled_files_only = builder.get_object("Show_mislabeled_files")
self.mislabeled_files_label = builder.get_object("mislabeled_files_label")
self.warning_files = builder.get_object("warning_files")
self.modify_button = builder.get_object("Modify_button")
self.modify_button.set_sensitive(False)
# Modify items **************************************
# Fix label *****************************************
self.fix_label_window = builder.get_object("fix_label_window")
self.fixlabel_label = builder.get_object("fixlabel_label")
self.fix_label_cancel = builder.get_object("fix_label_cancel")
# Fix label *****************************************
# Delete items **************************************
self.files_delete_window = builder.get_object("files_delete_window")
self.files_delete_treeview = builder.get_object("files_delete_treeview")
self.files_delete_liststore = builder.get_object("files_delete_liststore")
self.network_delete_window = builder.get_object("network_delete_window")
self.network_delete_treeview = builder.get_object("network_delete_treeview")
self.network_delete_liststore = builder.get_object("network_delete_liststore")
# Delete items **************************************
# Progress bar **************************************
self.progress_bar = builder.get_object("progress_bar")
# Progress bar **************************************
# executable_files items ****************************
self.executable_files_treeview = builder.get_object("Executable_files_treeview") # Get the executable files tree view
self.executable_files_filter = builder.get_object("executable_files_filter")
self.executable_files_filter.set_visible_func(self.filter_the_data)
self.executable_files_tab = builder.get_object("Executable_files_tab")
self.executable_files_tab_tooltip_txt = self.executable_files_tab.get_tooltip_text()
self.executable_files_liststore = builder.get_object("executable_files_treestore")
self.executable_files_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.files_radio_button = builder.get_object("files_button")
self.files_button_tooltip_txt = self.files_radio_button.get_tooltip_text()
# executable_files items ****************************
# writable files items ******************************
self.writable_files_treeview = builder.get_object("Writable_files_treeview") # Get the Writable files tree view
self.writable_files_liststore = builder.get_object("writable_files_treestore") # Contains the tree with File Path, SELinux File Label, Class
self.writable_files_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.writable_files_filter = builder.get_object("writable_files_filter")
self.writable_files_filter.set_visible_func(self.filter_the_data)
self.writable_files_tab = builder.get_object("Writable_files_tab")
self.writable_files_tab_tooltip_txt = self.writable_files_tab.get_tooltip_text()
# writable files items ******************************
# Application File Types ****************************
self.application_files_treeview = builder.get_object("Application_files_treeview") # Get the Application files tree view
self.application_files_filter = builder.get_object("application_files_filter") # Contains the tree with File Path, Description, Class
self.application_files_filter.set_visible_func(self.filter_the_data)
self.application_files_tab = builder.get_object("Application_files_tab")
self.application_files_tab_tooltip_txt = self.writable_files_tab.get_tooltip_text()
self.application_files_liststore = builder.get_object("application_files_treestore")
self.application_files_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.application_files_tab = builder.get_object("Application_files_tab")
self.application_files_tab_tooltip_txt = self.application_files_tab.get_tooltip_text()
# Application File Type *****************************
# network items *************************************
self.network_radio_button = builder.get_object("network_button")
self.network_button_tooltip_txt = self.network_radio_button.get_tooltip_text()
self.network_out_treeview = builder.get_object("outbound_treeview")
self.network_out_liststore = builder.get_object("network_out_liststore")
self.network_out_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.network_out_filter = builder.get_object("network_out_filter")
self.network_out_filter.set_visible_func(self.filter_the_data)
self.network_out_tab = builder.get_object("network_out_tab")
self.network_out_tab_tooltip_txt = self.network_out_tab.get_tooltip_text()
self.network_in_treeview = builder.get_object("inbound_treeview")
self.network_in_liststore = builder.get_object("network_in_liststore")
self.network_in_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.network_in_filter = builder.get_object("network_in_filter")
self.network_in_filter.set_visible_func(self.filter_the_data)
self.network_in_tab = builder.get_object("network_in_tab")
self.network_in_tab_tooltip_txt = self.network_in_tab.get_tooltip_text()
# network items *************************************
# boolean items ************************************
self.boolean_treeview = builder.get_object("Boolean_treeview") # Get the booleans tree list
self.boolean_liststore = builder.get_object("boolean_liststore")
self.boolean_liststore.set_sort_column_id(2, Gtk.SortType.ASCENDING)
self.boolean_filter = builder.get_object("boolean_filter")
self.boolean_filter.set_visible_func(self.filter_the_data)
self.boolean_more_detail_window = builder.get_object("booleans_more_detail_window")
self.boolean_more_detail_treeview = builder.get_object("booleans_more_detail_treeview")
self.boolean_more_detail_tree_data_set = builder.get_object("booleans_more_detail_liststore")
self.boolean_radio_button = builder.get_object("Booleans_button")
self.active_button = self.boolean_radio_button
self.boolean_button_tooltip_txt = self.boolean_radio_button.get_tooltip_text()
# boolean items ************************************
# transitions items ************************************
self.transitions_into_treeview = builder.get_object("transitions_into_treeview") # Get the transitions tree list Enabled, source, Executable File
self.transitions_into_liststore = builder.get_object("transitions_into_liststore") # Contains the tree with
self.transitions_into_liststore.set_sort_column_id(1, Gtk.SortType.ASCENDING)
self.transitions_into_filter = builder.get_object("transitions_into_filter")
self.transitions_into_filter.set_visible_func(self.filter_the_data)
self.transitions_into_tab = builder.get_object("Transitions_into_tab")
self.transitions_into_tab_tooltip_txt = self.transitions_into_tab.get_tooltip_text()
self.transitions_radio_button = builder.get_object("Transitions_button")
self.transitions_button_tooltip_txt = self.transitions_radio_button.get_tooltip_text()
self.transitions_from_treeview = builder.get_object("transitions_from_treeview") # Get the transitions tree list
self.transitions_from_treestore = builder.get_object("transitions_from_treestore") # Contains the tree with Enabled, Executable File Type, Transtype
self.transitions_from_treestore.set_sort_column_id(2, Gtk.SortType.ASCENDING)
self.transitions_from_filter = builder.get_object("transitions_from_filter")
self.transitions_from_filter.set_visible_func(self.filter_the_data)
self.transitions_from_tab = builder.get_object("Transitions_from_tab")
self.transitions_from_tab_tooltip_txt = self.transitions_from_tab.get_tooltip_text()
self.transitions_file_treeview = builder.get_object("file_transitions_treeview") # Get the transitions tree list
self.transitions_file_liststore = builder.get_object("file_transitions_liststore") # Contains the tree with Enabled, Executable File Type, Transtype
self.transitions_file_liststore.set_sort_column_id(0, Gtk.SortType.ASCENDING)
self.transitions_file_filter = builder.get_object("file_transitions_filter")
self.transitions_file_filter.set_visible_func(self.filter_the_data)
self.transitions_file_tab = builder.get_object("file_transitions")
self.transitions_file_tab_tooltip_txt = self.transitions_from_tab.get_tooltip_text()
# transitions items ************************************
# Combobox and Entry items **************************
        self.combobox_menu = builder.get_object("combobox_org")  # The combobox object, i.e. the arrow next to the entry text bar
self.application_liststore = builder.get_object("application_liststore")
self.completion_entry = builder.get_object("completion_entry") # self.combobox_menu.get_child()
self.entrycompletion_obj = builder.get_object("entrycompletion_obj")
#self.entrycompletion_obj = Gtk.EntryCompletion()
self.entrycompletion_obj.set_minimum_key_length(0)
self.entrycompletion_obj.set_text_column(0)
self.entrycompletion_obj.set_match_func(self.match_func, None)
self.completion_entry.set_completion(self.entrycompletion_obj)
self.completion_entry.set_icon_from_stock(0, Gtk.STOCK_FIND)
# Combobox and Entry items **************************
# Modify buttons ************************************
self.show_modified_only = builder.get_object("Show_modified_only_toggle")
# Modify button *************************************
# status bar *****************************************
self.current_status_label = builder.get_object("Enforcing_label")
self.current_status_enforcing = builder.get_object("Enforcing_button")
self.current_status_permissive = builder.get_object("Permissive_button")
self.status_bar = builder.get_object("status_bar")
self.context_id = self.status_bar.get_context_id("SELinux status")
# filters *********************************************
self.filter_entry = builder.get_object("filter_entry")
self.filter_box = builder.get_object("filter_box")
self.add_modify_delete_box = builder.get_object("add_modify_delete_box")
# Get_model() sets the tree model filter to be the parent of the tree model (tree model has all the data in it)
# Toggle button ****************************************
self.cell = builder.get_object("activate")
self.del_cell_files = builder.get_object("files_toggle_delete")
self.del_cell_files.connect("toggled", self.on_toggle_update, self.files_delete_liststore)
self.del_cell_files_equiv = builder.get_object("file_equiv_toggle_delete1")
self.del_cell_files_equiv.connect("toggled", self.on_toggle_update, self.file_equiv_delete_liststore)
self.del_cell_user = builder.get_object("user_toggle_delete")
self.del_cell_user.connect("toggled", self.on_toggle_update, self.user_delete_liststore)
self.del_cell_login = builder.get_object("login_toggle_delete")
self.del_cell_login.connect("toggled", self.on_toggle_update, self.login_delete_liststore)
self.del_cell_network = builder.get_object("network_toggle_delete")
self.del_cell_network.connect("toggled", self.on_toggle_update, self.network_delete_liststore)
self.update_cell = builder.get_object("toggle_update")
# Notebook items ***************************************
self.outer_notebook = builder.get_object("outer_notebook")
self.inner_notebook_files = builder.get_object("files_inner_notebook")
self.inner_notebook_network = builder.get_object("network_inner_notebook")
self.inner_notebook_transitions = builder.get_object("transitions_inner_notebook")
# logind gui ***************************************
loading_gui = builder.get_object("loading_gui")
self.update_cell.connect("toggled", self.on_toggle_update, self.update_treestore)
self.all_entries = []
        # Need to connect this button in code because the tree view model is a TreeModelSort
self.cell.connect("toggled", self.on_toggle, self.boolean_liststore)
self.loading = 1
path = None
if test:
self.all_domains = ["httpd_t", "abrt_t"]
if app and app not in self.all_domains:
self.all_domains.append(app)
else:
self.all_domains = sepolicy.get_all_domains()
self.all_domains.sort(key=str.lower)
if app and app not in self.all_domains:
self.error(_("%s is not a valid domain") % app)
self.quit()
loading_gui.show()
length = len(self.all_domains)
entrypoint_dict = sepolicy.get_init_entrypoints_str()
for domain in self.all_domains:
# After the user selects a path in the drop down menu call
# get_init_entrypoint_target(entrypoint) to get the transtype
# which will give you the application
self.combo_box_add(domain, domain)
self.percentage = float(float(self.loading) / float(length))
self.progress_bar.set_fraction(self.percentage)
self.progress_bar.set_pulse_step(self.percentage)
self.idle_func()
for entrypoint in entrypoint_dict.get(domain, []):
path = sepolicy.find_entrypoint_path(entrypoint)
if path:
self.combo_box_add(path, domain)
self.installed_list.append(path)
self.loading += 1
loading_gui.hide()
self.entrycompletion_obj.set_model(self.application_liststore)
self.advanced_search_treeview.set_model(self.advanced_search_sort)
dic = {
"on_combo_button_clicked": self.open_combo_menu,
"on_disable_ptrace_toggled": self.on_disable_ptrace,
"on_SELinux_window_configure_event": self.hide_combo_menu,
"on_entrycompletion_obj_match_selected": self.set_application_label,
"on_filter_changed": self.get_filter_data,
"on_save_changes_file_equiv_clicked": self.update_to_file_equiv,
"on_save_changes_login_clicked": self.update_to_login,
"on_save_changes_user_clicked": self.update_to_user,
"on_save_changes_files_clicked": self.update_to_files,
"on_save_changes_network_clicked": self.update_to_network,
"on_Advanced_text_files_button_press_event": self.reveal_advanced,
"item_in_tree_selected": self.cursor_changed,
"on_Application_file_types_treeview_configure_event": self.resize_wrap,
"on_save_delete_clicked": self.on_save_delete_clicked,
"on_moreTypes_treeview_files_row_activated": self.populate_type_combo,
"on_retry_button_files_clicked": self.invalid_entry_retry,
"on_make_path_recursive_toggled": self.recursive_path,
"on_files_path_entry_button_press_event": self.highlight_entry_text,
"on_files_path_entry_changed": self.autofill_add_files_entry,
"on_select_type_files_clicked": self.select_type_more,
"on_choose_file": self.on_browse_select,
"on_Enforcing_button_toggled": self.set_enforce,
"on_confirmation_close": self.confirmation_close,
"on_column_clicked": self.column_clicked,
"on_tab_switch": self.clear_filters,
"on_file_equiv_button_clicked": self.show_file_equiv_page,
"on_app/system_button_clicked": self.system_interface,
"on_app/users_button_clicked": self.users_interface,
"on_show_advanced_search_window": self.on_show_advanced_search_window,
"on_Show_mislabeled_files_toggled": self.show_mislabeled_files,
"on_Browse_button_files_clicked": self.browse_for_files,
"on_cancel_popup_clicked": self.close_popup,
"on_treeview_cursor_changed": self.cursor_changed,
"on_login_seuser_combobox_changed": self.login_seuser_combobox_change,
"on_user_roles_combobox_changed": self.user_roles_combobox_change,
"on_cancel_button_browse_clicked": self.close_config_window,
"on_apply_button_clicked": self.apply_changes_button_press,
"on_Revert_button_clicked": self.update_or_revert_changes,
"on_Update_button_clicked": self.update_or_revert_changes,
"on_advanced_filter_entry_changed": self.get_advanced_filter_data,
"on_advanced_search_treeview_row_activated": self.advanced_item_selected,
"on_Select_advanced_search_clicked": self.advanced_item_button_push,
"on_info_button_button_press_event": self.on_help_button,
"on_back_button_clicked": self.on_help_back_clicked,
"on_forward_button_clicked": self.on_help_forward_clicked,
"on_Boolean_treeview_columns_changed": self.resize_columns,
"on_completion_entry_changed": self.application_selected,
"on_Add_button_clicked": self.add_button_clicked,
"on_Delete_button_clicked": self.delete_button_clicked,
"on_Modify_button_clicked": self.modify_button_clicked,
"on_Show_modified_only_toggled": self.on_show_modified_only,
"on_cancel_button_config_clicked": self.close_config_window,
"on_Import_button_clicked": self.import_config_show,
"on_Export_button_clicked": self.export_config_show,
"on_enable_unconfined_toggled": self.unconfined_toggle,
"on_enable_permissive_toggled": self.permissive_toggle,
"on_system_policy_type_combobox_changed": self.change_default_policy,
"on_Enforcing_button_default_toggled": self.change_default_mode,
"on_Permissive_button_default_toggled": self.change_default_mode,
"on_Disabled_button_default_toggled": self.change_default_mode,
"on_Relabel_button_toggled_cb": self.relabel_on_reboot,
"on_advanced_system_button_press_event": self.reveal_advanced_system,
"on_files_type_combobox_changed": self.show_more_types,
"on_filter_row_changed": self.filter_the_data,
"on_button_toggled": self.tab_change,
"gtk_main_quit": self.closewindow
}
self.previously_modified_initialize(customized)
builder.connect_signals(dic)
self.window.show() # Show the gui to the screen
GLib.timeout_add_seconds(5, self.selinux_status)
self.selinux_status()
self.lockdown_inited = False
self.add_modify_delete_box.hide()
self.filter_box.hide()
if self.status == DISABLED:
self.show_system_page()
else:
if self.application:
self.applications_selection_button.set_label(self.application)
self.completion_entry.set_text(self.application)
self.show_applications_page()
self.tab_change()
else:
self.clearbuttons()
self.outer_notebook.set_current_page(START_PAGE)
self.reinit()
self.finish_init = True
Gtk.main()
def init_cur(self):
self.cur_dict = {}
for k in keys:
self.cur_dict[k] = {}
def remove_cur(self, ctr):
i = 0
for k in self.cur_dict:
for j in self.cur_dict[k]:
if i == ctr:
del(self.cur_dict[k][j])
return
i += 1
def selinux_status(self):
try:
self.status = selinux.security_getenforce()
except OSError:
self.status = DISABLED
if self.status == DISABLED:
self.current_status_label.set_sensitive(False)
self.current_status_enforcing.set_sensitive(False)
self.current_status_permissive.set_sensitive(False)
self.enforcing_button_default.set_sensitive(False)
self.status_bar.push(self.context_id, _("System Status: Disabled"))
self.info_text.set_label(DISABLED_TEXT)
else:
self.set_enforce_text(self.status)
if os.path.exists('/.autorelabel'):
self.relabel_button.set_active(True)
else:
self.relabel_button_no.set_active(True)
policytype = selinux.selinux_getpolicytype()[1]
mode = selinux.selinux_getenforcemode()[1]
if mode == ENFORCING:
self.enforcing_button_default.set_active(True)
if mode == PERMISSIVE:
self.permissive_button_default.set_active(True)
if mode == DISABLED:
self.disabled_button_default.set_active(True)
return True
def lockdown_init(self):
if self.lockdown_inited:
return
self.wait_mouse()
self.lockdown_inited = True
self.disable_ptrace_button.set_active(selinux.security_get_boolean_active("deny_ptrace"))
self.module_dict = {}
for m in self.dbus.semodule_list().split("\n"):
mod = m.split()
if len(mod) < 3:
continue
self.module_dict[mod[1]] = { "priority": mod[0], "Disabled" : (len(mod) > 3) }
self.enable_unconfined_button.set_active(not self.module_dict["unconfined"]["Disabled"])
self.enable_permissive_button.set_active(not self.module_dict["permissivedomains"]["Disabled"])
self.ready_mouse()
def column_clicked(self, treeview, treepath, treecol, *args):
iter = self.get_selected_iter()
if not iter:
return
if self.opage == BOOLEANS_PAGE:
if treecol.get_name() == "more_detail_col":
self.display_more_detail(self.window, treepath)
if self.opage == FILES_PAGE:
visible = self.liststore.get_value(iter, 3)
# If visible is true then fix mislabeled will be visible
if treecol.get_name() == "restorecon_col" and visible:
self.fix_mislabeled(self.liststore.get_value(iter, 0))
if self.opage == TRANSITIONS_PAGE:
bool_name = self.liststore.get_value(iter, 1)
if bool_name:
self.boolean_radio_button.clicked()
self.filter_entry.set_text(bool_name)
def idle_func(self):
while Gtk.events_pending():
Gtk.main_iteration()
def match_func(self, completion, key_string, iter, func_data):
try:
if self.application_liststore.get_value(iter, 0).find(key_string) != -1:
return True
return False
except AttributeError:
pass
def help_show_page(self):
self.back_button.set_sensitive(self.help_page != 0)
self.forward_button.set_sensitive(self.help_page < (len(self.help_list) - 1))
try:
fd = open("%shelp/%s.txt" % (self.code_path, self.help_list[self.help_page]), "r")
buf = fd.read()
fd.close()
except IOError:
buf = ""
help_text = self.help_text.get_buffer()
help_text.set_text(buf % {"APP": self.application})
self.help_text.set_buffer(help_text)
self.help_image.set_from_file("%shelp/%s.png" % (self.code_path, self.help_list[self.help_page]))
self.show_popup(self.help_window)
def on_help_back_clicked(self, *args):
self.help_page -= 1
self.help_show_page()
def on_help_forward_clicked(self, *args):
self.help_page += 1
self.help_show_page()
def on_help_button(self, *args):
self.help_page = 0
self.help_list = []
if self.opage == START_PAGE:
self.help_window.set_title(_("Help: Start Page"))
self.help_list = ["start"]
if self.opage == BOOLEANS_PAGE:
self.help_window.set_title(_("Help: Booleans Page"))
self.help_list = ["booleans", "booleans_toggled", "booleans_more", "booleans_more_show"]
if self.opage == FILES_PAGE:
ipage = self.inner_notebook_files.get_current_page()
if ipage == EXE_PAGE:
self.help_window.set_title(_("Help: Executable Files Page"))
self.help_list = ["files_exec"]
if ipage == WRITABLE_PAGE:
self.help_window.set_title(_("Help: Writable Files Page"))
self.help_list = ["files_write"]
if ipage == APP_PAGE:
self.help_window.set_title(_("Help: Application Types Page"))
self.help_list = ["files_app"]
if self.opage == NETWORK_PAGE:
ipage = self.inner_notebook_network.get_current_page()
if ipage == OUTBOUND_PAGE:
self.help_window.set_title(_("Help: Outbound Network Connections Page"))
self.help_list = ["ports_outbound"]
if ipage == INBOUND_PAGE:
self.help_window.set_title(_("Help: Inbound Network Connections Page"))
self.help_list = ["ports_inbound"]
if self.opage == TRANSITIONS_PAGE:
ipage = self.inner_notebook_transitions.get_current_page()
if ipage == TRANSITIONS_FROM_PAGE:
self.help_window.set_title(_("Help: Transition from application Page"))
self.help_list = ["transition_from", "transition_from_boolean", "transition_from_boolean_1", "transition_from_boolean_2"]
if ipage == TRANSITIONS_TO_PAGE:
self.help_window.set_title(_("Help: Transition into application Page"))
self.help_list = ["transition_to"]
if ipage == TRANSITIONS_FILE_PAGE:
self.help_window.set_title(_("Help: Transition application file Page"))
self.help_list = ["transition_file"]
if self.opage == SYSTEM_PAGE:
self.help_window.set_title(_("Help: Systems Page"))
self.help_list = ["system", "system_boot_mode", "system_current_mode", "system_export", "system_policy_type", "system_relabel"]
if self.opage == LOCKDOWN_PAGE:
self.help_window.set_title(_("Help: Lockdown Page"))
self.help_list = ["lockdown", "lockdown_unconfined", "lockdown_permissive", "lockdown_ptrace"]
if self.opage == LOGIN_PAGE:
self.help_window.set_title(_("Help: Login Page"))
self.help_list = ["login", "login_default"]
if self.opage == USER_PAGE:
self.help_window.set_title(_("Help: SELinux User Page"))
self.help_list = ["users"]
if self.opage == FILE_EQUIV_PAGE:
self.help_window.set_title(_("Help: File Equivalence Page"))
self.help_list = ["file_equiv"]
return self.help_show_page()
def open_combo_menu(self, *args):
if self.popup == 0:
self.popup = 1
location = self.window.get_position()
self.main_selection_window.move(location[0] + 2, location[1] + 65)
self.main_selection_window.show()
else:
self.main_selection_window.hide()
self.popup = 0
def hide_combo_menu(self, *args):
self.main_selection_window.hide()
self.popup = 0
def set_application_label(self, *args):
self.set_application_label = True
def resize_wrap(self, *args):
print(args)
def initialize_system_default_mode(self):
self.enforce_mode = selinux.selinux_getenforcemode()[1]
if self.enforce_mode == ENFORCING:
self.enforce_button = self.enforcing_button_default
if self.enforce_mode == PERMISSIVE:
self.enforce_button = self.permissive_button_default
if self.enforce_mode == DISABLED:
self.enforce_button = self.disabled_button_default
def populate_system_policy(self):
types = next(os.walk(selinux.selinux_path(), topdown=True))[1]
types.sort()
ctr = 0
for item in types:
iter = self.system_policy_type_liststore.append()
self.system_policy_type_liststore.set_value(iter, 0, item)
if item == self.initialtype:
self.system_policy_type_combobox.set_active(ctr)
self.typeHistory = ctr
ctr += 1
return ctr
def filter_the_data(self, list, iter, *args):
# When there is no txt in the box show all items in the tree
if self.filter_txt == "":
return True
try:
for x in range(0, list.get_n_columns()):
try:
val = list.get_value(iter, x)
if val is True or val is False or val is None:
continue
# Returns true if filter_txt exists within the val
if(val.find(self.filter_txt) != -1 or val.lower().find(self.filter_txt) != -1):
return True
except (AttributeError, TypeError):
pass
except: # ValueError:
pass
return False
def net_update(self, app, netd, protocol, direction, model):
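        # netd maps a key to a list of (port type, ports) tuples. Skip
        # entries the user has deleted in this session, or whose type is
        # superseded by a pending local change recorded in cur_dict["port"].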
for k in netd.keys():
for t, ports in netd[k]:
pkey = (",".join(ports), protocol)
if pkey in self.cur_dict["port"]:
if self.cur_dict["port"][pkey]["action"] == "-d":
continue
if t != self.cur_dict["port"][pkey]["type"]:
continue
self.network_initial_data_insert(model, ", ".join(ports), t, protocol)
def file_equiv_initialize(self):
self.wait_mouse()
edict = sepolicy.get_file_equiv()
self.file_equiv_liststore.clear()
for f in edict:
iter = self.file_equiv_liststore.append()
if edict[f]["modify"]:
name = self.markup(f)
equiv = self.markup(edict[f]["equiv"])
else:
name = f
equiv = edict[f]["equiv"]
self.file_equiv_liststore.set_value(iter, 0, name)
self.file_equiv_liststore.set_value(iter, 1, equiv)
self.file_equiv_liststore.set_value(iter, 2, edict[f]["modify"])
self.ready_mouse()
def user_initialize(self):
self.wait_mouse()
self.user_liststore.clear()
for u in sepolicy.get_selinux_users():
iter = self.user_liststore.append()
self.user_liststore.set_value(iter, 0, str(u["name"]))
roles = u["roles"]
if "object_r" in roles:
roles.remove("object_r")
self.user_liststore.set_value(iter, 1, ", ".join(roles))
self.user_liststore.set_value(iter, 2, u.get("level", ""))
self.user_liststore.set_value(iter, 3, u.get("range", ""))
self.user_liststore.set_value(iter, 4, True)
self.ready_mouse()
def login_initialize(self):
self.wait_mouse()
self.login_liststore.clear()
for u in sepolicy.get_login_mappings():
iter = self.login_liststore.append()
self.login_liststore.set_value(iter, 0, u["name"])
self.login_liststore.set_value(iter, 1, u["seuser"])
self.login_liststore.set_value(iter, 2, u["mls"])
self.login_liststore.set_value(iter, 3, True)
self.ready_mouse()
def network_initialize(self, app):
netd = sepolicy.network.get_network_connect(app, "tcp", "name_connect", check_bools=True)
self.net_update(app, netd, "tcp", OUTBOUND_PAGE, self.network_out_liststore)
netd = sepolicy.network.get_network_connect(app, "tcp", "name_bind", check_bools=True)
self.net_update(app, netd, "tcp", INBOUND_PAGE, self.network_in_liststore)
netd = sepolicy.network.get_network_connect(app, "udp", "name_bind", check_bools=True)
self.net_update(app, netd, "udp", INBOUND_PAGE, self.network_in_liststore)
def network_initial_data_insert(self, model, ports, portType, protocol):
iter = model.append()
model.set_value(iter, 0, ports)
model.set_value(iter, 1, protocol)
model.set_value(iter, 2, portType)
model.set_value(iter, 4, True)
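    # Select the combobox row matching val; if it is missing, insert val just
    # before a trailing "More..." entry (or append it) and make it active.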
def combo_set_active_text(self, combobox, val):
ctr = 0
liststore = combobox.get_model()
for i in liststore:
if i[0] == val:
combobox.set_active(ctr)
return
ctr += 1
niter = liststore.get_iter(ctr - 1)
if liststore.get_value(niter, 0) == _("More..."):
iter = liststore.insert_before(niter)
ctr = ctr - 1
else:
iter = liststore.append()
liststore.set_value(iter, 0, val)
combobox.set_active(ctr)
def combo_get_active_text(self, combobox):
liststore = combobox.get_model()
index = combobox.get_active()
if index < 0:
return None
iter = liststore.get_iter(index)
return liststore.get_value(iter, 0)
def combo_box_add(self, val, val1):
if val is None:
return
iter = self.application_liststore.append()
self.application_liststore.set_value(iter, 0, val)
self.application_liststore.set_value(iter, 1, val1)
def select_type_more(self, *args):
app = self.moreTypes_treeview.get_selection()
iter = app.get_selected()[1]
if iter is None:
return
app = self.more_types_files_liststore.get_value(iter, 0)
self.combo_set_active_text(self.files_type_combobox, app)
self.closewindow(self.moreTypes_window_files)
def advanced_item_button_push(self, *args):
row = self.advanced_search_treeview.get_selection()
model, iter = row.get_selected()
iter = model.convert_iter_to_child_iter(iter)
iter = self.advanced_search_filter.convert_iter_to_child_iter(iter)
app = self.application_liststore.get_value(iter, 1)
if app is None:
return
self.advanced_filter_entry.set_text('')
self.advanced_search_window.hide()
self.reveal_advanced(self.main_advanced_label)
self.completion_entry.set_text(app)
def advanced_item_selected(self, treeview, path, *args):
iter = self.advanced_search_filter.get_iter(path)
iter = self.advanced_search_filter.convert_iter_to_child_iter(iter)
app = self.application_liststore.get_value(iter, 1)
self.advanced_filter_entry.set_text('')
self.advanced_search_window.hide()
self.reveal_advanced(self.main_advanced_label)
self.completion_entry.set_text(app)
self.application_selected()
    def find_application(self, app):
        if app:
            for items in self.application_liststore:
                if app == items[0]:
                    return True
        return False
def application_selected(self, *args):
self.show_mislabeled_files_only.set_visible(False)
self.mislabeled_files_label.set_visible(False)
self.warning_files.set_visible(False)
self.filter_entry.set_text('')
app = self.completion_entry.get_text()
if not self.find_application(app):
return
self.show_applications_page()
self.add_button.set_sensitive(True)
self.delete_button.set_sensitive(True)
# Clear the tree to prepare for a new selection otherwise
self.executable_files_liststore.clear()
# data will pile up every time the user selects a new item from the drop down menu
self.network_in_liststore.clear()
self.network_out_liststore.clear()
self.boolean_liststore.clear()
self.transitions_into_liststore.clear()
self.transitions_from_treestore.clear()
self.application_files_liststore.clear()
self.writable_files_liststore.clear()
self.transitions_file_liststore.clear()
try:
if app[0] == '/':
app = sepolicy.get_init_transtype(app)
if not app:
return
self.application = app
except IndexError:
pass
self.wait_mouse()
self.previously_modified_initialize(self.dbus.customized())
self.reinit()
self.boolean_initialize(app)
self.mislabeled_files = False
self.executable_files_initialize(app)
self.network_initialize(app)
self.writable_files_initialize(app)
self.transitions_into_initialize(app)
self.transitions_from_initialize(app)
self.application_files_initialize(app)
self.transitions_files_initialize(app)
self.executable_files_tab.set_tooltip_text(_("File path used to enter the '%s' domain.") % app)
self.writable_files_tab.set_tooltip_text(_("Files to which the '%s' domain can write.") % app)
self.network_out_tab.set_tooltip_text(_("Network Ports to which the '%s' is allowed to connect.") % app)
self.network_in_tab.set_tooltip_text(_("Network Ports to which the '%s' is allowed to listen.") % app)
self.application_files_tab.set_tooltip_text(_("File Types defined for the '%s'.") % app)
self.boolean_radio_button.set_tooltip_text(_("Display boolean information that can be used to modify the policy for the '%s'.") % app)
self.files_radio_button.set_tooltip_text(_("Display file type information that can be used by the '%s'.") % app)
self.network_radio_button.set_tooltip_text(_("Display network ports to which the '%s' can connect or listen to.") % app)
self.transitions_into_tab.set_label(_("Application Transitions Into '%s'") % app)
self.transitions_from_tab.set_label(_("Application Transitions From '%s'") % app)
self.transitions_file_tab.set_label(_("File Transitions From '%s'") % app)
self.transitions_into_tab.set_tooltip_text(_("Executables which will transition to '%s', when executing selected domains entrypoint.") % app)
self.transitions_from_tab.set_tooltip_text(_("Executables which will transition to a different domain, when '%s' executes them.") % app)
self.transitions_file_tab.set_tooltip_text(_("Files by '%s' with transitions to a different label.") % app)
self.transitions_radio_button.set_tooltip_text(_("Display applications that can transition into or out of the '%s'.") % app)
self.application = app
self.applications_selection_button.set_label(self.application)
self.ready_mouse()
def reinit(self):
sepolicy.reinit()
self.fcdict = sepolicy.get_fcdict()
self.local_file_paths = sepolicy.get_local_file_paths()
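    # Parses the buffer returned by self.dbus.customized(): one record per
    # line in semanage-export style, e.g. "boolean -m -1 <name>" or
    # "port -a -t <type> -p tcp <ports>"; rec[0] names the record type.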
def previously_modified_initialize(self, buf):
self.cust_dict = {}
for i in buf.split("\n"):
rec = i.split()
if len(rec) == 0:
continue
if rec[1] == "-D":
continue
if rec[0] not in self.cust_dict:
self.cust_dict[rec[0]] = {}
if rec[0] == "boolean":
self.cust_dict["boolean"][rec[-1]] = {"active": rec[2] == "-1"}
if rec[0] == "login":
self.cust_dict["login"][rec[-1]] = {"seuser": rec[3], "range": rec[5]}
if rec[0] == "interface":
self.cust_dict["interface"][rec[-1]] = {"type": rec[3]}
if rec[0] == "user":
self.cust_dict["user"][rec[-1]] = {"level": "s0", "range": rec[3], "role": rec[5]}
if rec[0] == "port":
self.cust_dict["port"][(rec[-1], rec[-2])] = {"type": rec[3]}
if rec[0] == "node":
self.cust_dict["node"][rec[-1]] = {"mask": rec[3], "protocol": rec[5], "type": rec[7]}
if rec[0] == "fcontext":
if rec[2] == "-e":
if "fcontext-equiv" not in self.cust_dict:
self.cust_dict["fcontext-equiv"] = {}
self.cust_dict["fcontext-equiv"][(rec[-1])] = {"equiv": rec[3]}
else:
self.cust_dict["fcontext"][(rec[-1], rec[3])] = {"type": rec[5]}
if rec[0] == "module":
self.cust_dict["module"][rec[-1]] = {"enabled": rec[2] != "-d"}
if "module" not in self.cust_dict:
return
for semodule, button in [("unconfined", self.disable_unconfined_button), ("permissivedomains", self.disable_permissive_button)]:
if semodule in self.cust_dict["module"]:
button.set_active(self.cust_dict["module"][semodule]["enabled"])
for i in keys:
if i not in self.cust_dict:
self.cust_dict.update({i: {}})
def executable_files_initialize(self, application):
self.entrypoints = sepolicy.get_entrypoints(application)
for exe in self.entrypoints.keys():
if len(self.entrypoints[exe]) == 0:
continue
file_class = self.entrypoints[exe][1]
for path in self.entrypoints[exe][0]:
if (path, file_class) in self.cur_dict["fcontext"]:
if self.cur_dict["fcontext"][(path, file_class)]["action"] == "-d":
continue
if exe != self.cur_dict["fcontext"][(path, file_class)]["type"]:
continue
self.files_initial_data_insert(self.executable_files_liststore, path, exe, file_class)
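    # A path is considered mislabeled when its on-disk context (getfilecon)
    # differs from the context the policy would assign (matchpathcon).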
def mislabeled(self, path):
try:
con = selinux.matchpathcon(path, 0)[1]
cur = selinux.getfilecon(path)[1]
return con != cur
except OSError:
return False
def set_mislabeled(self, tree, path, iter, niter):
if not self.mislabeled(path):
return
con = selinux.matchpathcon(path, 0)[1]
cur = selinux.getfilecon(path)[1]
self.mislabeled_files = True
# Set visibility of label
tree.set_value(niter, 3, True)
# Has a mislabel
tree.set_value(iter, 4, True)
tree.set_value(niter, 4, True)
tree.set_value(iter, 5, con.split(":")[2])
tree.set_value(iter, 6, cur.split(":")[2])
def writable_files_initialize(self, application):
# Traversing the dictionary data struct
self.writable_files = sepolicy.get_writable_files(application)
for write in self.writable_files.keys():
if len(self.writable_files[write]) < 2:
self.files_initial_data_insert(self.writable_files_liststore, None, write, _("all files"))
continue
file_class = self.writable_files[write][1]
for path in self.writable_files[write][0]:
if (path, file_class) in self.cur_dict["fcontext"]:
if self.cur_dict["fcontext"][(path, file_class)]["action"] == "-d":
continue
if write != self.cur_dict["fcontext"][(path, file_class)]["type"]:
continue
self.files_initial_data_insert(self.writable_files_liststore, path, write, file_class)
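    # Top-level rows hold the policy path pattern; child rows list the real
    # files that match it.  Bold markup marks locally customized entries
    # (those present in self.local_file_paths).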
def files_initial_data_insert(self, liststore, path, selinux_label, file_class):
iter = liststore.append(None)
if path is None:
path = _("MISSING FILE PATH")
modify = False
else:
modify = (path, file_class) in self.local_file_paths
for p in sepolicy.find_file(path):
niter = liststore.append(iter)
liststore.set_value(niter, 0, p)
self.set_mislabeled(liststore, p, iter, niter)
        if modify:
            path = self.markup(path)
            selinux_label = self.markup(selinux_label)
            file_class = self.markup(file_class)
liststore.set_value(iter, 0, path)
liststore.set_value(iter, 1, selinux_label)
liststore.set_value(iter, 2, file_class)
liststore.set_value(iter, 7, modify)
def markup(self, f):
return "<b>%s</b>" % f
def unmarkup(self, f):
if f:
return re.sub("</b>$", "", re.sub("^<b>", "", f))
return None
def application_files_initialize(self, application):
self.file_types = sepolicy.get_file_types(application)
for app in self.file_types.keys():
if len(self.file_types[app]) == 0:
continue
file_class = self.file_types[app][1]
for path in self.file_types[app][0]:
desc = sepolicy.get_description(app, markup=self.markup)
if (path, file_class) in self.cur_dict["fcontext"]:
if self.cur_dict["fcontext"][(path, file_class)]["action"] == "-d":
continue
if app != self.cur_dict["fcontext"][(path, file_class)]["type"]:
continue
self.files_initial_data_insert(self.application_files_liststore, path, desc, file_class)
    def modified(self):
        for k in self.cur_dict:
            if len(self.cur_dict[k]) > 0:
                return True
        return False
def boolean_initialize(self, application):
for blist in sepolicy.get_bools(application):
for b, active in blist:
if b in self.cur_dict["boolean"]:
active = self.cur_dict["boolean"][b]['active']
desc = sepolicy.boolean_desc(b)
self.boolean_initial_data_insert(b, desc, active)
def boolean_initial_data_insert(self, val, desc, active):
# Insert data from data source into tree
iter = self.boolean_liststore.append()
self.boolean_liststore.set_value(iter, 0, active)
self.boolean_liststore.set_value(iter, 1, desc)
self.boolean_liststore.set_value(iter, 2, val)
self.boolean_liststore.set_value(iter, 3, _('More...'))
def transitions_into_initialize(self, application):
for x in sepolicy.get_transitions_into(application):
active = None
executable = None
source = None
if "boolean" in x:
active = x["boolean"]
if "target" in x:
executable = x["target"]
if "source" in x:
source = x["source"]
self.transitions_into_initial_data_insert(active, executable, source)
def transitions_into_initial_data_insert(self, active, executable, source):
iter = self.transitions_into_liststore.append()
        if active is not None:
            self.transitions_into_liststore.set_value(iter, 0, enabled[active[0][1]])  # active[0][1] is either T or F (enabled is all the way at the top)
        else:
            self.transitions_into_liststore.set_value(iter, 0, "Default")
self.transitions_into_liststore.set_value(iter, 2, executable)
self.transitions_into_liststore.set_value(iter, 1, source)
    def transitions_from_initialize(self, application):
        for x in sepolicy.get_transitions(application):
            active = None
            executable_type = None
            transtype = None
            if "boolean" in x:
                active = x["boolean"]
            if "target" in x:
                executable_type = x["target"]
            if "transtype" in x:
                transtype = x["transtype"]
            self.transitions_from_initial_data_insert(active, executable_type, transtype)
            try:
                for executable in self.fcdict[executable_type]["regex"]:
                    self.transitions_from_initial_data_insert(active, executable, transtype)
            except KeyError:
                pass
def transitions_from_initial_data_insert(self, active, executable, transtype):
iter = self.transitions_from_treestore.append(None)
        if active is None:
self.transitions_from_treestore.set_value(iter, 0, "Default")
self.transitions_from_treestore.set_value(iter, 5, False)
else:
niter = self.transitions_from_treestore.append(iter)
# active[0][1] is either T or F (enabled is all the way at the top)
self.transitions_from_treestore.set_value(iter, 0, enabled[active[0][1]])
markup = ('<span foreground="blue"><u>','</u></span>')
if active[0][1]:
self.transitions_from_treestore.set_value(niter, 2, (_("To disable this transition, go to the %sBoolean section%s.") % markup))
else:
self.transitions_from_treestore.set_value(niter, 2, (_("To enable this transition, go to the %sBoolean section%s.") % markup))
# active[0][0] is the Bool Name
self.transitions_from_treestore.set_value(niter, 1, active[0][0])
self.transitions_from_treestore.set_value(niter, 5, True)
self.transitions_from_treestore.set_value(iter, 2, executable)
self.transitions_from_treestore.set_value(iter, 3, transtype)
def transitions_files_initialize(self, application):
for i in sepolicy.get_file_transitions(application):
if 'filename' in i:
filename = i['filename']
else:
filename = None
self.transitions_files_inital_data_insert(i['target'], i['class'], i['transtype'], filename)
def transitions_files_inital_data_insert(self, path, tclass, dest, name):
iter = self.transitions_file_liststore.append()
self.transitions_file_liststore.set_value(iter, 0, path)
self.transitions_file_liststore.set_value(iter, 1, tclass)
self.transitions_file_liststore.set_value(iter, 2, dest)
        if name is None:
name = '*'
self.transitions_file_liststore.set_value(iter, 3, name)
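    # Central page dispatcher: rebuilds self.treeview/treesort/treefilter/
    # liststore and the Add/Delete/Modify tooltips whenever the user switches
    # the outer notebook page or an inner tab.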
def tab_change(self, *args):
self.clear_filters()
self.treeview = None
self.treesort = None
self.treefilter = None
self.liststore = None
self.modify_button.set_sensitive(False)
self.add_modify_delete_box.hide()
self.show_modified_only.set_visible(False)
self.show_mislabeled_files_only.set_visible(False)
self.mislabeled_files_label.set_visible(False)
self.warning_files.set_visible(False)
if self.boolean_radio_button.get_active():
self.outer_notebook.set_current_page(BOOLEANS_PAGE)
self.treeview = self.boolean_treeview
self.show_modified_only.set_visible(True)
if self.files_radio_button.get_active():
self.show_popup(self.add_modify_delete_box)
self.show_modified_only.set_visible(True)
self.show_mislabeled_files_only.set_visible(self.mislabeled_files)
self.mislabeled_files_label.set_visible(self.mislabeled_files)
self.warning_files.set_visible(self.mislabeled_files)
self.outer_notebook.set_current_page(FILES_PAGE)
if args[0] == self.inner_notebook_files:
ipage = args[2]
else:
ipage = self.inner_notebook_files.get_current_page()
if ipage == EXE_PAGE:
self.treeview = self.executable_files_treeview
category = _("executable")
elif ipage == WRITABLE_PAGE:
self.treeview = self.writable_files_treeview
category = _("writable")
elif ipage == APP_PAGE:
self.treeview = self.application_files_treeview
category = _("application")
self.add_button.set_tooltip_text(_("Add new %(TYPE)s file path for '%(DOMAIN)s' domains.") % {"TYPE": category, "DOMAIN": self.application})
self.delete_button.set_tooltip_text(_("Delete %(TYPE)s file paths for '%(DOMAIN)s' domain.") % {"TYPE": category, "DOMAIN": self.application})
self.modify_button.set_tooltip_text(_("Modify %(TYPE)s file path for '%(DOMAIN)s' domain. Only bolded items in the list can be selected, this indicates they were modified previously.") % {"TYPE": category, "DOMAIN": self.application})
if self.network_radio_button.get_active():
self.add_modify_delete_box.show()
self.show_modified_only.set_visible(True)
self.outer_notebook.set_current_page(NETWORK_PAGE)
if args[0] == self.inner_notebook_network:
ipage = args[2]
else:
ipage = self.inner_notebook_network.get_current_page()
if ipage == OUTBOUND_PAGE:
self.treeview = self.network_out_treeview
category = _("connect")
if ipage == INBOUND_PAGE:
self.treeview = self.network_in_treeview
category = _("listen for inbound connections")
self.add_button.set_tooltip_text(_("Add new port definition to which the '%(APP)s' domain is allowed to %(PERM)s.") % {"APP": self.application, "PERM": category})
self.delete_button.set_tooltip_text(_("Delete modified port definitions to which the '%(APP)s' domain is allowed to %(PERM)s.") % {"APP": self.application, "PERM": category})
self.modify_button.set_tooltip_text(_("Modify port definitions to which the '%(APP)s' domain is allowed to %(PERM)s.") % {"APP": self.application, "PERM": category})
if self.transitions_radio_button.get_active():
self.outer_notebook.set_current_page(TRANSITIONS_PAGE)
if args[0] == self.inner_notebook_transitions:
ipage = args[2]
else:
ipage = self.inner_notebook_transitions.get_current_page()
if ipage == TRANSITIONS_FROM_PAGE:
self.treeview = self.transitions_from_treeview
if ipage == TRANSITIONS_TO_PAGE:
self.treeview = self.transitions_into_treeview
if ipage == TRANSITIONS_FILE_PAGE:
self.treeview = self.transitions_file_treeview
if self.system_radio_button.get_active():
self.outer_notebook.set_current_page(SYSTEM_PAGE)
self.filter_box.hide()
if self.lockdown_radio_button.get_active():
self.lockdown_init()
self.outer_notebook.set_current_page(LOCKDOWN_PAGE)
self.filter_box.hide()
if self.user_radio_button.get_active():
self.outer_notebook.set_current_page(USER_PAGE)
self.add_modify_delete_box.show()
self.show_modified_only.set_visible(True)
self.treeview = self.user_treeview
self.add_button.set_tooltip_text(_("Add new SELinux User/Role definition."))
self.delete_button.set_tooltip_text(_("Delete modified SELinux User/Role definitions."))
self.modify_button.set_tooltip_text(_("Modify selected modified SELinux User/Role definitions."))
if self.login_radio_button.get_active():
self.outer_notebook.set_current_page(LOGIN_PAGE)
self.add_modify_delete_box.show()
self.show_modified_only.set_visible(True)
self.treeview = self.login_treeview
self.add_button.set_tooltip_text(_("Add new Login Mapping definition."))
self.delete_button.set_tooltip_text(_("Delete modified Login Mapping definitions."))
self.modify_button.set_tooltip_text(_("Modify selected modified Login Mapping definitions."))
if self.file_equiv_radio_button.get_active():
self.outer_notebook.set_current_page(FILE_EQUIV_PAGE)
self.add_modify_delete_box.show()
self.show_modified_only.set_visible(True)
self.treeview = self.file_equiv_treeview
self.add_button.set_tooltip_text(_("Add new File Equivalence definition."))
self.delete_button.set_tooltip_text(_("Delete modified File Equivalence definitions."))
self.modify_button.set_tooltip_text(_("Modify selected modified File Equivalence definitions. Only bolded items in the list can be selected, this indicates they were modified previously."))
self.opage = self.outer_notebook.get_current_page()
if self.treeview:
self.filter_box.show()
self.treesort = self.treeview.get_model()
self.treefilter = self.treesort.get_model()
self.liststore = self.treefilter.get_model()
for x in range(0, self.liststore.get_n_columns()):
col = self.treeview.get_column(x)
if col:
cell = col.get_cells()[0]
if isinstance(cell, Gtk.CellRendererText):
self.liststore.set_sort_func(x, self.stripsort, None)
self.treeview.get_selection().unselect_all()
self.modify_button.set_sensitive(False)
    def stripsort(self, model, row1, row2, user_data):
        # Compare with the <b></b> markup stripped so locally modified (bold)
        # rows sort together with plain ones.  cmp() is the Python 2 builtin;
        # under Python 3 it must be supplied by a module-level shim.
        sort_column, _order = model.get_sort_column_id()
        val1 = self.unmarkup(model.get_value(row1, sort_column))
        val2 = self.unmarkup(model.get_value(row2, sort_column))
        return cmp(val1, val2)
def display_more_detail(self, windows, path):
it = self.boolean_filter.get_iter(path)
it = self.boolean_filter.convert_iter_to_child_iter(it)
self.boolean_more_detail_tree_data_set.clear()
self.boolean_more_detail_window.set_title(_("Boolean %s Allow Rules") % self.boolean_liststore.get_value(it, 2))
blist = sepolicy.get_boolean_rules(self.application, self.boolean_liststore.get_value(it, 2))
for b in blist:
self.display_more_detail_init(b["source"], b["target"], b["class"], b["permlist"])
self.show_popup(self.boolean_more_detail_window)
def display_more_detail_init(self, source, target, class_type, permission):
iter = self.boolean_more_detail_tree_data_set.append()
self.boolean_more_detail_tree_data_set.set_value(iter, 0, "allow %s %s:%s { %s };" % (source, target, class_type, " ".join(permission)))
def add_button_clicked(self, *args):
self.modify = False
if self.opage == NETWORK_PAGE:
self.popup_network_label.set_text((_("Add Network Port for %s. Ports will be created when update is applied.")) % self.application)
self.network_popup_window.set_title((_("Add Network Port for %s")) % self.application)
self.init_network_dialog(args)
return
if self.opage == FILES_PAGE:
self.popup_files_label.set_text((_("Add File Labeling for %s. File labels will be created when update is applied.")) % self.application)
self.files_popup_window.set_title((_("Add File Labeling for %s")) % self.application)
self.init_files_dialog(args)
ipage = self.inner_notebook_files.get_current_page()
if ipage == EXE_PAGE:
self.files_path_entry.set_text("ex: /usr/sbin/Foobar")
else:
self.files_path_entry.set_text("ex: /var/lib/Foobar")
self.clear_entry = True
if self.opage == LOGIN_PAGE:
self.login_label.set_text((_("Add Login Mapping. User Mapping will be created when Update is applied.")))
self.login_popup_window.set_title(_("Add Login Mapping"))
self.login_init_dialog(args)
self.clear_entry = True
if self.opage == USER_PAGE:
self.user_label.set_text((_("Add SELinux User Role. SELinux user roles will be created when update is applied.")))
self.user_popup_window.set_title(_("Add SELinux Users"))
self.user_init_dialog(args)
self.clear_entry = True
if self.opage == FILE_EQUIV_PAGE:
self.file_equiv_source_entry.set_text("")
self.file_equiv_dest_entry.set_text("")
self.file_equiv_label.set_text((_("Add File Equivalency Mapping. Mapping will be created when update is applied.")))
self.file_equiv_popup_window.set_title(_("Add SELinux File Equivalency"))
self.clear_entry = True
self.show_popup(self.file_equiv_popup_window)
self.new_updates()
def show_popup(self, window):
self.current_popup = window
window.show()
def close_popup(self, *args):
self.current_popup.hide()
self.window.set_sensitive(True)
return True
def modify_button_clicked(self, *args):
iter = None
if self.treeview:
iter = self.get_selected_iter()
if not iter:
self.modify_button.set_sensitive(False)
return
self.modify = True
if self.opage == NETWORK_PAGE:
self.modify_button_network_clicked(args)
if self.opage == FILES_PAGE:
self.popup_files_label.set_text((_("Modify File Labeling for %s. File labels will be created when update is applied.")) % self.application)
self.files_popup_window.set_title((_("Add File Labeling for %s")) % self.application)
self.delete_old_item = None
self.init_files_dialog(args)
ipage = self.inner_notebook_files.get_current_page()
if ipage == EXE_PAGE:
iter = self.executable_files_filter.convert_iter_to_child_iter(iter)
self.delete_old_item = iter
path = self.executable_files_liststore.get_value(iter, 0)
self.files_path_entry.set_text(path)
                ftype = self.executable_files_liststore.get_value(iter, 1)
                if ftype is not None:
                    self.combo_set_active_text(self.files_type_combobox, ftype)
                tclass = self.executable_files_liststore.get_value(iter, 2)
                if tclass is not None:
                    self.combo_set_active_text(self.files_class_combobox, tclass)
if ipage == WRITABLE_PAGE:
iter = self.writable_files_filter.convert_iter_to_child_iter(iter)
self.delete_old_item = iter
path = self.writable_files_liststore.get_value(iter, 0)
self.files_path_entry.set_text(path)
                ftype = self.writable_files_liststore.get_value(iter, 1)
                if ftype is not None:
                    self.combo_set_active_text(self.files_type_combobox, ftype)
                tclass = self.writable_files_liststore.get_value(iter, 2)
                if tclass is not None:
                    self.combo_set_active_text(self.files_class_combobox, tclass)
if ipage == APP_PAGE:
iter = self.application_files_filter.convert_iter_to_child_iter(iter)
self.delete_old_item = iter
path = self.application_files_liststore.get_value(iter, 0)
self.files_path_entry.set_text(path)
                try:
                    get_type = self.application_files_liststore.get_value(iter, 1)
                    get_type = get_type.split("<b>")[1].split("</b>")
                except AttributeError:
                    # Unmodified rows carry no <b></b> markup.
                    get_type = None
                ftype = self.application_files_liststore.get_value(iter, 2)
                if ftype is not None:
                    self.combo_set_active_text(self.files_type_combobox, ftype)
                if get_type:
                    tclass = get_type[0]
                    self.combo_set_active_text(self.files_class_combobox, tclass)
if self.opage == USER_PAGE:
self.user_init_dialog(args)
self.user_name_entry.set_text(self.user_liststore.get_value(iter, 0))
self.user_mls_level_entry.set_text(self.user_liststore.get_value(iter, 2))
self.user_mls_entry.set_text(self.user_liststore.get_value(iter, 3))
self.combo_set_active_text(self.user_roles_combobox, self.user_liststore.get_value(iter, 1))
self.user_label.set_text((_("Modify SELinux User Role. SELinux user roles will be modified when update is applied.")))
self.user_popup_window.set_title(_("Modify SELinux Users"))
self.show_popup(self.user_popup_window)
if self.opage == LOGIN_PAGE:
self.login_init_dialog(args)
self.login_name_entry.set_text(self.login_liststore.get_value(iter, 0))
self.login_mls_entry.set_text(self.login_liststore.get_value(iter, 2))
self.combo_set_active_text(self.login_seuser_combobox, self.login_liststore.get_value(iter, 1))
self.login_label.set_text((_("Modify Login Mapping. Login Mapping will be modified when Update is applied.")))
self.login_popup_window.set_title(_("Modify Login Mapping"))
self.show_popup(self.login_popup_window)
if self.opage == FILE_EQUIV_PAGE:
self.file_equiv_source_entry.set_text(self.unmarkup(self.file_equiv_liststore.get_value(iter, 0)))
self.file_equiv_dest_entry.set_text(self.unmarkup(self.file_equiv_liststore.get_value(iter, 1)))
self.file_equiv_label.set_text((_("Modify File Equivalency Mapping. Mapping will be created when update is applied.")))
self.file_equiv_popup_window.set_title(_("Modify SELinux File Equivalency"))
self.clear_entry = True
self.show_popup(self.file_equiv_popup_window)
def populate_type_combo(self, tree, loc, *args):
iter = self.more_types_files_liststore.get_iter(loc)
ftype = self.more_types_files_liststore.get_value(iter, 0)
self.combo_set_active_text(self.files_type_combobox, ftype)
self.show_popup(self.files_popup_window)
self.moreTypes_window_files.hide()
def strip_domain(self, domain):
if domain == None:
return
if domain.endswith("_script_t"):
split_char = "_script_t"
else:
split_char = "_t"
return domain.split(split_char)[0]
def exclude_type(self, type, exclude_list):
for e in exclude_list:
if type.startswith(e):
return True
return False
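    # Fills the file-labeling dialog combos with candidate types derived from
    # the current domain prefix, excluding closely related domains collected
    # in exclude_list.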
def init_files_dialog(self, *args):
exclude_list = []
self.files_class_combobox.set_sensitive(True)
self.show_popup(self.files_popup_window)
ipage = self.inner_notebook_files.get_current_page()
self.files_type_combolist.clear()
self.files_class_combolist.clear()
compare = self.strip_domain(self.application)
for d in self.application_liststore:
if d[0].startswith(compare) and d[0] != self.application and not d[0].startswith("httpd_sys"):
exclude_list.append(self.strip_domain(d[0]))
self.more_types_files_liststore.clear()
try:
for files in sepolicy.file_type_str:
iter = self.files_class_combolist.append()
self.files_class_combolist.set_value(iter, 0, sepolicy.file_type_str[files])
            if ipage == EXE_PAGE and self.entrypoints is not None:
for exe in self.entrypoints.keys():
if exe.startswith(compare):
iter = self.files_type_combolist.append()
self.files_type_combolist.set_value(iter, 0, exe)
iter = self.more_types_files_liststore.append()
self.more_types_files_liststore.set_value(iter, 0, exe)
self.files_class_combobox.set_active(4)
self.files_class_combobox.set_sensitive(False)
            elif ipage == WRITABLE_PAGE and self.writable_files is not None:
for write in self.writable_files.keys():
if write.startswith(compare) and not self.exclude_type(write, exclude_list) and write in self.file_types:
iter = self.files_type_combolist.append()
self.files_type_combolist.set_value(iter, 0, write)
iter = self.more_types_files_liststore.append()
self.more_types_files_liststore.set_value(iter, 0, write)
self.files_class_combobox.set_active(0)
            elif ipage == APP_PAGE and self.file_types is not None:
                for app in sepolicy.get_all_file_types():
                    if app.startswith(compare) and not self.exclude_type(app, exclude_list):
                        iter = self.files_type_combolist.append()
                        self.files_type_combolist.set_value(iter, 0, app)
                        iter = self.more_types_files_liststore.append()
                        self.more_types_files_liststore.set_value(iter, 0, app)
                self.files_class_combobox.set_active(0)
        except AttributeError:
            pass
self.files_type_combobox.set_active(0)
self.files_mls_entry.set_text("s0")
iter = self.files_type_combolist.append()
self.files_type_combolist.set_value(iter, 0, _('More...'))
def modify_button_network_clicked(self, *args):
iter = self.get_selected_iter()
if not iter:
self.modify_button.set_sensitive(False)
return
self.popup_network_label.set_text((_("Modify Network Port for %s. Ports will be created when update is applied.")) % self.application)
self.network_popup_window.set_title((_("Modify Network Port for %s")) % self.application)
self.delete_old_item = None
self.init_network_dialog(args)
operation = "Modify"
mls = 1
self.modify = True
iter = self.get_selected_iter()
port = self.liststore.get_value(iter, 0)
self.network_ports_entry.set_text(port)
protocol = self.liststore.get_value(iter, 1)
if protocol == "tcp":
self.network_tcp_button.set_active(True)
elif protocol == "udp":
self.network_udp_button.set_active(True)
        ftype = self.liststore.get_value(iter, 2)
        if ftype is not None:
            self.combo_set_active_text(self.network_port_type_combobox, ftype)
self.delete_old_item = iter
def init_network_dialog(self, *args):
self.show_popup(self.network_popup_window)
ipage = self.inner_notebook_network.get_current_page()
self.network_port_type_combolist.clear()
self.network_ports_entry.set_text("")
try:
if ipage == OUTBOUND_PAGE:
netd = sepolicy.network.get_network_connect(self.application, "tcp", "name_connect", check_bools=True)
elif ipage == INBOUND_PAGE:
netd = sepolicy.network.get_network_connect(self.application, "tcp", "name_bind", check_bools=True)
netd += sepolicy.network.get_network_connect(self.application, "udp", "name_bind", check_bools=True)
port_types = []
for k in netd.keys():
for t, ports in netd[k]:
if t not in port_types + ["port_t", "unreserved_port_t"]:
if t.endswith("_type"):
continue
port_types.append(t)
port_types.sort()
            short_domain = self.strip_domain(self.application)
            if short_domain.endswith("d"):
                short_domain = short_domain[:-1]
            short_domain = short_domain + "_"
ctr = 0
found = 0
for t in port_types:
if t.startswith(short_domain):
found = ctr
iter = self.network_port_type_combolist.append()
self.network_port_type_combolist.set_value(iter, 0, t)
ctr += 1
self.network_port_type_combobox.set_active(found)
except AttributeError:
pass
self.network_tcp_button.set_active(True)
self.network_mls_entry.set_text("s0")
def login_seuser_combobox_change(self, combo, *args):
seuser = self.combo_get_active_text(combo)
if self.login_mls_entry.get_text() == "":
for u in sepolicy.get_selinux_users():
if seuser == u['name']:
self.login_mls_entry.set_text(u.get('range', ''))
def user_roles_combobox_change(self, combo, *args):
serole = self.combo_get_active_text(combo)
if self.user_mls_entry.get_text() == "":
for u in sepolicy.get_all_roles():
if serole == u['name']:
self.user_mls_entry.set_text(u.get('range', ''))
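    # The treeview stacks three models (sort -> filter -> liststore), so a
    # selection must be converted twice to reach the backing liststore iter.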
def get_selected_iter(self):
iter = None
if not self.treeview:
return None
row = self.treeview.get_selection()
if not row:
return None
treesort, iter = row.get_selected()
if iter:
iter = treesort.convert_iter_to_child_iter(iter)
if iter:
iter = self.treefilter.convert_iter_to_child_iter(iter)
return iter
def cursor_changed(self, *args):
self.modify_button.set_sensitive(False)
iter = self.get_selected_iter()
        if iter is None:
self.modify_button.set_sensitive(False)
return
if not self.liststore[iter] or not self.liststore[iter][-1]:
return
self.modify_button.set_sensitive(self.liststore[iter][-1])
def login_init_dialog(self, *args):
self.show_popup(self.login_popup_window)
self.login_seuser_combolist.clear()
users = sepolicy.get_all_users()
users.sort()
for u in users:
iter = self.login_seuser_combolist.append()
self.login_seuser_combolist.set_value(iter, 0, str(u))
self.login_name_entry.set_text("")
self.login_mls_entry.set_text("")
def user_init_dialog(self, *args):
self.show_popup(self.user_popup_window)
self.user_roles_combolist.clear()
roles = sepolicy.get_all_roles()
roles.sort()
for r in roles:
iter = self.user_roles_combolist.append()
self.user_roles_combolist.set_value(iter, 0, str(r))
self.user_name_entry.set_text("")
self.user_mls_entry.set_text("")
def on_disable_ptrace(self, checkbutton):
if self.finish_init:
update_buffer = "boolean -m -%d deny_ptrace" % checkbutton.get_active()
self.wait_mouse()
try:
self.dbus.semanage(update_buffer)
except dbus.exceptions.DBusException as e:
self.error(e)
self.ready_mouse()
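    # "Show modified only" rebuilds the visible liststore from rows whose keys
    # appear in cust_dict; unchecking re-runs the page's normal initializer.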
def on_show_modified_only(self, checkbutton):
length = self.liststore.get_n_columns()
def dup_row(row):
l = []
for i in range(0, length):
l.append(row[i])
return l
append_list = []
if self.opage == BOOLEANS_PAGE:
if not checkbutton.get_active():
return self.boolean_initialize(self.application)
for row in self.liststore:
if row[2] in self.cust_dict["boolean"]:
append_list.append(dup_row(row))
if self.opage == FILES_PAGE:
ipage = self.inner_notebook_files.get_current_page()
if not checkbutton.get_active():
if ipage == EXE_PAGE:
return self.executable_files_initialize(self.application)
if ipage == WRITABLE_PAGE:
return self.writable_files_initialize(self.application)
if ipage == APP_PAGE:
return self.application_files_initialize(self.application)
            for row in self.liststore:
                if (row[0], row[2]) in self.cust_dict["fcontext"]:
                    append_list.append(dup_row(row))
if self.opage == NETWORK_PAGE:
if not checkbutton.get_active():
return self.network_initialize(self.application)
for row in self.liststore:
if (row[0], row[1]) in self.cust_dict["port"]:
append_list.append(dup_row(row))
if self.opage == FILE_EQUIV_PAGE:
            if not checkbutton.get_active():
return self.file_equiv_initialize()
for row in self.liststore:
if row[0] in self.cust_dict["fcontext-equiv"]:
append_list.append(dup_row(row))
if self.opage == USER_PAGE:
if not checkbutton.get_active():
return self.user_initialize()
for row in self.liststore:
if row[0] in self.cust_dict["user"]:
append_list.append(dup_row(row))
if self.opage == LOGIN_PAGE:
            if not checkbutton.get_active():
return self.login_initialize()
for row in self.liststore:
if row[0] in self.cust_dict["login"]:
append_list.append(dup_row(row))
self.liststore.clear()
for row in append_list:
iter = self.liststore.append()
for i in range(0, length):
self.liststore.set_value(iter, i, row[i])
def init_modified_files_liststore(self, tree, app, ipage, operation, path, fclass, ftype):
iter = tree.append(None)
tree.set_value(iter, 0, path)
tree.set_value(iter, 1, ftype)
tree.set_value(iter, 2, fclass)
def restore_to_default(self, *args):
print("restore to default clicked...")
def invalid_entry_retry(self, *args):
self.closewindow(self.error_check_window)
self.files_popup_window.set_sensitive(True)
self.network_popup_window.set_sensitive(True)
def error_check_files(self, insert_txt):
if len(insert_txt) == 0 or insert_txt[0] != '/':
self.error_check_window.show()
self.files_popup_window.set_sensitive(False)
self.network_popup_window.set_sensitive(False)
self.error_check_label.set_text((_("The entry '%s' is not a valid path. Paths must begin with a '/'.")) % insert_txt)
return True
return False
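    # Valid port numbers run 1-65535; anything else re-shows the error dialog
    # and desensitizes the popup windows until the entry is corrected.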
    def error_check_network(self, port):
        try:
            pnum = int(port)
            if pnum < 1 or pnum > 65535:
                raise ValueError
        except ValueError:
            self.error_check_window.show()
            self.files_popup_window.set_sensitive(False)
            self.network_popup_window.set_sensitive(False)
            self.error_check_label.set_text((_("Port number must be between 1 and 65535")))
            return True
        return False
def show_more_types(self, *args):
if self.finish_init:
if self.combo_get_active_text(self.files_type_combobox) == _('More...'):
self.files_popup_window.hide()
self.moreTypes_window_files.show()
def update_to_login(self, *args):
self.close_popup()
seuser = self.combo_get_active_text(self.login_seuser_combobox)
mls_range = self.login_mls_entry.get_text()
name = self.login_name_entry.get_text()
if self.modify:
iter = self.get_selected_iter()
oldname = self.login_liststore.get_value(iter, 0)
oldseuser = self.login_liststore.get_value(iter, 1)
oldrange = self.login_liststore.get_value(iter, 2)
self.liststore.set_value(iter, 0, oldname)
self.liststore.set_value(iter, 1, oldseuser)
self.liststore.set_value(iter, 2, oldrange)
self.cur_dict["login"][name] = {"action": "-m", "range": mls_range, "seuser": seuser, "oldrange": oldrange, "oldseuser": oldseuser, "oldname": oldname}
else:
iter = self.liststore.append(None)
self.cur_dict["login"][name] = {"action": "-a", "range": mls_range, "seuser": seuser}
self.liststore.set_value(iter, 0, name)
self.liststore.set_value(iter, 1, seuser)
self.liststore.set_value(iter, 2, mls_range)
self.new_updates()
def update_to_user(self, *args):
self.close_popup()
roles = self.combo_get_active_text(self.user_roles_combobox)
level = self.user_mls_level_entry.get_text()
mls_range = self.user_mls_entry.get_text()
name = self.user_name_entry.get_text()
if self.modify:
iter = self.get_selected_iter()
            oldname = self.user_liststore.get_value(iter, 0)
            oldroles = self.user_liststore.get_value(iter, 1)
            oldlevel = self.user_liststore.get_value(iter, 2)
            oldrange = self.user_liststore.get_value(iter, 3)
self.liststore.set_value(iter, 0, oldname)
self.liststore.set_value(iter, 1, oldroles)
self.liststore.set_value(iter, 2, oldlevel)
self.liststore.set_value(iter, 3, oldrange)
self.cur_dict["user"][name] = {"action": "-m", "range": mls_range, "level": level, "role": roles, "oldrange": oldrange, "oldlevel": oldlevel, "oldroles": oldroles, "oldname": oldname}
else:
iter = self.liststore.append(None)
if mls_range or level:
self.cur_dict["user"][name] = {"action": "-a", "range": mls_range, "level": level, "role": roles}
else:
self.cur_dict["user"][name] = {"action": "-a", "role": roles}
self.liststore.set_value(iter, 0, name)
self.liststore.set_value(iter, 1, roles)
self.liststore.set_value(iter, 2, level)
self.liststore.set_value(iter, 3, mls_range)
self.new_updates()
def update_to_file_equiv(self, *args):
self.close_popup()
dest = self.file_equiv_dest_entry.get_text()
src = self.file_equiv_source_entry.get_text()
if self.modify:
iter = self.get_selected_iter()
            olddest = self.unmarkup(self.liststore.get_value(iter, 0))
            oldsrc = self.unmarkup(self.liststore.get_value(iter, 1))
self.cur_dict["fcontext-equiv"][dest] = {"action": "-m", "src": src, "oldsrc": oldsrc, "olddest": olddest}
else:
iter = self.liststore.append(None)
self.cur_dict["fcontext-equiv"][dest] = {"action": "-a", "src": src}
self.liststore.set_value(iter, 0, self.markup(dest))
self.liststore.set_value(iter, 1, self.markup(src))
def update_to_files(self, *args):
self.close_popup()
self.files_add = True
# Insert Function will be used in the future
path = self.files_path_entry.get_text()
if self.error_check_files(path):
return
setype = self.combo_get_active_text(self.files_type_combobox)
mls = self.files_mls_entry.get_text()
tclass = self.combo_get_active_text(self.files_class_combobox)
if self.modify:
iter = self.get_selected_iter()
            oldpath = self.unmarkup(self.liststore.get_value(iter, 0))
            oldsetype = self.unmarkup(self.liststore.get_value(iter, 1))
oldtclass = self.liststore.get_value(iter, 2)
self.cur_dict["fcontext"][(path, tclass)] = {"action": "-m", "type": setype, "oldtype": oldsetype, "oldpath": oldpath, "oldclass": oldtclass}
else:
iter = self.liststore.append(None)
self.cur_dict["fcontext"][(path, tclass)] = {"action": "-a", "type": setype}
self.liststore.set_value(iter, 0, self.markup(path))
self.liststore.set_value(iter, 1, self.markup(setype))
self.liststore.set_value(iter, 2, self.markup(tclass))
self.files_add = False
self.recursive_path_toggle.set_active(False)
self.new_updates()
def update_to_network(self, *args):
self.network_add = True
ports = self.network_ports_entry.get_text()
if self.error_check_network(ports):
return
if self.network_tcp_button.get_active():
protocol = "tcp"
else:
protocol = "udp"
setype = self.combo_get_active_text(self.network_port_type_combobox)
mls = self.network_mls_entry.get_text()
if self.modify:
iter = self.get_selected_iter()
            oldports = self.unmarkup(self.liststore.get_value(iter, 0))
            oldprotocol = self.unmarkup(self.liststore.get_value(iter, 1))
            oldsetype = self.unmarkup(self.liststore.get_value(iter, 2))
self.cur_dict["port"][(ports, protocol)] = {"action": "-m", "type": setype, "mls": mls, "oldtype": oldsetype, "oldprotocol": oldprotocol, "oldports": oldports}
else:
iter = self.liststore.append(None)
self.cur_dict["port"][(ports, protocol)] = {"action": "-a", "type": setype, "mls": mls}
self.liststore.set_value(iter, 0, ports)
self.liststore.set_value(iter, 1, protocol)
self.liststore.set_value(iter, 2, setype)
self.network_add = False
self.network_popup_window.hide()
self.window.set_sensitive(True)
self.new_updates()
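    # The delete dialogs list only locally customized records; rows the user
    # ticks are queued with action "-d" by on_save_delete_clicked().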
def delete_button_clicked(self, *args):
operation = "Add"
self.window.set_sensitive(False)
if self.opage == NETWORK_PAGE:
self.network_delete_liststore.clear()
port_dict = self.cust_dict["port"]
for ports, protocol in port_dict:
setype = port_dict[(ports, protocol)]["type"]
iter = self.network_delete_liststore.append()
self.network_delete_liststore.set_value(iter, 1, ports)
self.network_delete_liststore.set_value(iter, 2, protocol)
self.network_delete_liststore.set_value(iter, 3, setype)
self.show_popup(self.network_delete_window)
return
if self.opage == FILES_PAGE:
self.files_delete_liststore.clear()
fcontext_dict = self.cust_dict["fcontext"]
for path, tclass in fcontext_dict:
setype = fcontext_dict[(path, tclass)]["type"]
iter = self.files_delete_liststore.append()
self.files_delete_liststore.set_value(iter, 1, path)
self.files_delete_liststore.set_value(iter, 2, setype)
self.files_delete_liststore.set_value(iter, 3, sepolicy.file_type_str[tclass])
self.show_popup(self.files_delete_window)
return
if self.opage == USER_PAGE:
self.user_delete_liststore.clear()
user_dict = self.cust_dict["user"]
for user in user_dict:
roles = user_dict[user]["role"]
mls = user_dict[user].get("range", "")
level = user_dict[user].get("level", "")
iter = self.user_delete_liststore.append()
self.user_delete_liststore.set_value(iter, 1, user)
self.user_delete_liststore.set_value(iter, 2, roles)
self.user_delete_liststore.set_value(iter, 3, level)
self.user_delete_liststore.set_value(iter, 4, mls)
self.show_popup(self.user_delete_window)
return
if self.opage == LOGIN_PAGE:
self.login_delete_liststore.clear()
login_dict = self.cust_dict["login"]
for login in login_dict:
seuser = login_dict[login]["seuser"]
mls = login_dict[login].get("range", "")
iter = self.login_delete_liststore.append()
self.login_delete_liststore.set_value(iter, 1, seuser)
self.login_delete_liststore.set_value(iter, 2, login)
self.login_delete_liststore.set_value(iter, 3, mls)
self.show_popup(self.login_delete_window)
return
if self.opage == FILE_EQUIV_PAGE:
self.file_equiv_delete_liststore.clear()
for items in self.file_equiv_liststore:
if items[2]:
iter = self.file_equiv_delete_liststore.append()
self.file_equiv_delete_liststore.set_value(iter, 1, self.unmarkup(items[0]))
self.file_equiv_delete_liststore.set_value(iter, 2, self.unmarkup(items[1]))
self.show_popup(self.file_equiv_delete_window)
return
def on_save_delete_clicked(self, *args):
self.close_popup()
if self.opage == NETWORK_PAGE:
for delete in self.network_delete_liststore:
if delete[0]:
self.cur_dict["port"][(delete[1], delete[2])] = {"action": "-d", "type": delete[3]}
if self.opage == FILES_PAGE:
for delete in self.files_delete_liststore:
if delete[0]:
self.cur_dict["fcontext"][(delete[1], reverse_file_type_str[delete[3]])] = {"action": "-d", "type": delete[2]}
if self.opage == USER_PAGE:
for delete in self.user_delete_liststore:
if delete[0]:
self.cur_dict["user"][delete[1]] = {"action": "-d", "role": delete[2], "range": delete[4]}
if self.opage == LOGIN_PAGE:
for delete in self.login_delete_liststore:
if delete[0]:
self.cur_dict["login"][delete[2]] = {"action": "-d", "login": delete[2], "seuser": delete[1], "range": delete[3]}
if self.opage == FILE_EQUIV_PAGE:
for delete in self.file_equiv_delete_liststore:
if delete[0]:
self.cur_dict["fcontext-equiv"][delete[1]] = {"action": "-d", "src": delete[2]}
self.new_updates()
def on_save_delete_file_equiv_clicked(self, *args):
for delete in self.files_delete_liststore:
print(delete[0], delete[1], delete[2],)
def on_toggle_update(self, cell, path, model):
model[path][0] = not model[path][0]
def ipage_delete(self, liststore, key):
ctr = 0
for items in liststore:
if items[0] == key[0] and items[2] == key[1]:
iter = liststore.get_iter(ctr)
liststore.remove(iter)
return
ctr += 1
def on_toggle(self, cell, path, model):
if not path:
return
iter = self.boolean_filter.get_iter(path)
iter = self.boolean_filter.convert_iter_to_child_iter(iter)
name = model.get_value(iter, 2)
model.set_value(iter, 0, not model.get_value(iter, 0))
active = model.get_value(iter, 0)
if name in self.cur_dict["boolean"]:
del(self.cur_dict["boolean"][name])
else:
self.cur_dict["boolean"][name] = {"active": active}
self.new_updates()
def get_advanced_filter_data(self, entry, *args):
self.filter_txt = entry.get_text()
self.advanced_search_filter.refilter()
def get_filter_data(self, windows, *args):
#search for desired item
# The txt that the use rinputs into the filter is stored in filter_txt
self.filter_txt = windows.get_text()
self.treefilter.refilter()
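    # Builds the confirmation treestore shown before applying or reverting:
    # one checkable parent row per pending change, with child rows detailing it.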
def update_gui(self, *args):
self.update = True
self.update_treestore.clear()
for bools in self.cur_dict["boolean"]:
operation = self.cur_dict["boolean"][bools]["action"]
iter = self.update_treestore.append(None)
self.update_treestore.set_value(iter, 0, True)
self.update_treestore.set_value(iter, 1, sepolicy.boolean_desc(bools))
self.update_treestore.set_value(iter, 2, action[self.cur_dict["boolean"][bools]['active']])
self.update_treestore.set_value(iter, 3, True)
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 1, (_("SELinux name: %s")) % bools)
self.update_treestore.set_value(niter, 3, False)
for path, tclass in self.cur_dict["fcontext"]:
operation = self.cur_dict["fcontext"][(path, tclass)]["action"]
setype = self.cur_dict["fcontext"][(path, tclass)]["type"]
iter = self.update_treestore.append(None)
            self.update_treestore.set_value(iter, 0, True)
            self.update_treestore.set_value(iter, 2, operation)
if operation == "-a":
self.update_treestore.set_value(iter, 1, (_("Add file labeling for %s")) % self.application)
if operation == "-d":
self.update_treestore.set_value(iter, 1, (_("Delete file labeling for %s")) % self.application)
if operation == "-m":
self.update_treestore.set_value(iter, 1, (_("Modify file labeling for %s")) % self.application)
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
self.update_treestore.set_value(niter, 1, (_("File path: %s")) % path)
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
self.update_treestore.set_value(niter, 1, (_("File class: %s")) % sepolicy.file_type_str[tclass])
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
self.update_treestore.set_value(niter, 1, (_("SELinux file type: %s")) % setype)
for port, protocol in self.cur_dict["port"]:
operation = self.cur_dict["port"][(port, protocol)]["action"]
iter = self.update_treestore.append(None)
self.update_treestore.set_value(iter, 0, True)
self.update_treestore.set_value(iter, 2, operation)
self.update_treestore.set_value(iter, 3, True)
if operation == "-a":
self.update_treestore.set_value(iter, 1, (_("Add ports for %s")) % self.application)
if operation == "-d":
self.update_treestore.set_value(iter, 1, (_("Delete ports for %s")) % self.application)
if operation == "-m":
self.update_treestore.set_value(iter, 1, (_("Modify ports for %s")) % self.application)
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 1, (_("Network ports: %s")) % port)
self.update_treestore.set_value(niter, 3, False)
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 1, (_("Network protocol: %s")) % protocol)
self.update_treestore.set_value(niter, 3, False)
setype = self.cur_dict["port"][(port, protocol)]["type"]
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
self.update_treestore.set_value(niter, 1, (_("SELinux file type: %s")) % setype)
for user in self.cur_dict["user"]:
operation = self.cur_dict["user"][user]["action"]
iter = self.update_treestore.append(None)
            self.update_treestore.set_value(iter, 0, True)
            self.update_treestore.set_value(iter, 2, operation)
if operation == "-a":
self.update_treestore.set_value(iter, 1, _("Add user"))
if operation == "-d":
self.update_treestore.set_value(iter, 1, _("Delete user"))
if operation == "-m":
self.update_treestore.set_value(iter, 1, _("Modify user"))
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 1, (_("SELinux User : %s")) % user)
self.update_treestore.set_value(niter, 3, False)
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
roles = self.cur_dict["user"][user]["role"]
self.update_treestore.set_value(niter, 1, (_("Roles: %s")) % roles)
mls = self.cur_dict["user"][user].get("range", "")
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
self.update_treestore.set_value(niter, 1, _("MLS/MCS Range: %s") % mls)
for login in self.cur_dict["login"]:
operation = self.cur_dict["login"][login]["action"]
iter = self.update_treestore.append(None)
            self.update_treestore.set_value(iter, 0, True)
            self.update_treestore.set_value(iter, 2, operation)
if operation == "-a":
self.update_treestore.set_value(iter, 1, _("Add login mapping"))
if operation == "-d":
self.update_treestore.set_value(iter, 1, _("Delete login mapping"))
if operation == "-m":
self.update_treestore.set_value(iter, 1, _("Modify login mapping"))
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
self.update_treestore.set_value(niter, 1, (_("Login Name : %s")) % login)
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
seuser = self.cur_dict["login"][login]["seuser"]
self.update_treestore.set_value(niter, 1, (_("SELinux User: %s")) % seuser)
mls = self.cur_dict["login"][login].get("range", "")
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
self.update_treestore.set_value(niter, 1, _("MLS/MCS Range: %s") % mls)
for path in self.cur_dict["fcontext-equiv"]:
operation = self.cur_dict["fcontext-equiv"][path]["action"]
iter = self.update_treestore.append(None)
            self.update_treestore.set_value(iter, 0, True)
            self.update_treestore.set_value(iter, 2, operation)
if operation == "-a":
self.update_treestore.set_value(iter, 1, (_("Add file equiv labeling.")))
if operation == "-d":
self.update_treestore.set_value(iter, 1, (_("Delete file equiv labeling.")))
if operation == "-m":
self.update_treestore.set_value(iter, 1, (_("Modify file equiv labeling.")))
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
self.update_treestore.set_value(niter, 1, (_("File path : %s")) % path)
niter = self.update_treestore.append(iter)
self.update_treestore.set_value(niter, 3, False)
src = self.cur_dict["fcontext-equiv"][path]["src"]
self.update_treestore.set_value(niter, 1, (_("Equivalence: %s")) % src)
self.show_popup(self.update_window)
def set_active_application_button(self):
if self.boolean_radio_button.get_active():
self.active_button = self.boolean_radio_button
if self.files_radio_button.get_active():
self.active_button = self.files_radio_button
if self.transitions_radio_button.get_active():
self.active_button = self.transitions_radio_button
if self.network_radio_button.get_active():
self.active_button = self.network_radio_button
def clearbuttons(self, clear=True):
self.main_selection_window.hide()
self.boolean_radio_button.set_visible(False)
self.files_radio_button.set_visible(False)
self.network_radio_button.set_visible(False)
self.transitions_radio_button.set_visible(False)
self.system_radio_button.set_visible(False)
self.lockdown_radio_button.set_visible(False)
self.user_radio_button.set_visible(False)
self.login_radio_button.set_visible(False)
if clear:
self.completion_entry.set_text("")
def show_system_page(self):
self.clearbuttons()
self.system_radio_button.set_visible(True)
self.lockdown_radio_button.set_visible(True)
self.applications_selection_button.set_label(_("System"))
self.system_radio_button.set_active(True)
self.tab_change()
self.idle_func()
def show_file_equiv_page(self, *args):
self.clearbuttons()
self.file_equiv_initialize()
self.file_equiv_radio_button.set_active(True)
self.applications_selection_button.set_label(_("File Equivalence"))
self.tab_change()
self.idle_func()
self.add_button.set_sensitive(True)
self.delete_button.set_sensitive(True)
def show_users_page(self):
self.clearbuttons()
self.login_radio_button.set_visible(True)
self.user_radio_button.set_visible(True)
self.applications_selection_button.set_label(_("Users"))
self.login_radio_button.set_active(True)
self.tab_change()
self.user_initialize()
self.login_initialize()
self.idle_func()
self.add_button.set_sensitive(True)
self.delete_button.set_sensitive(True)
def show_applications_page(self):
self.clearbuttons(False)
self.boolean_radio_button.set_visible(True)
self.files_radio_button.set_visible(True)
self.network_radio_button.set_visible(True)
self.transitions_radio_button.set_visible(True)
self.boolean_radio_button.set_active(True)
self.tab_change()
self.idle_func()
def system_interface(self, *args):
self.show_system_page()
def users_interface(self, *args):
self.show_users_page()
def show_mislabeled_files(self, checkbutton, *args):
iterlist = []
ctr = 0
ipage = self.inner_notebook_files.get_current_page()
        if checkbutton.get_active():
            for items in self.liststore:
                iter = self.treesort.get_iter(ctr)
                iter = self.treesort.convert_iter_to_child_iter(iter)
                iter = self.treefilter.convert_iter_to_child_iter(iter)
                if iter is not None:
                    if not self.liststore.get_value(iter, 4):
                        iterlist.append(iter)
                ctr += 1
            for iters in iterlist:
                self.liststore.remove(iters)
        elif self.application is not None:
self.liststore.clear()
if ipage == EXE_PAGE:
self.executable_files_initialize(self.application)
elif ipage == WRITABLE_PAGE:
self.writable_files_initialize(self.application)
elif ipage == APP_PAGE:
self.application_files_initialize(self.application)
def fix_mislabeled(self, path):
cur = selinux.getfilecon(path)[1].split(":")[2]
con = selinux.matchpathcon(path, 0)[1].split(":")[2]
if self.verify(_("Run restorecon on %(PATH)s to change its type from %(CUR_CONTEXT)s to the default %(DEF_CONTEXT)s?") % {"PATH": path, "CUR_CONTEXT": cur, "DEF_CONTEXT": con}, title="restorecon dialog") == Gtk.ResponseType.YES:
self.dbus.restorecon(path)
self.application_selected()
def new_updates(self, *args):
self.update_button.set_sensitive(self.modified())
self.revert_button.set_sensitive(self.modified())
def update_or_revert_changes(self, button, *args):
self.update_gui()
self.update = (button.get_label() == _("Update"))
if self.update:
self.update_window.set_title(_("Update Changes"))
else:
self.update_window.set_title(_("Revert Changes"))
def apply_changes_button_press(self, *args):
self.close_popup()
if self.update:
self.update_the_system()
else:
self.revert_data()
self.finish_init = False
self.previously_modified_initialize(self.dbus.customized())
self.finish_init = True
self.clear_filters()
self.application_selected()
self.new_updates()
self.update_treestore.clear()
def update_the_system(self, *args):
self.close_popup()
update_buffer = self.format_update()
self.wait_mouse()
try:
self.dbus.semanage(update_buffer)
except dbus.exceptions.DBusException as e:
print(e)
self.ready_mouse()
self.init_cur()
def ipage_value_lookup(self, lookup):
ipage_values = {"Executable Files": 0, "Writable Files": 1, "Application File Type": 2, "Inbound": 1, "Outbound": 0}
for value in ipage_values:
if value == lookup:
return ipage_values[value]
return "Booleans"
def get_attributes_update(self, attribute):
attribute = attribute.split(": ")[1]
bool_id = attribute.split(": ")[0]
if bool_id == "SELinux name":
self.bool_revert = attribute
else:
return attribute
def format_update(self):
self.revert_data()
update_buffer = ""
for k in self.cur_dict:
if k in "boolean":
for b in self.cur_dict[k]:
update_buffer += "boolean -m -%d %s\n" % (self.cur_dict[k][b]["active"], b)
if k in "login":
for l in self.cur_dict[k]:
if self.cur_dict[k][l]["action"] == "-d":
update_buffer += "login -d %s\n" % l
elif "range" in self.cur_dict[k][l]:
update_buffer += "login %s -s %s -r %s %s\n" % (self.cur_dict[k][l]["action"], self.cur_dict[k][l]["seuser"], self.cur_dict[k][l]["range"], l)
else:
update_buffer += "login %s -s %s %s\n" % (self.cur_dict[k][l]["action"], self.cur_dict[k][l]["seuser"], l)
if k in "user":
for u in self.cur_dict[k]:
if self.cur_dict[k][u]["action"] == "-d":
update_buffer += "user -d %s\n" % u
elif "level" in self.cur_dict[k][u] and "range" in self.cur_dict[k][u]:
update_buffer += "user %s -L %s -r %s -R %s %s\n" % (self.cur_dict[k][u]["action"], self.cur_dict[k][u]["level"], self.cur_dict[k][u]["range"], self.cur_dict[k][u]["role"], u)
else:
update_buffer += "user %s -R %s %s\n" % (self.cur_dict[k][u]["action"], self.cur_dict[k][u]["role"], u)
if k in "fcontext-equiv":
for f in self.cur_dict[k]:
if self.cur_dict[k][f]["action"] == "-d":
update_buffer += "fcontext -d %s\n" % f
else:
update_buffer += "fcontext %s -e %s %s\n" % (self.cur_dict[k][f]["action"], self.cur_dict[k][f]["src"], f)
if k in "fcontext":
for f in self.cur_dict[k]:
if self.cur_dict[k][f]["action"] == "-d":
update_buffer += "fcontext -d %s\n" % f
else:
update_buffer += "fcontext %s -t %s -f %s %s\n" % (self.cur_dict[k][f]["action"], self.cur_dict[k][f]["type"], self.cur_dict[k][f]["class"], f)
if k in "port":
for port, protocol in self.cur_dict[k]:
if self.cur_dict[k][(port, protocol)]["action"] == "-d":
update_buffer += "port -d -p %s %s\n" % (protocol, port)
else:
update_buffer += "port %s -t %s -p %s %s\n" % (self.cur_dict[k][f]["action"], self.cur_dict[k][f]["type"], protocol, port)
return update_buffer
def revert_data(self):
ctr = 0
remove_list = []
update_buffer = ""
for items in self.update_treestore:
if not self.update_treestore[ctr][0]:
remove_list.append(ctr)
ctr += 1
remove_list.reverse()
for ctr in remove_list:
self.remove_cur(ctr)
def reveal_advanced_system(self, label, *args):
advanced = label.get_text() == ADVANCED_LABEL[0]
if advanced:
label.set_text(ADVANCED_LABEL[1])
else:
label.set_text(ADVANCED_LABEL[0])
self.system_policy_label.set_visible(advanced)
self.system_policy_type_combobox.set_visible(advanced)
def reveal_advanced(self, label, *args):
advanced = label.get_text() == ADVANCED_LABEL[0]
if advanced:
label.set_text(ADVANCED_LABEL[1])
else:
label.set_text(ADVANCED_LABEL[0])
self.files_mls_label.set_visible(advanced)
self.files_mls_entry.set_visible(advanced)
self.network_mls_label.set_visible(advanced)
self.network_mls_entry.set_visible(advanced)
def on_show_advanced_search_window(self, label, *args):
if label.get_text() == ADVANCED_SEARCH_LABEL[1]:
label.set_text(ADVANCED_SEARCH_LABEL[0])
self.close_popup()
else:
label.set_text(ADVANCED_SEARCH_LABEL[1])
self.show_popup(self.advanced_search_window)
def set_enforce_text(self, value):
if value:
self.status_bar.push(self.context_id, _("System Status: Enforcing"))
self.current_status_enforcing.set_active(True)
else:
self.status_bar.push(self.context_id, _("System Status: Permissive"))
self.current_status_permissive.set_active(True)
def set_enforce(self, button):
if not self.finish_init:
return
self.dbus.setenforce(button.get_active())
self.set_enforce_text(button.get_active())
def on_browse_select(self, *args):
filename = self.file_dialog.get_filename()
if filename is None:
return
self.clear_entry = False
self.file_dialog.hide()
self.files_path_entry.set_text(filename)
if self.import_export == 'Import':
self.import_config(filename)
elif self.import_export == 'Export':
self.export_config(filename)
def recursive_path(self, *args):
path = self.files_path_entry.get_text()
if self.recursive_path_toggle.get_active():
if not path.endswith("(/.*)?"):
self.files_path_entry.set_text(path + "(/.*)?")
elif path.endswith("(/.*)?"):
path = path.split("(/.*)?")[0]
self.files_path_entry.set_text(path)
def highlight_entry_text(self, entry_obj, *args):
txt = entry_obj.get_text()
if self.clear_entry:
entry_obj.set_text('')
self.clear_entry = False
def autofill_add_files_entry(self, entry):
text = entry.get_text()
if text == '':
return
if text.endswith("(/.*)?"):
self.recursive_path_toggle.set_active(True)
for d in sepolicy.DEFAULT_DIRS:
if text.startswith(d):
for t in self.files_type_combolist:
if t[0].endswith(sepolicy.DEFAULT_DIRS[d]):
self.combo_set_active_text(self.files_type_combobox, t[0])
def resize_columns(self, *args):
self.boolean_column_1 = self.boolean_treeview.get_column(1)
width = self.boolean_column_1.get_width()
renderer = self.boolean_column_1.get_cells()
def browse_for_files(self, *args):
self.file_dialog.show()
def close_config_window(self, *args):
self.file_dialog.hide()
def change_default_policy(self, *args):
if self.typeHistory == self.system_policy_type_combobox.get_active():
return
if self.verify(_("Changing the policy type will cause a relabel of the entire file system on the next boot. Relabeling takes a long time depending on the size of the file system. Do you wish to continue?")) == Gtk.ResponseType.NO:
self.system_policy_type_combobox.set_active(self.typeHistory)
return None
self.dbus.change_default_policy(self.combo_get_active_text(self.system_policy_type_combobox))
self.dbus.relabel_on_boot(True)
self.typeHistory = self.system_policy_type_combobox.get_active()
def change_default_mode(self, button):
if not self.finish_init:
return
self.enabled_changed(button)
if button.get_active():
self.dbus.change_default_mode(button.get_label().lower())
def import_config_show(self, *args):
self.file_dialog.set_action(Gtk.FileChooserAction.OPEN)
self.file_dialog.set_title("Import Configuration")
self.file_dialog.show()
#self.file_dialog.set_uri('/tmp')
self.import_export = 'Import'
def export_config_show(self, *args):
self.file_dialog.set_action(Gtk.FileChooserAction.SAVE)
self.file_dialog.set_title("Export Configuration")
self.file_dialog.show()
self.import_export = 'Export'
def export_config(self, filename):
self.wait_mouse()
buf = self.dbus.customized()
with open(filename, 'w') as fd:
fd.write(buf)
self.ready_mouse()
def import_config(self, filename):
fd = open(filename, "r")
buf = fd.read()
fd.close()
self.wait_mouse()
try:
self.dbus.semanage(buf)
except OSError:
pass
self.ready_mouse()
def init_dictionary(self, dic, app, ipage, operation, p, q, ftype, mls, changed, old):
if (app, ipage, operation) not in dic:
dic[app, ipage, operation] = {}
if (p, q) not in dic[app, ipage, operation]:
dic[app, ipage, operation][p, q] = {'type': ftype, 'mls': mls, 'changed': changed, 'old': old}
def translate_bool(self, b):
b = b.split('-')[1]
if b == '0':
return False
if b == '1':
return True
def relabel_on_reboot(self, *args):
active = self.relabel_button.get_active()
exists = os.path.exists("/.autorelabel")
if active and exists:
return
if not active and not exists:
return
try:
self.dbus.relabel_on_boot(active)
except dbus.exceptions.DBusException as e:
self.error(e)
def closewindow(self, window, *args):
window.hide()
self.recursive_path_toggle.set_active(False)
self.window.set_sensitive(True)
if self.moreTypes_window_files == window:
self.show_popup(self.files_popup_window)
if self.combo_get_active_text(self.files_type_combobox) == _('More...'):
self.files_type_combobox.set_active(0)
if self.error_check_window == window:
if self.files_add:
self.show_popup(self.files_popup_window)
elif self.network_add:
self.show_popup(self.network_popup_window)
if self.files_mls_label.get_visible() or self.network_mls_label.get_visible():
self.advanced_text_files.set_visible(True)
self.files_mls_label.set_visible(False)
self.files_mls_entry.set_visible(False)
self.advanced_text_network.set_visible(True)
self.network_mls_label.set_visible(False)
self.network_mls_entry.set_visible(False)
if self.main_advanced_label.get_text() == ADVANCED_SEARCH_LABEL[1]:
self.main_advanced_label.set_text(ADVANCED_SEARCH_LABEL[0])
return True
def wait_mouse(self):
self.window.get_window().set_cursor(self.busy_cursor)
self.idle_func()
def ready_mouse(self):
self.window.get_window().set_cursor(self.ready_cursor)
self.idle_func()
def verify(self, message, title=""):
dlg = Gtk.MessageDialog(None, 0, Gtk.MessageType.INFO,
Gtk.ButtonsType.YES_NO,
message)
dlg.set_title(title)
dlg.set_position(Gtk.WindowPosition.MOUSE)
dlg.show_all()
rc = dlg.run()
dlg.destroy()
return rc
def error(self, message):
dlg = Gtk.MessageDialog(None, 0, Gtk.MessageType.ERROR,
Gtk.ButtonsType.CLOSE,
message)
dlg.set_position(Gtk.WindowPosition.MOUSE)
dlg.show_all()
dlg.run()
dlg.destroy()
def enabled_changed(self, radio):
if not radio.get_active():
return
label = radio.get_label()
if label == 'Disabled' and self.enforce_mode != DISABLED:
if self.verify(_("Changing to SELinux disabled requires a reboot. It is not recommended. If you later decide to turn SELinux back on, the system will be required to relabel. If you just want to see if SELinux is causing a problem on your system, you can go to permissive mode which will only log errors and not enforce SELinux policy. Permissive mode does not require a reboot. Do you wish to continue?")) == Gtk.ResponseType.NO:
self.enforce_button.set_active(True)
if label != 'Disabled' and self.enforce_mode == DISABLED:
if self.verify(_("Changing to SELinux enabled will cause a relabel of the entire file system on the next boot. Relabeling takes a long time depending on the size of the file system. Do you wish to continue?")) == Gtk.ResponseType.NO:
self.enforce_button.set_active(True)
self.enforce_button = radio
def clear_filters(self, *args):
self.filter_entry.set_text('')
self.show_modified_only.set_active(False)
def unconfined_toggle(self, *args):
if not self.finish_init:
return
self.wait_mouse()
if self.enable_unconfined_button.get_active():
self.dbus.semanage("module -e unconfined")
else:
self.dbus.semanage("module -d unconfined")
self.ready_mouse()
def permissive_toggle(self, *args):
if not self.finish_init:
return
self.wait_mouse()
if self.enable_permissive_button.get_active():
self.dbus.semanage("module -e permissivedomains")
else:
self.dbus.semanage("module -d permissivedomains")
self.ready_mouse()
def confirmation_close(self, button, *args):
if len(self.update_treestore) > 0:
if self.verify(_("You are attempting to close the application without applying your changes.\n * To apply changes you have made during this session, click No and click Update.\n * To leave the application without applying your changes, click Yes. All changes that you have made during this session will be lost."), _("Loss of data Dialog")) == Gtk.ResponseType.NO:
return True
self.quit()
def quit(self, *args):
sys.exit(0)
if __name__ == '__main__':
start = SELinuxGui()
```
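For reference, `format_update()` above serializes every pending change into a flat buffer of semanage-style lines, which `update_the_system()` hands to `self.dbus.semanage()` as one batch. A minimal sketch of that buffer's shape; the boolean, type, path, and port values are hypothetical placeholders, not taken from the file:

```python
# Hedged sketch of the buffer format_update() builds; all values below
# are placeholders.
update_buffer = (
    "boolean -m -1 httpd_enable_cgi\n"                          # toggle a boolean on
    "fcontext -a -t httpd_sys_content_t -f f /srv/web(/.*)?\n"  # add a file label
    "port -a -t http_port_t -p tcp 8080\n"                      # label a TCP port
)
# The GUI then applies the whole batch in one transaction:
#     self.dbus.semanage(update_buffer)
```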
#### File: sepolicy/sepolicy/__init__.py
```python
import errno
import selinux
import setools
import glob
import sepolgen.defaults as defaults
import sepolgen.interfaces as interfaces
import sys
import os
import re
import gzip
PROGNAME = "policycoreutils"
try:
import gettext
kwargs = {}
if sys.version_info < (3,):
kwargs['unicode'] = True
gettext.install(PROGNAME,
localedir="/usr/share/locale",
codeset='utf-8',
**kwargs)
except:
try:
import builtins
builtins.__dict__['_'] = str
except ImportError:
import __builtin__
__builtin__.__dict__['_'] = unicode
TYPE = 1
ROLE = 2
ATTRIBUTE = 3
PORT = 4
USER = 5
BOOLEAN = 6
TCLASS = 7
ALLOW = 'allow'
AUDITALLOW = 'auditallow'
NEVERALLOW = 'neverallow'
DONTAUDIT = 'dontaudit'
SOURCE = 'source'
TARGET = 'target'
PERMS = 'permlist'
CLASS = 'class'
TRANSITION = 'transition'
ROLE_ALLOW = 'role_allow'
# Autofill for adding files *************************
DEFAULT_DIRS = {}
DEFAULT_DIRS["/etc"] = "etc_t"
DEFAULT_DIRS["/tmp"] = "tmp_t"
DEFAULT_DIRS["/usr/lib/systemd/system"] = "unit_file_t"
DEFAULT_DIRS["/lib/systemd/system"] = "unit_file_t"
DEFAULT_DIRS["/etc/systemd/system"] = "unit_file_t"
DEFAULT_DIRS["/var/cache"] = "var_cache_t"
DEFAULT_DIRS["/var/lib"] = "var_lib_t"
DEFAULT_DIRS["/var/log"] = "log_t"
DEFAULT_DIRS["/var/run"] = "var_run_t"
DEFAULT_DIRS["/run"] = "var_run_t"
DEFAULT_DIRS["/run/lock"] = "var_lock_t"
DEFAULT_DIRS["/var/run/lock"] = "var_lock_t"
DEFAULT_DIRS["/var/spool"] = "var_spool_t"
DEFAULT_DIRS["/var/www"] = "content_t"
file_type_str = {}
file_type_str["a"] = _("all files")
file_type_str["f"] = _("regular file")
file_type_str["d"] = _("directory")
file_type_str["c"] = _("character device")
file_type_str["b"] = _("block device")
file_type_str["s"] = _("socket file")
file_type_str["l"] = _("symbolic link")
file_type_str["p"] = _("named pipe")
trans_file_type_str = {}
trans_file_type_str[""] = "a"
trans_file_type_str["--"] = "f"
trans_file_type_str["-d"] = "d"
trans_file_type_str["-c"] = "c"
trans_file_type_str["-b"] = "b"
trans_file_type_str["-s"] = "s"
trans_file_type_str["-l"] = "l"
trans_file_type_str["-p"] = "p"
# the setools policy handle
_pol = None
# cache the lookup results
file_equiv_modified = None
file_equiv = None
local_files = None
fcdict = None
methods = []
all_types = None
all_types_info = None
user_types = None
role_allows = None
portrecs = None
portrecsbynum = None
all_domains = None
roles = None
selinux_user_list = None
login_mappings = None
file_types = None
port_types = None
bools = None
all_attributes = None
booleans = None
booleans_dict = None
all_allow_rules = None
all_transitions = None
def policy_sortkey(policy_path):
# Parse the extension of a policy path which looks like .../policy/policy.31
extension = policy_path.rsplit('/policy.', 1)[1]
try:
return int(extension), policy_path
except ValueError:
# Fallback with sorting on the full path
return 0, policy_path
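# Hedged illustration with hypothetical paths: a plain string sort would put
# "policy.31" before "policy.4", but the numeric key sorts by version, so
# callers like get_installed_policy() can take the last element as newest:
#     sorted(["/x/policy.31", "/x/policy.4"], key=policy_sortkey)
#       -> ["/x/policy.4", "/x/policy.31"]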
def get_installed_policy(root="/"):
try:
path = root + selinux.selinux_binary_policy_path()
policies = glob.glob("%s.*" % path)
policies.sort(key=policy_sortkey)
return policies[-1]
except:
pass
raise ValueError(_("No SELinux Policy installed"))
def get_store_policy(store):
"""Get the path to the policy file located in the given store name"""
policies = glob.glob("%s%s/policy/policy.*" %
(selinux.selinux_path(), store))
if not policies:
return None
# Return the policy with the higher version number
policies.sort(key=policy_sortkey)
return policies[-1]
def policy(policy_file):
global all_domains
global all_attributes
global bools
global all_types
global role_allows
global users
global roles
global file_types
global port_types
all_domains = None
all_attributes = None
bools = None
all_types = None
role_allows = None
users = None
roles = None
file_types = None
port_types = None
global _pol
try:
_pol = setools.SELinuxPolicy(policy_file)
except:
raise ValueError(_("Failed to read %s policy file") % policy_file)
def load_store_policy(store):
policy_file = get_store_policy(store)
if not policy_file:
return None
policy(policy_file)
try:
policy_file = get_installed_policy()
policy(policy_file)
except ValueError as e:
if selinux.is_selinux_enabled() == 1:
raise e
def info(setype, name=None):
if setype == TYPE:
q = setools.TypeQuery(_pol)
q.name = name
results = list(q.results())
if name and len(results) < 1:
# type not found, try alias
q.name = None
q.alias = name
results = list(q.results())
return ({
'aliases': list(map(str, x.aliases())),
'name': str(x),
'permissive': bool(x.ispermissive),
'attributes': list(map(str, x.attributes()))
} for x in results)
elif setype == ROLE:
q = setools.RoleQuery(_pol)
if name:
q.name = name
return ({
'name': str(x),
'roles': list(map(str, x.expand())),
'types': list(map(str, x.types())),
} for x in q.results())
elif setype == ATTRIBUTE:
q = setools.TypeAttributeQuery(_pol)
if name:
q.name = name
return ({
'name': str(x),
'types': list(map(str, x.expand())),
} for x in q.results())
elif setype == PORT:
q = setools.PortconQuery(_pol)
if name:
ports = [int(i) for i in name.split("-")]
if len(ports) == 2:
q.ports = ports
elif len(ports) == 1:
q.ports = (ports[0], ports[0])
if _pol.mls:
return ({
'high': x.ports.high,
'protocol': str(x.protocol),
'range': str(x.context.range_),
'type': str(x.context.type_),
'low': x.ports.low,
} for x in q.results())
return ({
'high': x.ports.high,
'protocol': str(x.protocol),
'type': str(x.context.type_),
'low': x.ports.low,
} for x in q.results())
elif setype == USER:
q = setools.UserQuery(_pol)
if name:
q.name = name
if _pol.mls:
return ({
'range': str(x.mls_range),
'name': str(x),
'roles': list(map(str, x.roles)),
'level': str(x.mls_level),
} for x in q.results())
return ({
'name': str(x),
'roles': list(map(str, x.roles)),
} for x in q.results())
elif setype == BOOLEAN:
q = setools.BoolQuery(_pol)
if name:
q.name = name
return ({
'name': str(x),
'state': x.state,
} for x in q.results())
elif setype == TCLASS:
q = setools.ObjClassQuery(_pol)
if name:
q.name = name
return ({
'name': str(x),
'permlist': list(x.perms),
} for x in q.results())
else:
raise ValueError("Invalid type")
def _setools_rule_to_dict(rule):
d = {
'type': str(rule.ruletype),
'source': str(rule.source),
'target': str(rule.target),
'class': str(rule.tclass),
}
# Evaluate boolean expression associated with given rule (if there is any)
try:
# Get state of all booleans in the conditional expression
boolstate = {}
for boolean in rule.conditional.booleans:
boolstate[str(boolean)] = boolean.state
# evaluate if the rule is enabled
enabled = rule.conditional.evaluate(**boolstate) == rule.conditional_block
except AttributeError:
# non-conditional rules are always enabled
enabled = True
d['enabled'] = enabled
try:
d['permlist'] = list(map(str, rule.perms))
except AttributeError:
pass
try:
d['transtype'] = str(rule.default)
except AttributeError:
pass
try:
d['boolean'] = [(str(rule.conditional), enabled)]
except AttributeError:
pass
try:
d['filename'] = rule.filename
except AttributeError:
pass
return d
def search(types, seinfo=None):
if not seinfo:
seinfo = {}
valid_types = set([ALLOW, AUDITALLOW, NEVERALLOW, DONTAUDIT, TRANSITION, ROLE_ALLOW])
for setype in types:
if setype not in valid_types:
raise ValueError("Type has to be in %s" % " ".join(valid_types))
source = None
if SOURCE in seinfo:
source = str(seinfo[SOURCE])
target = None
if TARGET in seinfo:
target = str(seinfo[TARGET])
tclass = None
if CLASS in seinfo:
tclass = str(seinfo[CLASS]).split(',')
toret = []
tertypes = []
if ALLOW in types:
tertypes.append(ALLOW)
if NEVERALLOW in types:
tertypes.append(NEVERALLOW)
if AUDITALLOW in types:
tertypes.append(AUDITALLOW)
if DONTAUDIT in types:
tertypes.append(DONTAUDIT)
if len(tertypes) > 0:
q = setools.TERuleQuery(_pol,
ruletype=tertypes,
source=source,
target=target,
tclass=tclass)
if PERMS in seinfo:
q.perms = seinfo[PERMS]
toret += [_setools_rule_to_dict(x) for x in q.results()]
if TRANSITION in types:
rtypes = ['type_transition', 'type_change', 'type_member']
q = setools.TERuleQuery(_pol,
ruletype=rtypes,
source=source,
target=target,
tclass=tclass)
if PERMS in seinfo:
q.perms = seinfo[PERMS]
toret += [_setools_rule_to_dict(x) for x in q.results()]
if ROLE_ALLOW in types:
ratypes = ['allow']
q = setools.RBACRuleQuery(_pol,
ruletype=ratypes,
source=source,
target=target,
tclass=tclass)
for r in q.results():
toret.append({'source': str(r.source),
'target': str(r.target)})
return toret
def get_conditionals(src, dest, tclass, perm):
tdict = {}
tlist = []
src_list = [src]
dest_list = [dest]
# add assigned attributes
try:
src_list += list(filter(lambda x: x['name'] == src, get_all_types_info()))[0]['attributes']
except:
pass
try:
dest_list += list(filter(lambda x: x['name'] == dest, get_all_types_info()))[0]['attributes']
except:
pass
allows = filter(lambda x:
x['source'] in src_list and
x['target'] in dest_list and
set(perm).issubset(x[PERMS]) and
'boolean' in x,
get_all_allow_rules())
try:
for i in allows:
tdict.update({'source': i['source'], 'boolean': i['boolean']})
if tdict not in tlist:
tlist.append(tdict)
tdict = {}
except KeyError:
return(tlist)
return (tlist)
def get_conditionals_format_text(cond):
enabled = False
for x in cond:
if x['boolean'][0][1]:
enabled = True
break
return _("-- Allowed %s [ %s ]") % (enabled, " || ".join(set(map(lambda x: "%s=%d" % (x['boolean'][0][0], x['boolean'][0][1]), cond))))
def get_types_from_attribute(attribute):
return list(info(ATTRIBUTE, attribute))[0]["types"]
def get_file_types(setype):
flist = []
mpaths = {}
for f in get_all_file_types():
if f.startswith(gen_short_name(setype)):
flist.append(f)
fcdict = get_fcdict()
for f in flist:
try:
mpaths[f] = (fcdict[f]["regex"], file_type_str[fcdict[f]["ftype"]])
except KeyError:
mpaths[f] = []
return mpaths
def get_real_type_name(name):
"""Return the real name of a type
* If 'name' refers to a type alias, return the corresponding type name.
* Otherwise return the original name (even if the type does not exist).
"""
if not name:
return name
try:
return next(info(TYPE, name))["name"]
except (RuntimeError, StopIteration):
return name
def get_writable_files(setype):
file_types = get_all_file_types()
all_writes = []
mpaths = {}
permlist = search([ALLOW], {'source': setype, 'permlist': ['open', 'write'], 'class': 'file'})
if permlist is None or len(permlist) == 0:
return mpaths
fcdict = get_fcdict()
attributes = ["proc_type", "sysctl_type"]
for i in permlist:
if i['target'] in attributes:
continue
if "enabled" in i:
if not i["enabled"]:
continue
if i['target'].endswith("_t"):
if i['target'] not in file_types:
continue
if i['target'] not in all_writes:
if i['target'] != setype:
all_writes.append(i['target'])
else:
for t in get_types_from_attribute(i['target']):
if t not in all_writes:
all_writes.append(t)
for f in all_writes:
try:
mpaths[f] = (fcdict[f]["regex"], file_type_str[fcdict[f]["ftype"]])
except KeyError:
mpaths[f] = [] # {"regex":[],"paths":[]}
return mpaths
def find_file(reg):
if os.path.exists(reg):
return [reg]
try:
pat = re.compile(r"%s$" % reg)
except:
print("bad reg:", reg)
return []
p = reg
if p.endswith("(/.*)?"):
p = p[:-6] + "/"
path = os.path.dirname(p)
try: # Bug fix: when "all files on system"
if path[-1] != "/": # is pass in it breaks without try block
path += "/"
except IndexError:
print("try failed got an IndexError")
try:
pat = re.compile(r"%s$" % reg)
return [x for x in map(lambda x: path + x, os.listdir(path)) if pat.match(x)]
except:
return []
def find_all_files(domain, exclude_list=[]):
executable_files = get_entrypoints(domain)
for exe in executable_files.keys():
if exe.endswith("_exec_t") and exe not in exclude_list:
for path in executable_files[exe]:
for f in find_file(path):
return f
return None
def find_entrypoint_path(exe, exclude_list=[]):
fcdict = get_fcdict()
try:
if exe.endswith("_exec_t") and exe not in exclude_list:
for path in fcdict[exe]["regex"]:
for f in find_file(path):
return f
except KeyError:
pass
return None
def read_file_equiv(edict, fc_path, modify):
try:
with open(fc_path, "r") as fd:
for e in fd:
f = e.split()
if f and not f[0].startswith('#'):
edict[f[0]] = {"equiv": f[1], "modify": modify}
except OSError as e:
if e.errno != errno.ENOENT:
raise
return edict
def get_file_equiv_modified(fc_path=selinux.selinux_file_context_path()):
global file_equiv_modified
if file_equiv_modified:
return file_equiv_modified
file_equiv_modified = {}
file_equiv_modified = read_file_equiv(file_equiv_modified, fc_path + ".subs", modify=True)
return file_equiv_modified
def get_file_equiv(fc_path=selinux.selinux_file_context_path()):
global file_equiv
if file_equiv:
return file_equiv
file_equiv = get_file_equiv_modified(fc_path)
file_equiv = read_file_equiv(file_equiv, fc_path + ".subs_dist", modify=False)
return file_equiv
def get_local_file_paths(fc_path=selinux.selinux_file_context_path()):
global local_files
if local_files:
return local_files
local_files = []
try:
with open(fc_path + ".local", "r") as fd:
fc = fd.readlines()
except OSError as e:
if e.errno != errno.ENOENT:
raise
return []
for i in fc:
rec = i.split()
if len(rec) == 0:
continue
try:
if len(rec) > 2:
ftype = trans_file_type_str[rec[1]]
else:
ftype = "a"
local_files.append((rec[0], ftype))
except KeyError:
pass
return local_files
def get_fcdict(fc_path=selinux.selinux_file_context_path()):
global fcdict
if fcdict:
return fcdict
fd = open(fc_path, "r")
fc = fd.readlines()
fd.close()
fd = open(fc_path + ".homedirs", "r")
fc += fd.readlines()
fd.close()
fcdict = {}
try:
with open(fc_path + ".local", "r") as fd:
fc += fd.readlines()
except OSError as e:
if e.errno != errno.ENOENT:
raise
for i in fc:
rec = i.split()
try:
if len(rec) > 2:
ftype = trans_file_type_str[rec[1]]
else:
ftype = "a"
t = rec[-1].split(":")[2]
if t in fcdict:
fcdict[t]["regex"].append(rec[0])
else:
fcdict[t] = {"regex": [rec[0]], "ftype": ftype}
except:
pass
fcdict["logfile"] = {"regex": ["all log files"]}
fcdict["user_tmp_type"] = {"regex": ["all user tmp files"]}
fcdict["user_home_type"] = {"regex": ["all user home files"]}
fcdict["virt_image_type"] = {"regex": ["all virtual image files"]}
fcdict["noxattrfs"] = {"regex": ["all files on file systems which do not support extended attributes"]}
fcdict["sandbox_tmpfs_type"] = {"regex": ["all sandbox content in tmpfs file systems"]}
fcdict["user_tmpfs_type"] = {"regex": ["all user content in tmpfs file systems"]}
fcdict["file_type"] = {"regex": ["all files on the system"]}
fcdict["samba_share_t"] = {"regex": ["use this label for random content that will be shared using samba"]}
return fcdict
def get_transitions_into(setype):
try:
return [x for x in search([TRANSITION], {'class': 'process'}) if x["transtype"] == setype]
except (TypeError, AttributeError):
pass
return None
def get_transitions(setype):
try:
return search([TRANSITION], {'source': setype, 'class': 'process'})
except (TypeError, AttributeError):
pass
return None
def get_file_transitions(setype):
try:
return [x for x in search([TRANSITION], {'source': setype}) if x['class'] != "process"]
except (TypeError, AttributeError):
pass
return None
def get_boolean_rules(setype, boolean):
boollist = []
permlist = search([ALLOW], {'source': setype})
for p in permlist:
if "boolean" in p:
try:
for b in p["boolean"]:
if boolean in b:
boollist.append(p)
except:
pass
return boollist
def get_all_entrypoints():
return get_types_from_attribute("entry_type")
def get_entrypoint_types(setype):
q = setools.TERuleQuery(_pol,
ruletype=[ALLOW],
source=setype,
tclass=["file"],
perms=["entrypoint"])
return [str(x.target) for x in q.results() if x.source == setype]
def get_init_transtype(path):
entrypoint = selinux.getfilecon(path)[1].split(":")[2]
try:
entrypoints = list(filter(lambda x: x['target'] == entrypoint, search([TRANSITION], {'source': "init_t", 'class': 'process'})))
return entrypoints[0]["transtype"]
except (TypeError, AttributeError, IndexError):
pass
return None
def get_init_entrypoint(transtype):
q = setools.TERuleQuery(_pol,
ruletype=["type_transition"],
source="init_t",
tclass=["process"])
entrypoints = []
for i in q.results():
try:
if i.default == transtype:
entrypoints.append(i.target)
except AttributeError:
continue
return entrypoints
def get_init_entrypoints_str():
q = setools.TERuleQuery(_pol,
ruletype=["type_transition"],
source="init_t",
tclass=["process"])
entrypoints = {}
for i in q.results():
try:
transtype = str(i.default)
if transtype in entrypoints:
entrypoints[transtype].append(str(i.target))
else:
entrypoints[transtype] = [str(i.target)]
except AttributeError:
continue
return entrypoints
def get_init_entrypoint_target(entrypoint):
try:
entrypoints = map(lambda x: x['transtype'], search([TRANSITION], {'source': "init_t", 'target': entrypoint, 'class': 'process'}))
return list(entrypoints)[0]
except (TypeError, IndexError):
pass
return None
def get_entrypoints(setype):
fcdict = get_fcdict()
mpaths = {}
for f in get_entrypoint_types(setype):
try:
mpaths[f] = (fcdict[f]["regex"], file_type_str[fcdict[f]["ftype"]])
except KeyError:
mpaths[f] = []
return mpaths
def get_methods():
global methods
if len(methods) > 0:
return methods
gen_interfaces()
fn = defaults.interface_info()
try:
with open(fn) as fd:
# List of per_role_template interfaces
ifs = interfaces.InterfaceSet()
ifs.from_file(fd)
methods = list(ifs.interfaces.keys())
except:
sys.stderr.write("could not open interface info [%s]\n" % fn)
sys.exit(1)
methods.sort()
return methods
def get_all_types():
global all_types
if all_types is None:
all_types = [x['name'] for x in info(TYPE)]
return all_types
def get_all_types_info():
global all_types_info
if all_types_info is None:
all_types_info = list(info(TYPE))
return all_types_info
def get_user_types():
global user_types
if user_types is None:
user_types = list(list(info(ATTRIBUTE, "userdomain"))[0]["types"])
return user_types
def get_all_role_allows():
global role_allows
if role_allows:
return role_allows
role_allows = {}
q = setools.RBACRuleQuery(_pol, ruletype=[ALLOW])
for r in q.results():
src = str(r.source)
tgt = str(r.target)
if src == "system_r" or tgt == "system_r":
continue
if src in role_allows:
role_allows[src].append(tgt)
else:
role_allows[src] = [tgt]
return role_allows
def get_all_entrypoint_domains():
import re
all_domains = []
types = sorted(get_all_types())
for i in types:
m = re.findall("(.*)%s" % "_exec_t$", i)
if len(m) > 0:
if len(re.findall("(.*)%s" % "_initrc$", m[0])) == 0 and m[0] not in all_domains:
all_domains.append(m[0])
return all_domains
def gen_interfaces():
try:
from commands import getstatusoutput
except ImportError:
from subprocess import getstatusoutput
ifile = defaults.interface_info()
headers = defaults.headers()
try:
if os.stat(headers).st_mtime <= os.stat(ifile).st_mtime:
return
except OSError:
pass
if os.getuid() != 0:
raise ValueError(_("You must regenerate interface info by running /usr/bin/sepolgen-ifgen"))
print(getstatusoutput("/usr/bin/sepolgen-ifgen")[1])
def gen_port_dict():
global portrecs
global portrecsbynum
if portrecs:
return (portrecs, portrecsbynum)
portrecsbynum = {}
portrecs = {}
for i in info(PORT):
if i['low'] == i['high']:
port = str(i['low'])
else:
port = "%s-%s" % (str(i['low']), str(i['high']))
if (i['type'], i['protocol']) in portrecs:
portrecs[(i['type'], i['protocol'])].append(port)
else:
portrecs[(i['type'], i['protocol'])] = [port]
if 'range' in i:
portrecsbynum[(i['low'], i['high'], i['protocol'])] = (i['type'], i['range'])
else:
portrecsbynum[(i['low'], i['high'], i['protocol'])] = (i['type'],)
return (portrecs, portrecsbynum)
def get_all_domains():
global all_domains
if not all_domains:
all_domains = list(list(info(ATTRIBUTE, "domain"))[0]["types"])
return all_domains
def get_all_roles():
global roles
if roles:
return roles
q = setools.RoleQuery(_pol)
roles = [str(x) for x in q.results() if str(x) != "object_r"]
return roles
def get_selinux_users():
global selinux_user_list
if not selinux_user_list:
selinux_user_list = list(info(USER))
if _pol.mls:
for x in selinux_user_list:
x['range'] = "".join(x['range'].split(" "))
return selinux_user_list
def get_login_mappings():
global login_mappings
if login_mappings:
return login_mappings
fd = open(selinux.selinux_usersconf_path(), "r")
buf = fd.read()
fd.close()
login_mappings = []
for b in buf.split("\n"):
b = b.strip()
if len(b) == 0 or b.startswith("#"):
continue
x = b.split(":")
login_mappings.append({"name": x[0], "seuser": x[1], "mls": ":".join(x[2:])})
return login_mappings
def get_all_users():
return sorted(map(lambda x: x['name'], get_selinux_users()))
def get_all_file_types():
global file_types
if file_types:
return file_types
file_types = sorted(list(info(ATTRIBUTE, "file_type"))[0]["types"])
return file_types
def get_all_port_types():
global port_types
if port_types:
return port_types
port_types = sorted(list(info(ATTRIBUTE, "port_type"))[0]["types"])
return port_types
def get_all_bools():
global bools
if not bools:
bools = list(info(BOOLEAN))
return bools
def prettyprint(f, trim):
return " ".join(f[:-len(trim)].split("_"))
def markup(f):
return f
def get_description(f, markup=markup):
txt = "Set files with the %s type, if you want to " % markup(f)
if f.endswith("_var_run_t"):
return txt + "store the %s files under the /run or /var/run directory." % prettyprint(f, "_var_run_t")
if f.endswith("_pid_t"):
return txt + "store the %s files under the /run directory." % prettyprint(f, "_pid_t")
if f.endswith("_var_lib_t"):
return txt + "store the %s files under the /var/lib directory." % prettyprint(f, "_var_lib_t")
if f.endswith("_var_t"):
return txt + "store the %s files under the /var directory." % prettyprint(f, "_var_lib_t")
if f.endswith("_var_spool_t"):
return txt + "store the %s files under the /var/spool directory." % prettyprint(f, "_spool_t")
if f.endswith("_spool_t"):
return txt + "store the %s files under the /var/spool directory." % prettyprint(f, "_spool_t")
if f.endswith("_cache_t") or f.endswith("_var_cache_t"):
return txt + "store the files under the /var/cache directory."
if f.endswith("_keytab_t"):
return txt + "treat the files as kerberos keytab files."
if f.endswith("_lock_t"):
return txt + "treat the files as %s lock data, stored under the /var/lock directory" % prettyprint(f, "_lock_t")
if f.endswith("_log_t"):
return txt + "treat the data as %s log data, usually stored under the /var/log directory." % prettyprint(f, "_log_t")
if f.endswith("_config_t"):
return txt + "treat the files as %s configuration data, usually stored under the /etc directory." % prettyprint(f, "_config_t")
if f.endswith("_conf_t"):
return txt + "treat the files as %s configuration data, usually stored under the /etc directory." % prettyprint(f, "_conf_t")
if f.endswith("_exec_t"):
return txt + "transition an executable to the %s_t domain." % f[:-len("_exec_t")]
if f.endswith("_cgi_content_t"):
return txt + "treat the files as %s cgi content." % prettyprint(f, "_cgi_content_t")
if f.endswith("_rw_content_t"):
return txt + "treat the files as %s read/write content." % prettyprint(f, "_rw_content_t")
if f.endswith("_rw_t"):
return txt + "treat the files as %s read/write content." % prettyprint(f, "_rw_t")
if f.endswith("_write_t"):
return txt + "treat the files as %s read/write content." % prettyprint(f, "_write_t")
if f.endswith("_db_t"):
return txt + "treat the files as %s database content." % prettyprint(f, "_db_t")
if f.endswith("_ra_content_t"):
return txt + "treat the files as %s read/append content." % prettyprint(f, "_ra_content_t")
if f.endswith("_cert_t"):
return txt + "treat the files as %s certificate data." % prettyprint(f, "_cert_t")
if f.endswith("_key_t"):
return txt + "treat the files as %s key data." % prettyprint(f, "_key_t")
if f.endswith("_secret_t"):
return txt + "treat the files as %s secret data." % prettyprint(f, "_key_t")
if f.endswith("_ra_t"):
return txt + "treat the files as %s read/append content." % prettyprint(f, "_ra_t")
if f.endswith("_ro_t"):
return txt + "treat the files as %s read/only content." % prettyprint(f, "_ro_t")
if f.endswith("_modules_t"):
return txt + "treat the files as %s modules." % prettyprint(f, "_modules_t")
if f.endswith("_content_t"):
return txt + "treat the files as %s content." % prettyprint(f, "_content_t")
if f.endswith("_state_t"):
return txt + "treat the files as %s state data." % prettyprint(f, "_state_t")
if f.endswith("_files_t"):
return txt + "treat the files as %s content." % prettyprint(f, "_files_t")
if f.endswith("_file_t"):
return txt + "treat the files as %s content." % prettyprint(f, "_file_t")
if f.endswith("_data_t"):
return txt + "treat the files as %s content." % prettyprint(f, "_data_t")
if f.endswith("_file_t"):
return txt + "treat the data as %s content." % prettyprint(f, "_file_t")
if f.endswith("_tmp_t"):
return txt + "store %s temporary files in the /tmp directories." % prettyprint(f, "_tmp_t")
if f.endswith("_etc_t"):
return txt + "store %s files in the /etc directories." % prettyprint(f, "_tmp_t")
if f.endswith("_home_t"):
return txt + "store %s files in the users home directory." % prettyprint(f, "_home_t")
if f.endswith("_tmpfs_t"):
return txt + "store %s files on a tmpfs file system." % prettyprint(f, "_tmpfs_t")
if f.endswith("_unit_file_t"):
return txt + "treat files as a systemd unit file."
if f.endswith("_htaccess_t"):
return txt + "treat the file as a %s access file." % prettyprint(f, "_htaccess_t")
return txt + "treat the files as %s data." % prettyprint(f, "_t")
def get_all_attributes():
global all_attributes
if not all_attributes:
all_attributes = list(sorted(map(lambda x: x['name'], info(ATTRIBUTE))))
return all_attributes
def _dict_has_perms(dict, perms):
for perm in perms:
if perm not in dict[PERMS]:
return False
return True
def gen_short_name(setype):
all_domains = get_all_domains()
if setype.endswith("_t"):
# replace aliases with corresponding types
setype = get_real_type_name(setype)
domainname = setype[:-2]
else:
domainname = setype
if domainname + "_t" not in all_domains:
raise ValueError("domain %s_t does not exist" % domainname)
if domainname[-1] == 'd':
short_name = domainname[:-1] + "_"
else:
short_name = domainname + "_"
return (domainname, short_name)
def get_all_allow_rules():
global all_allow_rules
if not all_allow_rules:
all_allow_rules = search([ALLOW])
return all_allow_rules
def get_all_transitions():
global all_transitions
if not all_transitions:
all_transitions = list(search([TRANSITION]))
return all_transitions
def get_bools(setype):
bools = []
domainbools = []
domainname, short_name = gen_short_name(setype)
for i in map(lambda x: x['boolean'], filter(lambda x: 'boolean' in x and x['source'] == setype, get_all_allow_rules())):
for b in i:
if not isinstance(b, tuple):
continue
try:
enabled = selinux.security_get_boolean_active(b[0])
except OSError:
enabled = b[1]
if b[0].startswith(short_name) or b[0].startswith(domainname):
if (b[0], enabled) not in domainbools and (b[0], not enabled) not in domainbools:
domainbools.append((b[0], enabled))
else:
if (b[0], enabled) not in bools and (b[0], not enabled) not in bools:
bools.append((b[0], enabled))
return (domainbools, bools)
def get_all_booleans():
global booleans
if not booleans:
booleans = selinux.security_get_boolean_names()[1]
return booleans
def policy_xml(path="/usr/share/selinux/devel/policy.xml"):
try:
with gzip.open(path) as fd:
buf = fd.read()
except IOError:
with open(path) as fd:
buf = fd.read()
return buf
def gen_bool_dict(path="/usr/share/selinux/devel/policy.xml"):
global booleans_dict
if booleans_dict:
return booleans_dict
import xml.etree.ElementTree
booleans_dict = {}
try:
tree = xml.etree.ElementTree.fromstring(policy_xml(path))
for l in tree.findall("layer"):
for m in l.findall("module"):
for b in m.findall("tunable"):
desc = b.find("desc").find("p").text.strip("\n")
desc = re.sub("\n", " ", desc)
booleans_dict[b.get('name')] = (m.get("name"), b.get('dftval'), desc)
for b in m.findall("bool"):
desc = b.find("desc").find("p").text.strip("\n")
desc = re.sub("\n", " ", desc)
booleans_dict[b.get('name')] = (m.get("name"), b.get('dftval'), desc)
for i in tree.findall("bool"):
desc = i.find("desc").find("p").text.strip("\n")
desc = re.sub("\n", " ", desc)
booleans_dict[i.get('name')] = ("global", i.get('dftval'), desc)
for i in tree.findall("tunable"):
desc = i.find("desc").find("p").text.strip("\n")
desc = re.sub("\n", " ", desc)
booleans_dict[i.get('name')] = ("global", i.get('dftval'), desc)
except IOError:
pass
return booleans_dict
def boolean_category(boolean):
booleans_dict = gen_bool_dict()
if boolean in booleans_dict:
return _(booleans_dict[boolean][0])
else:
return _("unknown")
def boolean_desc(boolean):
booleans_dict = gen_bool_dict()
if boolean in booleans_dict:
return _(booleans_dict[boolean][2])
else:
desc = boolean.split("_")
return "Allow %s to %s" % (desc[0], " ".join(desc[1:]))
def get_os_version():
os_version = ""
pkg_name = "selinux-policy"
try:
try:
from commands import getstatusoutput
except ImportError:
from subprocess import getstatusoutput
rc, output = getstatusoutput("rpm -q '%s'" % pkg_name)
if rc == 0:
os_version = output.split(".")[-2]
except:
os_version = ""
if os_version[0:2] == "fc":
os_version = "Fedora" + os_version[2:]
elif os_version[0:2] == "el":
os_version = "RHEL" + os_version[2:]
else:
os_version = ""
return os_version
def reinit():
global all_attributes
global all_domains
global all_types
global booleans
global booleans_dict
global bools
global fcdict
global file_types
global local_files
global methods
global portrecs
global portrecsbynum
global port_types
global role_allows
global roles
global login_mappings
global selinux_user_list
global user_types
all_attributes = None
all_domains = None
all_types = None
booleans = None
booleans_dict = None
bools = None
fcdict = None
file_types = None
local_files = None
methods = []
portrecs = None
portrecsbynum = None
port_types = None
role_allows = None
roles = None
user_types = None
login_mappings = None
selinux_user_list = None
```
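A hedged usage sketch for the query helpers defined above. It assumes an SELinux system with an installed policy (the module loads one at import time) and that an `httpd_t` domain exists in that policy; both are assumptions, not guarantees from the file:

```python
# Minimal sketch: list allow rules that let a (hypothetical) httpd_t domain
# open and write files, using the module-level search() helper above.
import sepolicy

rules = sepolicy.search([sepolicy.ALLOW],
                        {sepolicy.SOURCE: "httpd_t",
                         sepolicy.CLASS: "file",
                         sepolicy.PERMS: ["open", "write"]})
for r in rules[:5]:
    # each allow-rule dict carries 'source', 'target', 'class', 'permlist', 'enabled'
    print(r["source"], r["target"], r["permlist"])
```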
#### File: python/sepolicy/sepolicy.py
```python
import os
import sys
import selinux
import sepolicy
from multiprocessing import Pool
from sepolicy import get_os_version, get_conditionals, get_conditionals_format_text
import argparse
PROGNAME = "policycoreutils"
try:
import gettext
kwargs = {}
if sys.version_info < (3,):
kwargs['unicode'] = True
gettext.install(PROGNAME,
localedir="/usr/share/locale",
codeset='utf-8',
**kwargs)
except:
try:
import builtins
builtins.__dict__['_'] = str
except ImportError:
import __builtin__
__builtin__.__dict__['_'] = unicode
usage = "sepolicy generate [-h] [-n NAME] [-p PATH] ["
usage_dict = {' --newtype': ('-t [TYPES [TYPES ...]]',), ' --customize': ('-d DOMAIN', '-a ADMIN_DOMAIN', "[ -w WRITEPATHS ]",), ' --admin_user': ('[-r TRANSITION_ROLE ]', "[ -w WRITEPATHS ]",), ' --application': ('COMMAND', "[ -w WRITEPATHS ]",), ' --cgi': ('COMMAND', "[ -w WRITEPATHS ]",), ' --confined_admin': ('-a ADMIN_DOMAIN', "[ -w WRITEPATHS ]",), ' --dbus': ('COMMAND', "[ -w WRITEPATHS ]",), ' --desktop_user': ('', "[ -w WRITEPATHS ]",), ' --inetd': ('COMMAND', "[ -w WRITEPATHS ]",), ' --init': ('COMMAND', "[ -w WRITEPATHS ]",), ' --sandbox': ("[ -w WRITEPATHS ]",), ' --term_user': ("[ -w WRITEPATHS ]",), ' --x_user': ("[ -w WRITEPATHS ]",)}
class CheckPath(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
if not os.path.exists(values):
raise ValueError("%s does not exist" % values)
setattr(namespace, self.dest, values)
class CheckType(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
if isinstance(values, str):
setattr(namespace, self.dest, values)
else:
newval = getattr(namespace, self.dest)
if not newval:
newval = []
for v in values:
newval.append(v)
setattr(namespace, self.dest, newval)
class CheckBoolean(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
booleans = sepolicy.get_all_booleans()
newval = getattr(namespace, self.dest)
if not newval:
newval = []
if isinstance(values, str):
v = selinux.selinux_boolean_sub(values)
if v not in booleans:
raise ValueError("%s must be an SELinux process domain:\nValid domains: %s" % (v, ", ".join(booleans)))
newval.append(v)
setattr(namespace, self.dest, newval)
else:
for value in values:
v = selinux.selinux_boolean_sub(value)
if v not in booleans:
raise ValueError("%s must be an SELinux boolean:\nValid boolean: %s" % (v, ", ".join(booleans)))
newval.append(v)
setattr(namespace, self.dest, newval)
class CheckDomain(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
domains = sepolicy.get_all_domains()
if isinstance(values, str):
values = sepolicy.get_real_type_name(values)
if values not in domains:
raise ValueError("%s must be an SELinux process domain:\nValid domains: %s" % (values, ", ".join(domains)))
setattr(namespace, self.dest, values)
else:
newval = getattr(namespace, self.dest)
if not newval:
newval = []
for v in values:
v = sepolicy.get_real_type_name(v)
if v not in domains:
raise ValueError("%s must be an SELinux process domain:\nValid domains: %s" % (v, ", ".join(domains)))
newval.append(v)
setattr(namespace, self.dest, newval)
all_classes = None
class CheckClass(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
global all_classes
if not all_classes:
all_classes = list(map(lambda x: x['name'], sepolicy.info(sepolicy.TCLASS)))
if values not in all_classes:
raise ValueError("%s must be an SELinux class:\nValid classes: %s" % (values, ", ".join(all_classes)))
setattr(namespace, self.dest, values)
class CheckAdmin(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
from sepolicy.interface import get_admin
newval = getattr(namespace, self.dest)
if not newval:
newval = []
admins = get_admin()
if values not in admins:
raise ValueError("%s must be an SELinux admin domain:\nValid admin domains: %s" % (values, ", ".join(admins)))
newval.append(values)
setattr(namespace, self.dest, newval)
class CheckPort(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
newval = getattr(namespace, self.dest)
if not newval:
newval = []
for v in values:
if v < 1 or v > 65535:
raise ValueError("%s must be an integer between 1 and 65535" % v)
newval.append(v)
setattr(namespace, self.dest, newval)
class CheckPortType(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
port_types = sepolicy.get_all_port_types()
newval = getattr(namespace, self.dest)
if not newval:
newval = []
for v in values:
v = sepolicy.get_real_type_name(v)
if v not in port_types:
raise ValueError("%s must be an SELinux port type:\nValid port types: %s" % (v, ", ".join(port_types)))
newval.append(v)
setattr(namespace, self.dest, newval)
class LoadPolicy(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
import sepolicy
sepolicy.policy(values)
setattr(namespace, self.dest, values)
class CheckUser(argparse.Action):
def __call__(self, parser, namespace, value, option_string=None):
newval = getattr(namespace, self.dest)
if not newval:
newval = []
users = sepolicy.get_all_users()
if value not in users:
raise ValueError("%s must be an SELinux user:\nValid users: %s" % (value, ", ".join(users)))
newval.append(value)
setattr(namespace, self.dest, newval)
class CheckRole(argparse.Action):
def __call__(self, parser, namespace, value, option_string=None):
newval = getattr(namespace, self.dest)
if not newval:
newval = []
roles = sepolicy.get_all_roles()
if value not in roles:
raise ValueError("%s must be an SELinux role:\nValid roles: %s" % (value, ", ".join(roles)))
newval.append(value[:-2])
setattr(namespace, self.dest, newval)
class InterfaceInfo(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
from sepolicy.interface import get_interface_dict
interface_dict = get_interface_dict()
for v in values:
if v not in interface_dict.keys():
raise ValueError(_("Interface %s does not exist.") % v)
setattr(namespace, self.dest, values)
def generate_custom_usage(usage_text, usage_dict):
sorted_keys = sorted(usage_dict.keys())
for k in sorted_keys:
usage_text += "%s %s |" % (k, (" ".join(usage_dict[k])))
usage_text = usage_text[:-1] + "]"
usage_text = _(usage_text)
return usage_text
# expects formats:
# "22 (sshd_t)", "80, 8080 (httpd_t)", "all ports (port_type)"
def port_string_to_num(val):
try:
return int(val.split(" ")[0].split(",")[0].split("-")[0])
except:
return 99999999
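# Hedged examples of the parsing above (inputs follow the formats noted in
# the comment; the fallback value pushes unparsable strings to the end of
# an ascending sort):
#     port_string_to_num("22 (sshd_t)")            -> 22
#     port_string_to_num("80, 8080 (httpd_t)")     -> 80
#     port_string_to_num("all ports (port_type)")  -> 99999999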
def _print_net(src, protocol, perm):
import sepolicy.network
portdict = sepolicy.network.get_network_connect(src, protocol, perm)
if len(portdict) > 0:
bold_start = "\033[1m"
bold_end = "\033[0;0m"
print("\n" + bold_start + "%s: %s %s" % (src, protocol, perm) + bold_end)
port_strings = []
boolean_text = ""
for p in portdict:
for t, recs in portdict[p]:
cond = get_conditionals(src, t, "%s_socket" % protocol, [perm])
if cond:
boolean_text = get_conditionals_format_text(cond)
port_strings.append("%s (%s) %s" % (", ".join(recs), t, boolean_text))
else:
port_strings.append("%s (%s)" % (", ".join(recs), t))
port_strings.sort(key=lambda param: port_string_to_num(param))
for p in port_strings:
print("\t" + p)
def network(args):
portrecs, portrecsbynum = sepolicy.gen_port_dict()
all_ports = []
if args.list_ports:
for i in portrecs:
if i[0] not in all_ports:
all_ports.append(i[0])
all_ports.sort()
print("\n".join(all_ports))
for port in args.port:
found = False
for i in portrecsbynum:
if i[0] <= port <= i[1]:
if i[0] == i[1]:
port_range = i[0]
else:
port_range = "%s-%s" % (i[0], i[1])
found = True
print("%d: %s %s %s" % (port, i[2], portrecsbynum[i][0], port_range))
if not found:
if port < 500:
print("Undefined reserved port type")
else:
print("Undefined port type")
for t in args.type:
if (t, 'tcp') in portrecs.keys():
print("%s: tcp: %s" % (t, ",".join(portrecs[t, 'tcp'])))
if (t, 'udp') in portrecs.keys():
print( "%s: udp: %s" % (t, ",".join(portrecs[t, 'udp'])))
for a in args.applications:
d = sepolicy.get_init_transtype(a)
if d:
args.domain.append(d)
for d in args.domain:
_print_net(d, "tcp", "name_connect")
for net in ("tcp", "udp"):
_print_net(d, net, "name_bind")
def gui_run(args):
try:
import sepolicy.gui
sepolicy.gui.SELinuxGui(args.domain, args.test)
except ImportError:
raise ValueError(_("You need to install policycoreutils-gui package to use the gui option"))
def gen_gui_args(parser):
gui = parser.add_parser("gui",
help=_('Graphical User Interface for SELinux Policy'))
gui.add_argument("-d", "--domain", default=None,
action=CheckDomain,
help=_("Domain name(s) of man pages to be created"))
gui.add_argument("-t", "--test", default=False, action="store_true",
help=argparse.SUPPRESS)
gui.set_defaults(func=gui_run)
def manpage_work(domain, path, root, source_files, web):
from sepolicy.manpage import ManPage
m = ManPage(domain, path, root, source_files, web)
print(m.get_man_page_path())
def manpage(args):
from sepolicy.manpage import HTMLManPages, manpage_domains, manpage_roles, gen_domains
path = args.path
if not args.policy and args.root != "/":
sepolicy.policy(sepolicy.get_installed_policy(args.root))
if args.source_files and args.root == "/":
raise ValueError(_("Alternative root needs to be setup"))
if args.all:
test_domains = gen_domains()
else:
test_domains = args.domain
p = Pool()
for domain in test_domains:
p.apply_async(manpage_work, [domain, path, args.root, args.source_files, args.web])
p.close()
p.join()
if args.web:
HTMLManPages(manpage_roles, manpage_domains, path, args.os)
def gen_manpage_args(parser):
man = parser.add_parser("manpage",
help=_('Generate SELinux man pages'))
man.add_argument("-p", "--path", dest="path", default="/tmp",
help=_("path in which the generated SELinux man pages will be stored"))
man.add_argument("-o", "--os", dest="os", default=get_os_version(),
help=_("name of the OS for man pages"))
man.add_argument("-w", "--web", dest="web", default=False, action="store_true",
help=_("Generate HTML man pages structure for selected SELinux man page"))
man.add_argument("-r", "--root", dest="root", default="/",
help=_("Alternate root directory, defaults to /"))
man.add_argument("--source_files", dest="source_files", default=False, action="store_true",
help=_("With this flag, alternative root path needs to include file context files and policy.xml file"))
group = man.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--all", dest="all", default=False,
action="store_true",
help=_("All domains"))
group.add_argument("-d", "--domain", nargs="+",
action=CheckDomain,
help=_("Domain name(s) of man pages to be created"))
man.set_defaults(func=manpage)
def gen_network_args(parser):
net = parser.add_parser("network",
help=_('Query SELinux policy network information'))
group = net.add_mutually_exclusive_group(required=True)
group.add_argument("-l", "--list", dest="list_ports",
action="store_true",
help=_("list all SELinux port types"))
group.add_argument("-p", "--port", dest="port", default=[],
action=CheckPort, nargs="+", type=int,
help=_("show SELinux type related to the port"))
group.add_argument("-t", "--type", dest="type", default=[],
action=CheckPortType, nargs="+",
help=_("Show ports defined for this SELinux type"))
group.add_argument("-d", "--domain", dest="domain", default=[],
action=CheckDomain, nargs="+",
help=_("show ports to which this domain can bind and/or connect"))
group.add_argument("-a", "--application", dest="applications", default=[],
nargs="+",
help=_("show ports to which this application can bind and/or connect"))
net.set_defaults(func=network)
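# Hedged CLI examples for the "network" subcommand wired up above
# (the type and domain names are hypothetical):
#     sepolicy network -p 22            # SELinux type(s) defined for port 22
#     sepolicy network -t http_port_t   # ports defined for a port type
#     sepolicy network -d httpd_t       # ports the domain can bind/connect to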
def communicate(args):
from sepolicy.communicate import get_types
writable = get_types(args.source, args.tclass, args.sourceaccess.split(","))
readable = get_types(args.target, args.tclass, args.targetaccess.split(","))
out = list(set(writable) & set(readable))
for t in out:
print(t)
def gen_communicate_args(parser):
comm = parser.add_parser("communicate",
help=_('query SELinux policy to see if domains can communicate with each other'))
comm.add_argument("-s", "--source", dest="source",
action=CheckDomain, required=True,
help=_("Source Domain"))
comm.add_argument("-t", "--target", dest="target",
action=CheckDomain, required=True,
help=_("Target Domain"))
comm.add_argument("-c", "--class", required=False, dest="tclass",
action=CheckClass,
default="file", help="class to use for communications, Default 'file'")
comm.add_argument("-S", "--sourceaccess", required=False, dest="sourceaccess", default="open,write", help="comma separate list of permissions for the source type to use, Default 'open,write'")
comm.add_argument("-T", "--targetaccess", required=False, dest="targetaccess", default="open,read", help="comma separated list of permissions for the target type to use, Default 'open,read'")
comm.set_defaults(func=communicate)
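# Hedged CLI example for the "communicate" subcommand above: print file
# types writable by the source and readable by the target (domain names
# are hypothetical):
#     sepolicy communicate -s httpd_t -t openvpn_t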
def booleans(args):
from sepolicy import boolean_desc
if args.all:
rc, args.booleans = selinux.security_get_boolean_names()
args.booleans.sort()
for b in args.booleans:
print("%s=_(\"%s\")" % (b, boolean_desc(b)))
def gen_booleans_args(parser):
bools = parser.add_parser("booleans",
help=_('query SELinux Policy to see description of booleans'))
group = bools.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--all", dest="all", default=False,
action="store_true",
help=_("get all booleans descriptions"))
group.add_argument("-b", "--boolean", dest="booleans", nargs="+",
action=CheckBoolean, required=False,
help=_("boolean to get description"))
bools.set_defaults(func=booleans)
def transition(args):
from sepolicy.transition import setrans
mytrans = setrans(args.source, args.target)
mytrans.output()
def gen_transition_args(parser):
trans = parser.add_parser("transition",
help=_('query SELinux Policy to see how a source process domain can transition to the target process domain'))
trans.add_argument("-s", "--source", dest="source",
action=CheckDomain, required=True,
help=_("source process domain"))
trans.add_argument("-t", "--target", dest="target",
action=CheckDomain,
help=_("target process domain"))
trans.set_defaults(func=transition)
def print_interfaces(interfaces, args, append=""):
from sepolicy.interface import get_interface_format_text, interface_compile_test
for i in interfaces:
if args.verbose:
try:
print(get_interface_format_text(i + append))
except KeyError:
print(i)
if args.compile:
try:
interface_compile_test(i)
except KeyError:
print(i)
else:
print(i)
def interface(args):
from sepolicy.interface import get_admin, get_user, get_interface_dict, get_all_interfaces
if args.list_admin:
print_interfaces(get_admin(args.file), args, "_admin")
if args.list_user:
print_interfaces(get_user(args.file), args, "_role")
if args.list:
print_interfaces(get_all_interfaces(args.file), args)
if args.interfaces:
print_interfaces(args.interfaces, args)
def generate(args):
from sepolicy.generate import policy, AUSER, RUSER, EUSER, USERS, SANDBOX, APPLICATIONS, NEWTYPE
cmd = None
# the numeric constants correspond to POLTYPE values defined in sepolicy.generate
conflict_args = {'TYPES': (NEWTYPE,), 'DOMAIN': (EUSER,), 'ADMIN_DOMAIN': (AUSER, RUSER, EUSER,)}
error_text = ""
if args.policytype is None:
generate_usage = generate_custom_usage(usage, usage_dict)
for k in usage_dict:
error_text += "%s" % (k)
print(generate_usage)
print(_("sepolicy generate: error: one of the arguments %s is required") % error_text)
sys.exit(1)
if args.policytype in APPLICATIONS:
if not args.command:
raise ValueError(_("Command required for this type of policy"))
cmd = os.path.realpath(args.command)
if not args.name:
args.name = os.path.basename(cmd).replace("-", "_")
mypolicy = policy(args.name, args.policytype)
if cmd:
mypolicy.set_program(cmd)
if args.types:
if args.policytype not in conflict_args['TYPES']:
raise ValueError(_("-t option can not be used with '%s' domains. Read usage for more details.") % sepolicy.generate.poltype[args.policytype])
mypolicy.set_types(args.types)
if args.domain:
if args.policytype not in conflict_args['DOMAIN']:
raise ValueError(_("-d option can not be used with '%s' domains. Read usage for more details.") % sepolicy.generate.poltype[args.policytype])
if args.admin_domain:
if args.policytype not in conflict_args['ADMIN_DOMAIN']:
raise ValueError(_("-a option can not be used with '%s' domains. Read usage for more details.") % sepolicy.generate.poltype[args.policytype])
if len(args.writepaths) > 0 and args.policytype == NEWTYPE:
raise ValueError(_("-w option can not be used with the --newtype option"))
for p in args.writepaths:
if os.path.isdir(p):
mypolicy.add_dir(p)
else:
mypolicy.add_file(p)
mypolicy.set_transition_users(args.user)
mypolicy.set_admin_roles(args.role)
mypolicy.set_admin_domains(args.admin_domain)
mypolicy.set_existing_domains(args.domain)
if args.policytype in APPLICATIONS:
mypolicy.gen_writeable()
mypolicy.gen_symbols()
print(mypolicy.generate(args.path))
def gen_interface_args(parser):
itf = parser.add_parser("interface",
help=_('List SELinux Policy interfaces'))
itf.add_argument("-c", "--compile", dest="compile",
action="store_true", default=False,
help="Run compile test for selected interface")
itf.add_argument("-v", "--verbose", dest="verbose",
action="store_true", default=False,
help="Show verbose information")
itf.add_argument("-f", "--file", dest="file",
help="Interface file")
group = itf.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--list_admin", dest="list_admin", action="store_true", default=False,
help="List all domains with admin interface - DOMAIN_admin()")
group.add_argument("-u", "--list_user", dest="list_user", action="store_true",
default=False,
help="List all domains with SELinux user role interface - DOMAIN_role()")
group.add_argument("-l", "--list", dest="list", action="store_true",
default=False,
help="List all interfaces")
group.add_argument("-i", "--interfaces", nargs="+", dest="interfaces",
action=InterfaceInfo,
help=_("Enter interface names, you wish to query"))
itf.set_defaults(func=interface)
def gen_generate_args(parser):
from sepolicy.generate import get_poltype_desc, poltype, DAEMON, DBUS, INETD, CGI, SANDBOX, USER, EUSER, TUSER, XUSER, LUSER, AUSER, RUSER, NEWTYPE
generate_usage = generate_custom_usage(usage, usage_dict)
pol = parser.add_parser("generate", usage=generate_usage,
help=_('Generate SELinux Policy module template'))
pol.add_argument("-d", "--domain", dest="domain", default=[],
action=CheckDomain, nargs="*",
help=_("Enter domain type which you will be extending"))
pol.add_argument("-u", "--user", dest="user", default=[],
action=CheckUser,
help=_("Enter SELinux user(s) which will transition to this domain"))
pol.add_argument("-r", "--role", dest="role", default=[],
action=CheckRole,
help=_("Enter SELinux role(s) to which the administror domain will transition"))
pol.add_argument("-a", "--admin", dest="admin_domain", default=[],
action=CheckAdmin,
help=_("Enter domain(s) which this confined admin will administrate"))
pol.add_argument("-n", "--name", dest="name",
default=None,
help=_("name of policy to generate"))
pol.add_argument("-T", "--test", dest="test", default=False, action="store_true",
help=argparse.SUPPRESS)
pol.add_argument("-t", "--type", dest="types", default=[], nargs="*",
action=CheckType,
help="Enter type(s) for which you will generate new definition and rule(s)")
pol.add_argument("-p", "--path", dest="path", default=os.getcwd(),
help=_("path in which the generated policy files will be stored"))
pol.add_argument("-w", "--writepath", dest="writepaths", nargs="*", default=[],
help=_("path to which the confined processes will need to write"))
cmdtype = pol.add_argument_group(_("Policy types which require a command"))
cmdgroup = cmdtype.add_mutually_exclusive_group(required=False)
cmdgroup.add_argument("--application", dest="policytype", const=USER,
action="store_const",
help=_("Generate '%s' policy") % poltype[USER])
cmdgroup.add_argument("--cgi", dest="policytype", const=CGI,
action="store_const",
help=_("Generate '%s' policy") % poltype[CGI])
cmdgroup.add_argument("--dbus", dest="policytype", const=DBUS,
action="store_const",
help=_("Generate '%s' policy") % poltype[DBUS])
cmdgroup.add_argument("--inetd", dest="policytype", const=INETD,
action="store_const",
help=_("Generate '%s' policy") % poltype[INETD])
cmdgroup.add_argument("--init", dest="policytype", const=DAEMON,
action="store_const", default=DAEMON,
help=_("Generate '%s' policy") % poltype[DAEMON])
type_group = pol.add_argument_group("Policy types which do not require a command")
group = type_group.add_mutually_exclusive_group(required=False)
group.add_argument("--admin_user", dest="policytype", const=AUSER,
action="store_const",
help=_("Generate '%s' policy") % poltype[AUSER])
group.add_argument("--confined_admin", dest="policytype", const=RUSER,
action="store_const",
help=_("Generate '%s' policy") % poltype[RUSER])
group.add_argument("--customize", dest="policytype", const=EUSER,
action="store_const",
help=_("Generate '%s' policy") % poltype[EUSER])
group.add_argument("--desktop_user", dest="policytype", const=LUSER,
action="store_const",
help=_("Generate '%s' policy ") % poltype[LUSER])
group.add_argument("--newtype", dest="policytype", const=NEWTYPE,
action="store_const",
help=_("Generate '%s' policy") % poltype[NEWTYPE])
group.add_argument("--sandbox", dest="policytype", const=SANDBOX,
action="store_const",
help=_("Generate '%s' policy") % poltype[SANDBOX])
group.add_argument("--term_user", dest="policytype", const=TUSER,
action="store_const",
help=_("Generate '%s' policy") % poltype[TUSER])
group.add_argument("--x_user", dest="policytype", const=XUSER,
action="store_const",
help=_("Generate '%s' policy") % poltype[XUSER])
pol.add_argument("command", nargs="?", default=None,
help=_("executable to confine"))
pol.set_defaults(func=generate)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='SELinux Policy Inspection Tool')
subparsers = parser.add_subparsers(help=_("commands"))
parser.add_argument("-P", "--policy", dest="policy",
action=LoadPolicy,
default=None, help=_("Alternate SELinux policy, defaults to /sys/fs/selinux/policy"))
gen_booleans_args(subparsers)
gen_communicate_args(subparsers)
gen_generate_args(subparsers)
gen_gui_args(subparsers)
gen_interface_args(subparsers)
gen_manpage_args(subparsers)
gen_network_args(subparsers)
gen_transition_args(subparsers)
try:
if os.path.basename(sys.argv[0]) == "sepolgen":
parser_args = [ "generate" ] + sys.argv[1:]
elif len(sys.argv) > 1:
parser_args = sys.argv[1:]
else:
parser_args = ["-h"]
args = parser.parse_args(args=parser_args)
args.func(args)
sys.exit(0)
except ValueError as e:
sys.stderr.write("%s: %s\n" % (e.__class__.__name__, str(e)))
sys.exit(1)
except IOError as e:
sys.stderr.write("%s: %s\n" % (e.__class__.__name__, str(e)))
sys.exit(1)
except KeyboardInterrupt:
print("Out")
sys.exit(0)
```
#### File: jni/inject/gen_jni_hooks.py
```python
primitives = ['jint', 'jboolean', 'jlong']
class JType:
def __init__(self, name, sig) -> None:
self.name = name
self.sig = sig
class JArray(JType):
def __init__(self, type) -> None:
if type.name in primitives:
name = type.name + 'Array'
else:
name = 'jobjectArray'
super().__init__(name, '[' + type.sig)
class Argument:
def __init__(self, name, type, set_arg = False) -> None:
self.name = name
self.type = type
self.set_arg = set_arg
def cpp(self):
return f'{self.type.name} {self.name}'
class Method:
def __init__(self, name, args) -> None:
self.name = name
self.args = args
def cpp(self):
return ', '.join(map(lambda a: a.cpp(), self.args))
def name_list(self):
return ', '.join(map(lambda a: a.name, self.args))
def jni(self):
return ''.join(map(lambda a: a.type.sig, self.args))
# Common types
jint = JType('jint', 'I')
jintArray = JArray(jint)
jstring = JType('jstring', 'Ljava/lang/String;')
jboolean = JType('jboolean', 'Z')
jlong = JType('jlong', 'J')
# Common args
uid = Argument('uid', jint)
gid = Argument('gid', jint)
gids = Argument('gids', jintArray)
runtime_flags = Argument('runtime_flags', jint)
rlimits = Argument('rlimits', JArray(jintArray))
mount_external = Argument('mount_external', jint)
se_info = Argument('se_info', jstring)
nice_name = Argument('nice_name', jstring)
fds_to_close = Argument('fds_to_close', jintArray)
instruction_set = Argument('instruction_set', jstring)
app_data_dir = Argument('app_data_dir', jstring)
# o
fds_to_ignore = Argument('fds_to_ignore', jintArray)
# p
is_child_zygote = Argument('is_child_zygote', jboolean, True)
# q_alt
is_top_app = Argument('is_top_app', jboolean, True)
# r
pkg_data_info_list = Argument('pkg_data_info_list', JArray(jstring), True)
whitelisted_data_info_list = Argument('whitelisted_data_info_list', JArray(jstring), True)
mount_data_dirs = Argument('mount_data_dirs', jboolean, True)
mount_storage_dirs = Argument('mount_storage_dirs', jboolean, True)
# samsung (non-standard arguments)
i1 = Argument('i1', jint)
i2 = Argument('i2', jint)
i3 = Argument('i3', jint)
# server
permitted_capabilities = Argument('permitted_capabilities', jlong)
effective_capabilities = Argument('effective_capabilities', jlong)
# Method definitions
fork_l = Method('l', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, nice_name, fds_to_close, instruction_set, app_data_dir])
fork_o = Method('o', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, nice_name, fds_to_close, fds_to_ignore, instruction_set, app_data_dir])
fork_p = Method('p', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir])
fork_q_alt = Method('q_alt', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app])
fork_r = Method('r', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir, is_top_app,
pkg_data_info_list, whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs])
fork_samsung_m = Method('samsung_m', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, i1, i2, nice_name, fds_to_close, instruction_set, app_data_dir])
fork_samsung_n = Method('samsung_n', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, i1, i2, nice_name, fds_to_close, instruction_set, app_data_dir, i3])
fork_samsung_o = Method('samsung_o', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, i1, i2, nice_name, fds_to_close, fds_to_ignore, instruction_set, app_data_dir])
fork_samsung_p = Method('samsung_p', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, i1, i2, nice_name, fds_to_close, fds_to_ignore, is_child_zygote, instruction_set, app_data_dir])
spec_q = Method('q', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, is_child_zygote, instruction_set, app_data_dir])
spec_q_alt = Method('q_alt', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info,
nice_name, is_child_zygote, instruction_set, app_data_dir, is_top_app])
spec_r = Method('r', [uid, gid, gids, runtime_flags, rlimits, mount_external, se_info, nice_name,
is_child_zygote, instruction_set, app_data_dir, is_top_app, pkg_data_info_list,
whitelisted_data_info_list, mount_data_dirs, mount_storage_dirs])
spec_samsung_q = Method('samsung_q', [uid, gid, gids, runtime_flags, rlimits, mount_external,
se_info, i1, i2, nice_name, is_child_zygote, instruction_set, app_data_dir])
server_m = Method('m', [uid, gid, gids, runtime_flags, rlimits,
permitted_capabilities, effective_capabilities])
server_samsung_q = Method('samsung_q', [uid, gid, gids, runtime_flags, i1, i2, rlimits,
permitted_capabilities, effective_capabilities])
def ind(i):
return '\n' + ' ' * i
def gen_definitions(methods, base_name):
decl = ''
if base_name != 'nativeSpecializeAppProcess':
ret_stat = ind(1) + 'return ctx.pid;'
cpp_ret = 'jint'
jni_ret = 'I'
else:
ret_stat = ''
cpp_ret = 'void'
jni_ret = 'V'
for m in methods:
func_name = f'{base_name}_{m.name}'
decl += ind(0) + f'{cpp_ret} {func_name}(JNIEnv *env, jclass clazz, {m.cpp()}) {{'
decl += ind(1) + 'HookContext ctx{};'
if base_name == 'nativeForkSystemServer':
decl += ind(1) + 'ForkSystemServerArgs args(uid, gid, gids, runtime_flags, permitted_capabilities, effective_capabilities);'
else:
decl += ind(1) + 'SpecializeAppProcessArgs args(uid, gid, gids, runtime_flags, mount_external, se_info, nice_name, instruction_set, app_data_dir);'
for a in m.args:
if a.set_arg:
decl += ind(1) + f'args.{a.name} = &{a.name};'
decl += ind(1) + 'ctx.raw_args = &args;'
decl += ind(1) + f'{base_name}_pre(&ctx, env, clazz);'
decl += ind(1) + f'reinterpret_cast<decltype(&{func_name})>({base_name}_orig)('
decl += ind(2) + f'env, clazz, {m.name_list()}'
decl += ind(1) + ');'
decl += ind(1) + f'{base_name}_post(&ctx, env, clazz);'
decl += ret_stat
decl += ind(0) + '}'
decl += ind(0) + f'const JNINativeMethod {base_name}_methods[] = {{'
for m in methods:
decl += ind(1) + '{'
decl += ind(2) + f'"{base_name}",'
decl += ind(2) + f'"({m.jni()}){jni_ret}",'
decl += ind(2) + f'(void *) &{base_name}_{m.name}'
decl += ind(1) + '},'
decl += ind(0) + '};'
decl += ind(0) + f'constexpr int {base_name}_methods_num = std::size({base_name}_methods);'
decl += ind(0)
return decl
def gen_fork():
methods = [fork_l, fork_o, fork_p, fork_q_alt, fork_r, fork_samsung_m, fork_samsung_n, fork_samsung_o, fork_samsung_p]
return gen_definitions(methods, 'nativeForkAndSpecialize')
def gen_spec():
methods = [spec_q, spec_q_alt, spec_r, spec_samsung_q]
return gen_definitions(methods, 'nativeSpecializeAppProcess')
def gen_server():
methods = [server_m, server_samsung_q]
return gen_definitions(methods, 'nativeForkSystemServer')
with open('jni_hooks.hpp', 'w') as f:
f.write('// Generated by gen_jni_hooks.py\n')
f.write(gen_fork())
f.write(gen_spec())
f.write(gen_server())
``` |
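As a quick check on the generator above, `Method.jni()` simply concatenates each argument's JNI type signature, and `gen_definitions()` wraps that in `(...)` plus the return tag. A minimal, self-contained sketch reproduces the descriptor the script emits for the `fork_l` hook (the expected string is derived by hand from the argument list above):

```python
# Rebuild fork_l's JNI descriptor by hand: int -> I, int[] -> [I,
# int[][] -> [[I, String -> Ljava/lang/String;, return jint -> I.
sig_parts = ['I', 'I', '[I', 'I', '[[I', 'I',
             'Ljava/lang/String;', 'Ljava/lang/String;', '[I',
             'Ljava/lang/String;', 'Ljava/lang/String;']
descriptor = '(' + ''.join(sig_parts) + ')I'
assert descriptor == ('(II[II[[IILjava/lang/String;Ljava/lang/String;'
                      '[ILjava/lang/String;Ljava/lang/String;)I')
```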
{
"source": "joyofdata/google-cloud-json-ingester",
"score": 3
} |
#### File: app_engine/app/main.py
```python
from flask import Flask
from flask import request
from google.cloud import pubsub
import json
import os
app = Flask(__name__)
@app.route('/dummy', methods=['GET'])
def dummy():
val = request.args.get('val')
if val is None:
return 'Value is missing.', 400
return val + "_x", 200
@app.route('/upload', methods=['POST'])
def upload():
f = request.files.get('file')
if f is None:
return 'File is missing. It must be supplied as a multipart form field named "file".', 400
else:
raw_data = f.read()
bucket_name = os.environ.get("BUCKET_NAME_FOR_RAW_DATA")
project_id = os.environ.get("PROJECT_ID")
pubsub_name_cloud_storage = os.environ.get("PUBSUB_NAME_CLOUD_STORAGE")
payload = {
"bucket_name": bucket_name,
"object_name": f.filename,
"object_data": raw_data.decode("utf-8"),
"prepend_random_string_to_object_name": True
}
payload = json.dumps(payload).encode("utf-8")
pub = pubsub.PublisherClient()
topic_path = pub.topic_path(project_id, pubsub_name_cloud_storage)
pub.publish(topic_path, data=payload)
return "OK", 200
if __name__ == '__main__':
app.run(host='127.0.0.1', port=8080, debug=True)
```
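A minimal client sketch for the `/upload` route above, assuming the app runs locally on port 8080 with its environment variables configured; the file name is a placeholder:

```python
import requests

# The route reads a multipart form field named "file" and publishes its
# contents to the Pub/Sub topic configured via environment variables.
with open("sample.json", "rb") as fh:
    resp = requests.post(
        "http://127.0.0.1:8080/upload",
        files={"file": ("sample.json", fh)},
    )
print(resp.status_code, resp.text)  # expect: 200 OK
```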
#### File: joyofdata/google-cloud-json-ingester/deploy_utils.py
```python
from google.cloud import storage
from google.cloud import bigquery
from google.cloud import pubsub_v1
import subprocess as sp
def replace_tokens_in_config_files(map_token_value, map_template_to_config, f_log):
for (tf,cf) in map_template_to_config:
with open(tf, "r") as h:
tf_content = h.read()
for (t,v) in map_token_value:
tf_content = tf_content.replace("{{"+t+"}}", v)
with open(cf, "w+") as h:
h.write(tf_content)
return
def deploy_appengine_app(app_yaml, region, cwd, f_log):
log("---", f_log)
log("deploy_appengine_app()", f_log)
cmd = [
"gcloud", "app", "create", "--region", region
]
res = sp.Popen(cmd,
stdout=f_log,
stderr=f_log,
cwd=cwd
).communicate()
cmd = [
"gcloud", "-q", "app", "deploy", app_yaml
]
res = sp.Popen(cmd,
stdout=f_log,
stderr=f_log,
cwd=cwd
).communicate()
log("---", f_log)
return
def deploy_endpoints_api(openapi_yaml, cwd, f_log):
log("---", f_log)
log("deploy_endpoints_api()", f_log)
cmd = [
"gcloud", "endpoints", "services", "deploy", openapi_yaml
]
res = sp.Popen(cmd,
stdout=f_log,
stderr=f_log,
cwd=cwd
).communicate()
log("---", f_log)
return
def deploy_cloud_functions(cloud_functions, cwd, f_log):
log("---", f_log)
log("deploy_cloud_functions()", f_log)
for cf in cloud_functions:
cmd = [
"gcloud", "functions", "deploy",
cf["name"],
"--region", cf["region"],
"--source", cf["source"],
"--runtime", cf["runtime"]
]
trigger = cf["trigger"]
if trigger["type"] == "http":
cmd += ["--trigger-http"]
elif trigger["type"] == "topic":
cmd += [
"--trigger-topic", trigger["topic-name"]
]
elif trigger["type"] == "bucket":
cmd += [
"--trigger-resource", trigger["bucket-name"],
"--trigger-event", trigger["event-type"]
]
else:
raise Exception("unknown trigger type")
if "env-vars" in cf:
cmd += [
"--set-env-vars",
",".join(
"{k}={v}".format(k=k, v=v)
for (k, v)
in cf["env-vars"].items()
)]
res = sp.Popen(cmd,
stdout=f_log,
stderr=f_log,
cwd=cwd
).communicate()
log("---", f_log)
return
def create_pubsub_topics(project_id, topics, f_log):
log("---", f_log)
log("create_pubsub_topics()", f_log)
client = pubsub_v1.PublisherClient()
for t in topics:
topic_path = client.topic_path(project=project_id, topic=t["name"])
try:
client.get_topic(topic=topic_path)
log("PubSub topic '{t}' already exists.".format(t=topic_path), f_log)
except Exception:
client.create_topic(topic_path)
log("PubSub topic '{t}' created.".format(t=topic_path), f_log)
log("---", f_log)
return
def create_bigquery_tables(project_id, tables, f_log):
log("---", f_log)
log("create_bigquery_tables()", f_log)
client = bigquery.Client()
for t in tables:
dataset_id = t["dataset-id"]
full_table_id = "{p}.{d}.{t}".format(
p=project_id,
d=dataset_id,
t=t["id"]
)
cols = t["columns"]
schema = [
bigquery.SchemaField(
col["name"],
col["type"],
mode=col["mode"]
) for col in cols
]
if t["id"] in [t.table_id for t in client.list_tables(dataset_id)]:
log("BigQuery table '{t}' exists already.".format(t=t["id"]), f_log)
else:
table = bigquery.Table(full_table_id, schema=schema)
client.create_table(table)
log("BigQuery table '{t}' created.".format(t=t["id"]), f_log)
log("---", f_log)
return
def create_bigquery_datasets(datasets, f_log):
log("---", f_log)
log("create_bigquery_datasets()", f_log)
client = bigquery.Client()
for ds in datasets:
if ds["id"] in [ds.dataset_id for ds in client.list_datasets()]:
log("BigQuery dataset '{ds}' exists already.".format(ds=ds["id"]), f_log)
else:
dataset_ref = client.dataset(ds["id"])
dataset = bigquery.Dataset(dataset_ref)
client.create_dataset(dataset)
log("BigQuery dataset '{ds}' created.".format(ds=ds["id"]), f_log)
log("---", f_log)
return
def create_storage_bucket(name, location, f_log):
log("---", f_log)
log("create_storage_bucket()", f_log)
client = storage.Client()
if name in [b.name for b in client.list_buckets()]:
b = client.get_bucket(name)
b.delete_blobs(blobs=b.list_blobs())
log("Storage bucket '{b}' exists already. Bucket was emptied.".format(b=name), f_log)
else:
b = storage.Bucket(client=client)
b.name = name
b.create(location=location)
log("Storage bucket '{b}' created.".format(b=name), f_log)
log("---", f_log)
return
def log(text, f):
f.write(text + "\n")
f.flush()
``` |
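A sketch of driving the token-replacement helper above; the module path, file names, and token values are illustrative assumptions:

```python
import sys
from deploy_utils import replace_tokens_in_config_files  # assumed import path

# Every "{{TOKEN}}" occurrence in each template file is replaced before the
# rendered config file is written out.
replace_tokens_in_config_files(
    map_token_value=[("PROJECT_ID", "my-project"), ("REGION", "europe-west1")],
    map_template_to_config=[("app.template.yaml", "app.yaml")],
    f_log=sys.stdout,
)
```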
{
"source": "joyofscripting/blurio",
"score": 3
} |
#### File: joyofscripting/blurio/tests.py
```python
import unittest
import tempfile
from pathlib import Path
import blur_video
import blurio
class BlurVideoTest(unittest.TestCase):
def test_get_output_filepath(self):
input_filepath = '/Users/test/test.mp4'
output_filepath = '/Users/test/test_blurred.mp4'
self.assertEqual(blur_video.get_output_filepath(input_filepath), output_filepath)
input_filepath = '/Users/test/test'
output_filepath = '/Users/test/test_blurred'
self.assertEqual(blur_video.get_output_filepath(input_filepath), output_filepath)
def test_input_file_directory(self):
with tempfile.TemporaryDirectory() as tempdir:
with self.assertRaises(Exception) as context:
blur_video.main(['--faces', '--plates', '--input', tempdir])
self.assertTrue('The given input file does not point to a file' in str(context.exception))
def test_input_file_does_not_exist(self):
with self.assertRaises(Exception) as context:
blur_video.main(['--faces', '--plates', '--input', '12345678987654321'])
self.assertTrue('The given input file does not exist' in str(context.exception))
def test_calculate_costs(self):
with tempfile.NamedTemporaryFile(mode="wb") as video_file:
video_file.truncate(1024 * 1024)
video_file_path = Path(video_file.name)
calculated_costs = blurio.BlurIt.calculate_costs(video_file_path)
self.assertEqual(calculated_costs['filesize'], 1048576)
self.assertEqual(calculated_costs['filesize_mb'], 1.0)
self.assertEqual(calculated_costs['filesize_human_readable'], '1.00MB')
for cost_item in calculated_costs['costs']:
if cost_item['range_size'] == '0-1GB':
self.assertEqual(cost_item['total_price'], '0.02€')
elif cost_item['range_size'] == '>1GB':
self.assertEqual(cost_item['total_price'], '0.01€')
def test_blurit_task_no_faces_no_plates(self):
with self.assertRaises(Exception) as context:
blurit = blurio.BlurIt('fake_client_id', 'fake_secret_id')
blurit.logged_in = True
blurit.start_task('fakefile.mp4', blur_faces=False, blur_plates=False)
self.assertTrue('You decided not to blur faces and plates in the video. That makes no sense.' in str(context.exception))
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joyongjin/Jackal",
"score": 2
} |
#### File: management/commands/create_app.py
```python
import os
from django.core.management import CommandError
from django.core.management.templates import TemplateCommand
from jackal.settings import jackal_settings
class Command(TemplateCommand):
"""
Creates the app inside the APP_DIR specified in the jackal settings.
"""
help = (
"Create app in app folder"
)
missing_args_message = "You must provide an application name."
create_file = {
'serializer.py': 'from rest_framework import serializers\n\n',
}
remove_file = [
'tests.py', 'admin.py', 'views.py',
]
def handle(self, **options):
app_name = options.pop('name')
app_root = jackal_settings.APP_DIR
try:
if app_root is not None:
app_path = os.path.join(app_root, app_name)
try:
os.mkdir(app_path)
except FileExistsError:
raise CommandError("'%s' already exists" % app_name)
else:
app_path = None
super().handle('app', app_name, app_path, **options)
except CommandError as e:
raise e
if app_path is None:
app_path = os.path.join(os.getcwd(), app_name)
for file in self.remove_file:
os.remove(app_path + '/' + file)
for file_name, inner_data in self.create_file.items():
with open(app_path + '/{}'.format(file_name), 'w') as f:
f.write(inner_data)
return
```
#### File: Jackal/jackal/managers.py
```python
from django.db import models
from django.utils import timezone
class SoftDeleteQuerySet(models.QuerySet):
def delete(self):
self.update(deleted_at=timezone.now())
class SoftDeleteManager(models.Manager):
_queryset_class = SoftDeleteQuerySet
def get_queryset(self):
kwargs = {'model': self.model, 'using': self._db}
if hasattr(self, '_hints'):
kwargs['hints'] = self._hints
return self._queryset_class(**kwargs).filter(deleted_at__isnull=True)
@property
def defaults(self):
kwargs = {'model': self.model, 'using': self._db}
if hasattr(self, '_hints'):
kwargs['hints'] = self._hints
return self._queryset_class(**kwargs)
```
#### File: Jackal/jackal/models.py
```python
from django.db import models
from django.utils import timezone
from jackal.managers import SoftDeleteManager
class JackalModel(models.Model):
soft_delete = True
NAME_FIELD = 'name'
created_at = models.DateTimeField(auto_now_add=True, null=True)
deleted_at = models.DateTimeField(null=True)
objects = SoftDeleteManager()
defaults = models.Manager()
class Meta:
abstract = True
base_manager_name = 'objects'
def __str__(self):
if hasattr(self, self.NAME_FIELD):
return getattr(self, self.NAME_FIELD)
return super().__str__()
def delete(self, using=None, *args, **kwargs):
if kwargs.pop('soft', self.soft_delete):
self.deleted_at = timezone.now()
self.save(using=using)
else:
return super().delete(using=using, *args, **kwargs)
```
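To make the soft-delete semantics concrete, here is a hedged sketch; the `Book` model is hypothetical and assumes a configured Django project:

```python
from django.db import models
from jackal.models import JackalModel

class Book(JackalModel):
    name = models.CharField(max_length=50)  # shown by __str__ via NAME_FIELD

book = Book.objects.create(name="test")
book.delete()                       # soft delete: only stamps deleted_at
assert Book.objects.count() == 0    # default manager hides soft-deleted rows
assert Book.defaults.count() == 1   # unfiltered manager still returns them
Book.defaults.get(id=book.id).delete(soft=False)  # hard delete the row
```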
#### File: joyongjin/Jackal/setup.py
```python
import os
import sys
from shutil import rmtree
from setuptools import Command, setup, find_packages
here = os.path.abspath(os.path.dirname(__file__))
VERSION = __import__('jackal').__version__
def read(f):
with open(f, 'r', encoding='utf-8') as fh:
return fh.read()
class UploadCommand(Command):
"""Support setup.py upload."""
description = 'Build and publish the package.'
user_options = []
@staticmethod
def status(s):
"""Prints things in bold."""
print('\033[1m{0}\033[0m'.format(s))
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
try:
self.status('Removing previous builds…')
rmtree(os.path.join(here, 'dist'))
rmtree(os.path.join(here, 'build'))
rmtree(os.path.join(here, 'jackal.egg-info'))
except OSError:
pass
self.status('Building Source and Wheel (universal) distribution…')
os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
self.status('Uploading the package to PyPI via Twine…')
os.system('twine upload dist/*')
sys.exit()
setup(
name='django_jackal',
version=VERSION,
description='Boilerplate for Django and Django REST Framework',
long_description=read('README.rst'),
long_description_content_type='text/x-rst',
url='https://github.com/joyongjin/jackal',
author='<NAME>',
author_email='<EMAIL>',
license='MIT',
packages=find_packages(exclude=['tests*', '.*']),
zip_safe=False,
classifiers=[
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.5",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
install_requires=[
'django>=2.0', 'djangorestframework',
],
python_requires='>=3.5',
cmdclass={
'upload': UploadCommand,
}
)
```
#### File: Jackal/tests/test_loaders.py
```python
from django.test import override_settings
from jackal.loaders import query_function_loader, structure_loader
from jackal.settings import DEFAULT_QUERY_FUNCTION
from jackal.tests import JackalTestCase
from tests.structures import test_value
class TestLoader(JackalTestCase):
def test_structure_loader(self):
with override_settings(JACKAL={
'STATUS_CONDITION_CLASSES': [
'tests.structures.MyTestStructure'
],
'CUSTOM_STRUCTURES': {
'my_structure': [
'tests.structures.MyTestCustomStructure'
]
}
}):
structures = structure_loader('STATUS_CONDITION_CLASSES')
self.assertEqual(structures['test_key'], test_value)
structures = structure_loader('my_structure')
self.assertEqual(structures['set1'], test_value)
def test_query_function_loader(self):
with override_settings(JACKAL={
'QUERY_FUNCTION_CLASSES': [
'tests.structures.MyQueryFunction',
DEFAULT_QUERY_FUNCTION,
]
}):
funcs = query_function_loader()
self.assertIn('to_boolean', funcs)
self.assertIn('to_list', funcs)
self.assertIn('test_func', funcs)
```
#### File: tests/test_mixins/test_bind_mixin.py
```python
from django.db import models
from jackal.fields import JSONField
from jackal.mixins.bind_mixin import BindMixin
from jackal.tests import JackalTransactionTestCase
class TestBindModel(BindMixin, models.Model):
bound_fields = ['b_field1', 'b_field2']
extra = JSONField(default=dict)
field_char = models.CharField(max_length=150, null=True)
class TestBindModel2(BindMixin, models.Model):
bind_field_name = 'b_field'
bound_fields = ['b_field1', 'b_field2']
b_field = JSONField(default=dict)
class BindMixinTest(JackalTransactionTestCase):
def test_bind_values(self):
tobj = TestBindModel()
tobj.b_field1 = 'test_b_field1'
tobj.field_char = 'char_field'
tobj.save()
tobj = TestBindModel.objects.get(id=tobj.id)
self.assertEqual(tobj.b_field1, 'test_b_field1')
self.assertEqual(tobj.extra, {'b_field1': 'test_b_field1'})
self.assertIsNone(tobj.b_field2)
self.assertEqual(tobj.field_char, 'char_field')
with self.assertRaises(AttributeError):
tobj.b_field3
def test_different_bind_field_name(self):
tobj = TestBindModel2()
tobj.b_field1 = 'test_b_field1'
tobj.save()
tobj = TestBindModel2.objects.get(id=tobj.id)
self.assertEqual(tobj.b_field1, 'test_b_field1')
self.assertEqual(tobj.b_field, {'b_field1': 'test_b_field1'})
self.assertIsNone(tobj.b_field2)
with self.assertRaises(AttributeError):
tobj.b_field3
def test_create(self):
tobj = TestBindModel.objects.create(
b_field1='test_b_field1', field_char='char_field'
)
self.assertEqual(tobj.extra, {'b_field1': 'test_b_field1'})
self.assertEqual(tobj.field_char, 'char_field')
```
#### File: Jackal/tests/test_shortcuts.py
```python
from django.test import override_settings
from jackal.exceptions import NotFound
from jackal.helpers.data_helper import isiter
from jackal.settings import jackal_settings
from jackal.shortcuts import get_object_or_404, get_object_or_None, model_update, operating, status_checker, \
status_readable, get_object_or
from jackal.structures import BaseStatusCondition, BaseStatusReadable
from jackal.tests import JackalTransactionTestCase
from tests.models import TestModel
class TestCondition(BaseStatusCondition):
prefix = 'status'
@classmethod
def status__test(cls):
return {
'test': {
2: (
('<', 2), ('>', 0)
)
},
}
class TestReadable(BaseStatusReadable):
prefix = 'status'
@classmethod
def status__test(cls):
return {
'test': {
1: 'one',
2: 'two',
0: 'zero'
},
}
class TestShortcuts(JackalTransactionTestCase):
def test_iterable(self):
self.assertTrue(isiter([1, 2, 3]))
self.assertTrue(isiter((1, 2, 3)))
self.assertTrue(isiter({1, 2, 3}))
self.assertTrue(isiter({1: 1, 2: 2, 3: 3}))
self.assertFalse(isiter('String Sentence'))
self.assertFalse(isiter(None))
self.assertFalse(isiter(False))
self.assertFalse(isiter(True))
self.assertFalse(isiter(123))
def test_get_object_or(self):
obj = TestModel.objects.create(field_int=1)
self.assertIsNone(get_object_or_None(TestModel, field_int=2))
self.assertEqual(get_object_or_None(TestModel, field_int=1), obj)
with self.assertRaises(NotFound) as res:
get_object_or_404(TestModel, field_int=2)
self.assertIs(res.exception.model, TestModel)
self.assertEqual(get_object_or_404(TestModel, field_int=1), obj)
self.assertEqual('TestModel', get_object_or(TestModel, 'TestModel', field_int=2))
self.assertEqual(obj, get_object_or(TestModel, 'TestModel', field_int=1))
def test_model_update(self):
obj = TestModel.objects.create(field_int=1, field_char='text')
obj = model_update(obj, field_int=2, field_char='test2')
self.assertEqual(obj.field_int, 2)
self.assertEqual(obj.field_char, 'test2')
def test_operating(self):
self.assertTrue(operating(1, '==', 1))
self.assertTrue(operating(1, '<=', 2))
self.assertTrue(operating(1, '<', 2))
self.assertTrue(operating(1, '>', 0))
self.assertTrue(operating(1, '>=', 1))
self.assertTrue(operating(1, '!=', 2))
self.assertFalse(operating(1, '==', 2))
self.assertFalse(operating(3, '<=', 2))
self.assertFalse(operating(2, '<', 2))
self.assertFalse(operating(1, '>', 1))
self.assertFalse(operating(1, '>=', 2))
self.assertFalse(operating(1, '!=', 1))
def test_status_checker(self):
with override_settings(JACKAL={
'STATUS_CONDITION_CLASSES': [
'tests.test_shortcuts.TestCondition'
]
}):
obj1 = TestModel.objects.create(field_int=1)
obj0 = TestModel.objects.create(field_int=0)
self.assertTrue(status_checker(2, obj1.field_int, 'test'))
self.assertFalse(status_checker(2, obj0.field_int, 'test'))
def test_readable_status(self):
unknown = 'I do not know'
with override_settings(JACKAL={
'STATUS_READABLE_CLASSES': [
'tests.test_shortcuts.TestReadable'
],
'UNKNOWN_READABLE': unknown
}):
obj1 = TestModel.objects.create(field_int=1)
obj0 = TestModel.objects.create(field_int=0)
obj2 = TestModel.objects.create(field_int=2)
self.assertEqual(status_readable(obj1.field_int, 'test'), 'one')
self.assertEqual(status_readable(obj2.field_int, 'test'), 'two')
self.assertEqual(status_readable(obj0.field_int, 'test'), 'zero')
self.assertEqual(status_readable(4, 'test'), unknown)
``` |
{
"source": "joyongjin/peb",
"score": 3
} |
#### File: peb/peb/regex.py
```python
import re
filename_pattern = re.compile(r'(\/.*?\.[\w:]+)')
url_pattern = re.compile(r'https?://(?:[-\w.]|(?:%[\da-fA-F]{2}))+')
email_pattern = re.compile(r'^\w+([\.-]?\w+)*@\w+([\.-]?\w+)*(\.\w{2,3})+$')
file_extension_pattern = re.compile(r'\.(?P<ext>[a-zA-Z0-9]+$)')
def is_url(string):
return url_pattern.match(string) is not None
def is_email(string):
return email_pattern.match(string) is not None
def is_filename(string):
return filename_pattern.match(string) is not None
def find_all_filename(string):
return re.findall(filename_pattern, string)
def get_file_extension(string):
match = re.search(file_extension_pattern, string)
return match.group('ext') if match else None
```
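A few illustrative calls against the patterns above (the import path is assumed to mirror the package layout):

```python
from peb.regex import is_url, is_email, is_filename, get_file_extension

assert is_url("https://example.com/docs")
assert is_email("user@example.com")
assert is_filename("/var/log/app.log")
assert get_file_extension("archive.tar.gz") == "gz"  # only the final suffix
```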
#### File: peb/peb/uri.py
```python
def join_path(str1, str2):
if str1[-1] == '/':
if str2[0] == '/':
return str1 + str2[1:]
else:
return str1 + str2
else:
if str2[0] == '/':
return str1 + str2
else:
return str1 + '/' + str2
```
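`join_path` guarantees exactly one slash at the seam regardless of how the two parts are written; a quick sketch covering all four branches (import path assumed):

```python
from peb.uri import join_path

assert join_path("api/", "/v1") == "api/v1"  # drop the duplicate slash
assert join_path("api/", "v1") == "api/v1"
assert join_path("api", "/v1") == "api/v1"
assert join_path("api", "v1") == "api/v1"    # insert the missing slash
```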
#### File: peb/peb/utils.py
```python
def raise_or(e, value, throw=False):
if throw:
raise e
return value
def safeaccess(obj, *args, context=None, default=None):
if obj is None:
return default
context = context or dict()
value = obj
for arg in args:
value = getattr(value, arg)
if value is None:
return default
if callable(value):
value = value(**context)
return value
```
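A sketch of `safeaccess` in action: it walks the attribute chain, short-circuits to `default` on the first `None`, and calls any callable it reaches with `context` as keyword arguments. The objects below are hypothetical and the import path is assumed:

```python
from types import SimpleNamespace
from peb.utils import safeaccess  # assumed import path

user = SimpleNamespace(profile=SimpleNamespace(greet=lambda name: f"hi {name}"))
assert safeaccess(user, "profile", "greet", context={"name": "joy"}) == "hi joy"
assert safeaccess(None, "profile", default="anonymous") == "anonymous"
```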
#### File: peb/tests/collections_tests.py
```python
import random
from unittest import TestCase
import peb
from peb import wrap_list, deep_update
class IterationTest(TestCase):
def setUp(self):
pass
def test_wrap_list(self):
self.assertEqual(wrap_list('test'), ['test'])
self.assertEqual(wrap_list(['test']), ['test'])
self.assertEqual(wrap_list(1), [1])
self.assertEqual(wrap_list([1]), [1])
def test_chunk_iter(self):
size = 1000
max_value = random.randint(100000, 1000000)
divided, remain = divmod(max_value, size)
count = 0
for chunk in peb.chunk_iter(range(1, max_value), size):
count += 1
is_last = divided + 1 == count
if remain > 0 and is_last:
self.assertLess(len(chunk), size)
else:
self.assertEqual(len(chunk), size)
self.assertEqual(chunk[0], (size * (count - 1)) + 1)
if is_last:
self.assertEqual(chunk[-1], max_value - 1)  # range excludes its end value
def test_isiter(self):
self.assertTrue(peb.isiter([1, 2, 3]))
self.assertTrue(peb.isiter((1, 2, 3)))
self.assertTrue(peb.isiter([]))
self.assertTrue(peb.isiter(()))
self.assertTrue(peb.isiter({1: 1, 2: 2}))
self.assertTrue(peb.isiter({}))
self.assertTrue(peb.isiter({1, 2, 3}))
self.assertTrue(peb.isiter([_ for _ in range(1, 100)]))
self.assertFalse(peb.isiter({1: 1, 2: 2}, allow_dict=False))
self.assertFalse(peb.isiter('1234'))
self.assertFalse(peb.isiter(''))
self.assertFalse(peb.isiter(1234))
self.assertFalse(peb.isiter(None))
self.assertFalse(peb.isiter(True))
self.assertFalse(peb.isiter(False))
self.assertFalse(peb.isiter(0.1234))
self.assertFalse(peb.isiter(0))
self.assertFalse(peb.isiter(1 + 2j))
def test_deep_update(self):
source_dict = {
'test': 'test',
'deep': {
1: 2,
None: 'None',
'test': 'test',
'2deep': {
'test': 'test',
'keep': 'keep'
},
'keep': 'keep'
},
'keep': 'keep'
}
update_dict = {
'test': 'updated',
'deep': {
1: 3,
None: 'updated',
'test': 'updated',
'2deep': {
'test': 'updated',
},
},
}
expect_dict = {
'test': 'updated',
'deep': {
1: 3,
None: 'updated',
'test': 'updated',
'2deep': {
'test': 'updated',
'keep': 'keep'
},
'keep': 'keep'
},
'keep': 'keep'
}
result = deep_update(source_dict, update_dict)
self.assertEqual(expect_dict, result)
``` |
{
"source": "joyousprakhar/DialogWAE",
"score": 2
} |
#### File: joyousprakhar/DialogWAE/configs.py
```python
def config_DialogWAE():
conf = {
'maxlen':40, # maximum utterance length
'diaglen':10, # how many utterance kept in the context window
# Model Arguments
'emb_size':200, # size of word embeddings
'n_hidden':300, # number of hidden units per layer
'n_layers':1, # number of layers
'noise_radius':0.2, # stdev of noise for autoencoder (regularizer)
'z_size':200, # dimension of z # 300 performs worse
'lambda_gp':10, # Gradient penalty lambda hyperparameter.
'temp':1.0, # softmax temperature (lower --> more discrete)
'dropout':0.5, # dropout applied to layers (0 = no dropout)
# Training Arguments
'batch_size':32,
'epochs':100, # maximum number of epochs
'min_epochs':2, # minimum number of epochs to train for
'n_iters_d':5, # number of discriminator iterations in training
'lr_ae':1.0, # autoencoder learning rate
'lr_gan_g':5e-05, # generator learning rate
'lr_gan_d':1e-05, # critic/discriminator learning rate
'beta1':0.9, # beta1 for adam
'clip':1.0, # gradient clipping, max norm
'gan_clamp':0.01, # WGAN clamp (do not use clamping when applying a gradient penalty)
}
return conf
def config_DialogWAE_GMP():
conf=config_DialogWAE()
conf['n_prior_components']=3 # DailyDial: 5, SWDA: 3
conf['gumbel_temp']=0.1
return conf
``` |
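A quick usage sketch, assuming `configs.py` is imported: the GMP variant simply layers the Gaussian-mixture-prior settings on top of the base configuration.

```python
conf = config_DialogWAE_GMP()
assert conf['z_size'] == 200            # inherited from config_DialogWAE()
assert conf['n_prior_components'] == 3  # SWDA setting; DailyDial uses 5
assert conf['gumbel_temp'] == 0.1
```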
{
"source": "joyoza/cloud-ops-sandbox",
"score": 2
} |
#### File: cloud-ops-sandbox/sre-recipes/recipe_runner.py
```python
import abc
import importlib
import requests
import subprocess
import yaml
from inspect import isclass
from os import path
import utils
from recipes.impl_based.base import BaseRecipeImpl
# Default Load Generation Config
DEFAULT_LOADGEN_USER_TYPE = "BasicHomePageViewingUser"
DEFAULT_LOADGEN_USER_COUNT = 20
DEFAULT_LOADGEN_SPAWN_RATE = 1
DEFAULT_LOADGEN_TIMEOUT_SECONDS = 600
class ImplBasedRecipeRunner:
"""A SRE Recipe runner for running recipes implemented as class objects.
Given a `recipe_name`, it tries to run `recipes/impl_based/recipe_name.py`.
This runner will propagate all exceptions to the caller, and it is the caller's
responsibility to handle any exception and to perform any error logging.
"""
def __init__(self, recipe_name):
self.recipe = None
module = importlib.import_module(f"recipes.impl_based.{recipe_name}")
for attribute_name in dir(module):
attr = getattr(module, attribute_name)
if isclass(attr) and attr is not BaseRecipeImpl and issubclass(attr, BaseRecipeImpl):
self.recipe = attr()
break
if not self.recipe:
raise NotImplementedError(
f"No valid implementation exists for `{recipe_name}` recipe.")
def get_name(self):
return self.recipe.get_name()
def get_description(self):
return self.recipe.get_description()
def run_break(self):
return self.recipe.run_break()
def run_restore(self):
return self.recipe.run_restore()
def run_hint(self):
return self.recipe.run_hint()
def run_verify(self):
return self.recipe.run_verify()
class ConfigBasedRecipeRunner:
"""A SRE Recipe runner for running recipes implemented using configs.
Given a `recipe_name`, it tries to load `recipes/configs_based/recipe_name.yaml`.
This runner will propagate all exceptions to the caller, and it is the caller's
responsibility to handle any exception and to perform any error logging.
"""
def __init__(self, recipe_name, skip_loadgen=False):
filepath = path.join(path.dirname(
path.abspath(__file__)), f"recipes/configs_based/{recipe_name}.yaml")
with open(filepath, "r") as file:
self.recipe = yaml.safe_load(file.read())
if not self.recipe:
raise ValueError("Cannot parse config as YAML.")
self.action_handler = ActionHandler(skip_loadgen)
def get_name(self):
return self.recipe.get("name", "No name found")
def get_description(self):
return self.recipe.get("description", "No description found")
@property
def config(self):
return self.recipe.get("config", {})
def run_break(self):
print('Deploying broken service...')
for action in self.config.get("break", []):
self.action_handler.handle_action(action)
print('Done. Deployed broken service')
def run_restore(self):
print('Restoring service back to normal...')
for action in self.config.get("restore", []):
self.action_handler.handle_action(action)
print('Done. Restored broken service to working state.')
def run_hint(self):
hint = self.config.get("hint", None)
if hint:
print(f'Here is your hint!\n\n{hint}')
else:
print("This recipe has no hints.")
def run_verify(self):
verify_config = self.config.get("verify", [])
if not verify_config:
raise NotImplementedError("Verify is not configured")
for action in verify_config:
self.action_handler.handle_action(action)
class ActionHandler:
"""A utility helper for executing actions supported by SRE Recipe configs.
Implementation Guide
--------------------
1. Map the action name to the action handler in the `__init__` method.
2. All action handlers should take exactly one argument, which is the full
config specified for the action itself, as it is defined in YAML.
For example: {action: "run-shell-commands", commands: ['echo Hi']}
This runner will propagate all exceptions to the caller, and it is the caller's
responsibility to handle any exception and to perform any error logging.
"""
def __init__(self, skip_loadgen=False):
# Action types to action handlers
self.action_map = {
"run-shell-commands": self.run_shell_commands,
"multiple-choice-quiz": self.run_multiple_choice_quiz,
"loadgen-spawn": self.loadgen_spawn,
"loadgen-stop": self.loadgen_stop,
}
if skip_loadgen:
# ignore loadgen actions when requested
self.action_map["loadgen-spawn"] = lambda *args: None
self.action_map['loadgen-stop'] = lambda *args: None
# Reusable parameters shared between action handlers
self.loadgen_ip = None
def handle_action(self, config):
if "action" not in config:
raise ValueError("Action config missing `action` type")
action_type = config["action"]
if action_type not in self.action_map:
raise NotImplementedError(
f"Action type not implemented: {action_type}")
return self.action_map[action_type](config)
def init_loadgen_ip(self):
if not self.loadgen_ip:
self.loadgen_ip, err = utils.get_loadgen_ip()
if err:
raise RuntimeError(f"Failed to get loadgen IP: {err}")
############################ Action Handlers ###############################
def run_shell_commands(self, config):
"""Runs the commands one at a time in shell.
Config Parameters
----------------
commands: string[]
Required. A list of shell command strings.
"""
for cmd in config["commands"]:
output, err = utils.run_shell_command(cmd)
if err:
raise RuntimeError(
f"Failed to run command `{cmd}`: {err}")
def run_multiple_choice_quiz(self, config):
"""Runs an interactive multiple choice quiz.
Config Parameters
----------------
prompt: string
Required. The question prompt to display to the user.
choices: dict[]
option: string
Required. The answer display text to show to the user.
accept: bool
Optional. If true, the choice is considered correct.
"""
if "prompt" not in config:
raise ValueError("No prompt specified for the multiple choice.")
elif "choices" not in config:
raise ValueError(
"No answer choices available for the multiple choice.")
utils.run_interactive_multiple_choice(
config["prompt"], config["choices"])
def loadgen_spawn(self, config):
"""
Starts spawning a load shape at the specified spawn rate until a total
user count is reached, then stops the load after the specified timeout.
Config Parameters
----------------
user_type: string
Optional. Same as the `sre_recipe_user_identifier` for locust tasks
defined in `sre/loadgenerator/locust_tasks`.
Default: BasicHomePageViewingUser.
user_count: int
Optional. The number of total users to spawn. Default: 20.
spawn_rate: int
Optional. The number of users per second to spawn. Default: 1.
stop_after: int
Optional. The number of seconds to keep the load running before stopping.
Default: 600 seconds.
"""
self.init_loadgen_ip()
user_type = config.get(
"user_type", DEFAULT_LOADGEN_USER_TYPE)
resp = requests.post(
f"http://{self.loadgen_ip}:81/api/spawn/{user_type}",
{
"user_count": int(config.get("user_count", DEFAULT_LOADGEN_USER_COUNT)),
"spawn_rate": int(config.get("spawn_rate", DEFAULT_LOADGEN_SPAWN_RATE)),
"stop_after": int(config.get("stop_after", DEFAULT_LOADGEN_TIMEOUT_SECONDS))
})
if not resp.ok:
raise RuntimeError(
f"Failed to start load generation: {resp.status_code} {resp.reason}")
def loadgen_stop(self, config):
"""Stops any active load generation produced by SRE Recipes.
No config parameters are required.
"""
self.init_loadgen_ip()
resp = requests.post(f"http://{self.loadgen_ip}:81/api/stop")
if not resp.ok:
raise RuntimeError(
f"Failed to stop existing load generation: {resp.status_code} {resp.reason}")
```
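For reference, here is a hypothetical recipe config in the shape `ConfigBasedRecipeRunner` expects; the recipe name, commands, and quiz content are illustrative only:

```python
import yaml

recipe = yaml.safe_load("""
name: sample-recipe
description: Break and restore a sandbox service.
config:
  break:
    - action: run-shell-commands
      commands: ["echo break the service here"]
  restore:
    - action: run-shell-commands
      commands: ["echo restore the service here"]
  hint: Check the frontend deployment first.
  verify:
    - action: multiple-choice-quiz
      prompt: Which service broke?
      choices:
        - option: frontend
          accept: true
        - option: checkout
""")
assert recipe["config"]["break"][0]["action"] == "run-shell-commands"
```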
#### File: recipes/impl_based/base.py
```python
import abc
class BaseRecipeImpl(abc.ABC):
"""The base abstract class for implementation based SRE Recipe."""
@abc.abstractmethod
def get_name(self):
"""Returns the name of the recipe."""
@abc.abstractmethod
def get_description(self):
"""Returns the descripion of the recipe."""
@abc.abstractmethod
def run_break(self):
"""Performs SRE Recipe actions to break a sandbox service."""
@abc.abstractmethod
def run_restore(self):
"""Performs SRE Recipe actions to restore a sandbox service."""
@abc.abstractmethod
def run_hint(self):
"""Prints a hint about the root cause of the issue"""
@abc.abstractmethod
def run_verify(self):
"""
Verifies that the user of the recipe identified the impacted broken
service, as well as the root cause of the breakage."""
```
#### File: impl_based/disabled/dummy_recipe.py
```python
from .base import BaseRecipeImpl
class DummyRecipe(BaseRecipeImpl):
def get_name(self):
return "A dummy recipe"
def get_description(self):
return "A implementation based recipe for illustration purposes only"
def run_break(self):
print("Nothing to break.")
def run_restore(self):
print("Nothing to restore.")
def run_hint(self):
print("No hints needed. I am a dummy recipe.")
def run_verify(self):
print("Nothing to verify. It's just a dummy recipe")
``` |
{
"source": "JoyPang123/facial_identity_system",
"score": 2
} |
#### File: facial_identity_system/jetson/main.py
```python
import argparse
import os
import io
import time
import base64
from datetime import datetime
import threading
import signal
from screeninfo import get_monitors
import socket
from firebase import firebase
from google.cloud import storage
from PIL import Image
import imutils
import cv2
import numpy as np
import torch
import torch.nn as nn
import torchvision.transforms as transforms
from facenet_pytorch import MTCNN
import RPi.GPIO as GPIO
from model.triplet.model import TripletNet
device = "cpu"
ID = "hello"
detect_pin = 18
R_pin = 23
G_pin = 24
B_pin = 25
global_frame = None
running = True
img_transforms = transforms.Compose([
transforms.ToTensor(),
transforms.Resize((140, 140)),
transforms.Normalize(
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]
)
])
def exit_handler(signum, frame):
global running
running = False
def set_light(R_value, G_value, B_value):
GPIO.output(R_pin, R_value)
GPIO.output(G_pin, G_value)
GPIO.output(B_pin, B_value)
def monitor():
global global_frame
global running
BUFF_SIZE = 65536 * 32
WIDTH = 400
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.bind(("0.0.0.0", 9999))
server_socket.listen(5)
server_socket.settimeout(5)
while running:
try:
conn, client_addr = server_socket.accept()
print(client_addr)
while running:
# Get the global frame
curr_frame = global_frame.copy()
frame = imutils.resize(curr_frame, width=WIDTH)
# Encode the img to string
encoded, buffer = cv2.imencode(".jpg", frame, [cv2.IMWRITE_JPEG_QUALITY, 80])
data = np.array(buffer)
string_img = data.tobytes()  # tostring() is deprecated in NumPy
# Send to the client
conn.send(str(len(string_img)).ljust(16).encode("utf8"))
conn.send(string_img)
try:
conn.recv(1024)
except (socket.timeout, BlockingIOError) as e:
print(e)
break
except socket.timeout as e:
pass
except Exception as e:
print(e)
server_socket.close()
@torch.no_grad()
def detect_faces(mtcnn, bucket, database, model):
global global_frame
measure_dis = nn.PairwiseDistance(p=2)
while running:
value = GPIO.input(detect_pin)
if value == GPIO.HIGH:
set_light(0, 0, 1)
now = datetime.now()
curr_time = now.strftime("%Y-%m-%d %H:%M:%S")
# Read in the frame
frame = global_frame.copy()
# Do the face alignment
pil_img = Image.fromarray(cv2.cvtColor(frame, cv2.COLOR_BGR2RGB))
boxes, _ = mtcnn.detect(pil_img)
if boxes is not None:
boxes = boxes[0].astype("int").tolist()
frame = frame[boxes[1]:boxes[3], boxes[0]:boxes[2]]
else:
set_light(0, 0, 0)
continue
# Detect the results
if not (np.array(frame.shape) == 0).any():
frame_tensor = img_transforms(frame).to(device)
res = model.get_features(frame_tensor.unsqueeze(0)).cpu()
name = "None"
pass_status = False
min_distance = 100
# Face result
identity = database.get(f"/users/{ID}", "identity")
if identity is not None:
for key, value in identity.items():
# Get the value tensor
value_tensor = torch.tensor(value).to(device).unsqueeze(0)
# Compute the distance
distance = measure_dis(value_tensor, res).item()
print(distance)
if distance < 0.2:
if min_distance > distance:
print(key, distance)
name = key
pass_status = True
min_distance = distance
# Upload the data info
frame_upload(frame, curr_time, bucket)
upload_info(curr_time, database, res, pass_status, name)
if pass_status:
set_light(0, 1, 0)
time.sleep(1)
else:
set_light(1, 0, 0)
time.sleep(1)
# Reset light
set_light(0, 0, 0)
# Reset light
set_light(0, 0, 0)
def frame_upload(frame, filename, bucket):
image_blob = bucket.blob(filename)
temp_file = Image.fromarray(cv2.resize(frame, (480, 480)))
temp_file_bytes = io.BytesIO()
temp_file.save(temp_file_bytes, format="JPEG")
# Read the bytes from beginning
temp_file_bytes.seek(0)
image_blob.upload_from_file(temp_file_bytes, content_type="image/jpeg")
def upload_info(curr_time, database, res, pass_status=False, name="None"):
database.put(f"/users/{ID}/secure", curr_time, {
"pass": pass_status,
"name": name,
"features": res.view(-1).tolist()
})
if __name__ == "__main__":
parse = argparse.ArgumentParser()
parse.add_argument(
"-d", "--download", default=False,
action="store_true"
)
args = parse.parse_args()
GPIO.setmode(GPIO.BCM)
GPIO.setup(detect_pin, GPIO.IN)
GPIO.setup(R_pin, GPIO.OUT, initial=GPIO.LOW)
GPIO.setup(G_pin, GPIO.OUT, initial=GPIO.LOW)
GPIO.setup(B_pin, GPIO.OUT, initial=GPIO.LOW)
# Blink the RGB LED once as a power-on self-test
set_light(1, 0, 0)
time.sleep(0.2)
set_light(0, 1, 0)
time.sleep(0.2)
set_light(0, 0, 1)
time.sleep(0.2)
set_light(0, 0, 0)
# Set up google cloud client
os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = "<your json file>"
client = storage.Client()
bucket = client.get_bucket("face_identity")
# Set up camera
cap = cv2.VideoCapture(0)
time.sleep(1)
# Set up firebase app
database = firebase.FirebaseApplication(
"<your firebase url>", None
)
if args.download:
# Download the pre-trained weight from google cloud
print("Start downloading new weight ...")
blob = bucket.blob("model.pt")
blob.download_to_filename("weight/model.pt")
print("Downloading end")
# Set up the model
model = TripletNet(pretrained=False).eval()
model.load_state_dict(torch.load("weight/model.pt", map_location=device))
# Face cropping
mtcnn = MTCNN(select_largest=False, post_process=False, device=device)
# Create a video thread for showing the video
detect_faces_thread = threading.Thread(
target=detect_faces,
args=(mtcnn, bucket, database, model),
daemon=True
)
detect_faces_thread.start()
# Create a monitor thread
monitor_thread = threading.Thread(
target=monitor,
daemon=True
)
monitor_thread.start()
# Catch signal if necessary
signal.signal(signal.SIGINT, exit_handler)
# Set up the full screen
monitor_info = get_monitors()[0]
screen_height, screen_width = monitor_info.height, monitor_info.width
cv2.namedWindow("frame", cv2.WND_PROP_FULLSCREEN)
cv2.setWindowProperty("frame", cv2.WND_PROP_FULLSCREEN, cv2.WINDOW_FULLSCREEN)
# Upload information
while running:
dummy_window = np.full((screen_height, screen_width, 3), 190, dtype="uint8")
_, frame = cap.read()
frame = imutils.resize(frame, width=640)
center_y = (screen_height - frame.shape[0]) // 2
center_x = (screen_width - frame.shape[1]) // 2
dummy_window[center_y:center_y + frame.shape[0], center_x:center_x + frame.shape[1]] = frame
cv2.imshow("frame", dummy_window)
global_frame = frame
cv2.waitKey(40)
detect_faces_thread.join()
monitor_thread.join()
# Release the resources
GPIO.cleanup()
cap.release()
print("\rProcess end")
```
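The `monitor()` thread above speaks a small framing protocol: a 16-byte, space-padded ASCII length header followed by the JPEG payload, after which the server blocks waiting for a short reply. A minimal client sketch (the host address is a placeholder):

```python
import socket

import cv2
import numpy as np

def recv_exact(sock, n):
    """Read exactly n bytes or raise if the stream ends early."""
    buf = b""
    while len(buf) < n:
        chunk = sock.recv(n - len(buf))
        if not chunk:
            raise ConnectionError("stream closed mid-frame")
        buf += chunk
    return buf

sock = socket.create_connection(("192.168.0.10", 9999))  # placeholder address
try:
    while True:
        length = int(recv_exact(sock, 16).decode("utf8").strip())
        data = np.frombuffer(recv_exact(sock, length), dtype=np.uint8)
        frame = cv2.imdecode(data, cv2.IMREAD_COLOR)
        cv2.imshow("remote", frame)
        if cv2.waitKey(1) == ord("q"):
            break
        sock.send(b"ok")  # the server waits for a reply between frames
finally:
    sock.close()
```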
#### File: model/InfoNCE/dataset.py
```python
import os
import numpy as np
import pandas as pd
import cv2
import torch
import torch.nn as nn
from torch.utils.data import Dataset, DataLoader
import torchvision.models as models
import torchvision.transforms as transforms
class GaussianBlur():
"""Blur a single image on CPU"""
def __init__(self, kernel_size):
radius = kernel_size // 2
kernel_size = radius * 2 + 1
self.blur_h = nn.Conv2d(3, 3, kernel_size=(kernel_size, 1),
stride=1, padding=0, bias=False, groups=3)
self.blur_v = nn.Conv2d(3, 3, kernel_size=(1, kernel_size),
stride=1, padding=0, bias=False, groups=3)
self.k = kernel_size
self.r = radius
self.blur = nn.Sequential(
nn.ReflectionPad2d(radius),
self.blur_h,
self.blur_v
)
self.pil_to_tensor = transforms.ToTensor()
self.tensor_to_pil = transforms.ToPILImage()
def __call__(self, img):
img = self.pil_to_tensor(img).unsqueeze(0)
sigma = np.random.uniform(0.1, 2.0)
x = np.arange(-self.r, self.r + 1)
x = np.exp(-np.power(x, 2) / (2 * sigma * sigma))
x = x / x.sum()
x = torch.from_numpy(x).view(1, -1).repeat(3, 1)
self.blur_h.weight.data.copy_(x.view(3, 1, self.k, 1))
self.blur_v.weight.data.copy_(x.view(3, 1, 1, self.k))
with torch.no_grad():
img = self.blur(img)
img = img.squeeze()
img = self.tensor_to_pil(img)
return img
class CelebADataset(Dataset):
def __init__(self, csv_path="celebA.csv",
img_root="img_align_celeba", size=224):
self.img_root = img_root
self.csv = pd.read_csv(csv_path, index_col=0)
color_jitter = transforms.ColorJitter(0.8, 0.8, 0.8, 0.2)
self.transforms = transforms.Compose([
transforms.ToPILImage(),
transforms.RandomResizedCrop(size=size),
transforms.RandomHorizontalFlip(),
transforms.RandomApply([color_jitter], p=0.8),
transforms.RandomGrayscale(p=0.2),
GaussianBlur(kernel_size=int(0.1 * size)),
transforms.ToTensor(),
])
def __len__(self):
return len(self.csv)
def __getitem__(self, idx):
# Read in image
img_path = os.path.join(self.img_root, str(self.csv.iloc[idx, 0]))
img = cv2.imread(img_path)
# Transformation
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
img_one = self.transforms(img)
img_two = self.transforms(img)
return img_one, img_two
def make_loader(batch_size, csv_path, img_root, size):
dataset = CelebADataset(csv_path=csv_path, img_root=img_root, size=size)
dataloader = DataLoader(
dataset=dataset, batch_size=batch_size, shuffle=True,
drop_last=True, pin_memory=True
)
return dataset, dataloader
```
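Wiring the loader is then a single call; each batch yields two independently augmented views of the same images, as SimCLR-style contrastive training expects. Paths and batch size below are placeholders:

```python
dataset, loader = make_loader(
    batch_size=64,
    csv_path="celebA.csv",
    img_root="img_align_celeba",
    size=224,
)
img_one, img_two = next(iter(loader))  # two views of the same 64 images
print(img_one.shape, img_two.shape)    # torch.Size([64, 3, 224, 224]) twice
```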
#### File: model/triplet/train.py
```python
import argparse
import numpy as np
import torch
import torch.nn as nn
import wandb
from tqdm import tqdm
from utils import plot_points
from dataset import make_loader
from model import TripletNet
def train(args):
model_config = {
"batch_size": args.batch_size,
"epochs": args.epochs,
"learning rate": args.lr,
}
run = wandb.init(
project="facial_identity",
resume=False,
config=model_config,
)
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
train_loader = make_loader(
batch_size=args.batch_size, img_root=args.img_root,
csv_path=args.csv_path
)
model = TripletNet(
model_type=args.model_type, pretrained=args.pretrained,
out_dim=args.out_dim
)
model = model.to(device)
# Set up hyper-parameters
criterion = nn.TripletMarginLoss(margin=args.margin)
lr = args.lr
optimizer = torch.optim.Adam(model.parameters(), lr=lr)
pair_dis = nn.PairwiseDistance(p=2)
for epoch in range(args.epochs):
tqdm_iter = tqdm(
train_loader,
bar_format="{l_bar}|{bar}| {n_fmt}/{total_fmt} [{rate_fmt}{postfix}|{elapsed}<{remaining}]"
)
for idx, batched_data in enumerate(tqdm_iter):
model.train()
# Get data and move to device
input_anchor = batched_data["anchor"].to(device)
input_positive = batched_data["positive_image"].to(device)
input_negative = batched_data["negative_image"].to(device)
anchor, pos, neg = model(input_anchor, input_positive, input_negative)
# Compute l2 distance of the model
pos_dists = pair_dis(anchor, pos)
neg_dists = pair_dis(anchor, neg)
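            # Keep only triplets that still violate the margin (hard/semi-hard
            # mining); easy triplets have zero loss and would dilute the batch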
all_image = (neg_dists - pos_dists < args.margin).cpu().numpy().flatten()
valid_triplets = np.where(all_image == 1)
# Compute loss
loss = criterion(anchor[valid_triplets], pos[valid_triplets], neg[valid_triplets])
# Update models
optimizer.zero_grad()
loss.backward()
optimizer.step()
# Update the progress bar
tqdm_iter.set_description(f"Epoch: {epoch + 1}")
tqdm_iter.set_postfix_str(f"loss={loss.item():^7.3f} batch={len(valid_triplets[0])}/{args.batch_size}")
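            # Every 100 batches, log the loss and an embedding scatter plot
            # to Weights & Biases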
if idx % 100 == 0:
log = {
"loss": loss.item(),
"Image": plot_points(
model, csv_path=args.csv_path,
device=device, img_root=args.img_root,
num_points=1000
)
}
wandb.log(log)
# Save the weight
torch.save(model.state_dict(), f"{args.weight}/model_{epoch + 1}.pt")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--csv_path", type=str, required=True,
help="Path for the csv file for training data"
)
parser.add_argument(
"--img_root", type=str, required=True,
help="Root for the training images"
)
parser.add_argument(
"--weight", type=str, required=True,
help="Place for saving the weight"
)
parser.add_argument(
"--batch_size", type=int, default=128,
help="Batch size for training"
)
parser.add_argument(
"--margin", type=float, default=0.2,
help="Margin for triplet loss"
)
parser.add_argument(
"--epochs", type=int, default=5,
help="Training epochs"
)
parser.add_argument(
"--lr", type=float, default=3e-3,
help="Learning rate"
)
parser.add_argument(
"--model_type", type=str, default="resnet18",
help="Model used for training"
)
parser.add_argument(
"--pretrained", action="store_true",
default=False, help="Whether to use pretrained weight"
)
parser.add_argument(
"--out_dim", type=int, default=256,
help="Output dimension of the output"
)
args = parser.parse_args()
train(args)
``` |
{
"source": "JoyPang123/RL-Explore-with-Own-made-Env",
"score": 2
} |
#### File: src/DDPG_algo/ddpg.py
```python
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from src.DDPG_algo.replay_memory import ReplayMemory
from src.DDPG_algo.model import Actor, Critic
class DDPG:
def __init__(self, memory_size, num_actions,
actor_lr, critic_lr, gamma,
tau, device, img_transforms):
# Set up model
self.actor = Actor(num_actions).to(device)
self.target_actor = Actor(num_actions).to(device)
self.target_actor.eval()
self.critic = Critic(num_actions).to(device)
self.target_critic = Critic(num_actions).to(device)
self.target_critic.eval()
# Set up optimizer and criterion
self.critic_criterion = nn.MSELoss()
self.actor_optim = torch.optim.Adam(self.actor.parameters(), lr=actor_lr)
self.critic_optim = torch.optim.Adam(self.critic.parameters(), lr=critic_lr)
# Set up transforms and other hyper-parameters
self.device = device
self.img_transforms = img_transforms
self.num_actions = num_actions
self.memory = ReplayMemory(memory_size)
self.gamma = gamma
self.tau = tau
def choose_action(self, cur_state, eps):
        # Switch to evaluation mode
self.actor.eval()
# Exploration
if np.random.uniform() < eps:
action = np.random.randint(0, self.num_actions)
else: # Exploitation
cur_state = self.img_transforms(cur_state).to(self.device).unsqueeze(0)
action_list = self.actor(cur_state)
action = torch.argmax(action_list, dim=-1).item()
        # Switch back to training mode
self.actor.train()
return action
def actor_update(self, batch_data):
# Separate the data into groups
cur_state_batch = []
for cur_state, *_ in batch_data:
cur_state_batch.append(self.img_transforms(cur_state).unsqueeze(0))
cur_state_batch = torch.cat(cur_state_batch, dim=0).to(self.device)
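        # Straight-through Gumbel-softmax turns the discrete action logits into
        # a differentiable one-hot sample, letting the critic's gradient flow
        # back into the actor despite the discrete action space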
actor_actions = F.gumbel_softmax(torch.log(F.softmax(self.actor(cur_state_batch), dim=1)), hard=True)
loss = -self.critic(cur_state_batch, actor_actions).mean()
self.actor_optim.zero_grad()
loss.backward()
self.actor_optim.step()
def critic_update(self, batch_data):
# Separate the data into groups
cur_state_batch = []
reward_batch = []
action_batch = []
next_state_batch = []
done_batch = []
for cur_state, reward, action, next_state, done in batch_data:
cur_state_batch.append(self.img_transforms(cur_state).unsqueeze(0))
reward_batch.append(reward)
action_batch.append(action)
next_state_batch.append(self.img_transforms(next_state).unsqueeze(0))
done_batch.append(done)
cur_state_batch = torch.cat(cur_state_batch, dim=0).to(self.device)
reward_batch = torch.FloatTensor(reward_batch).to(self.device)
action_batch = torch.LongTensor(action_batch)
action_batch = torch.zeros(len(batch_data), self.num_actions).scatter_(
1, action_batch.unsqueeze(1), 1).to(self.device)
next_state_batch = torch.cat(next_state_batch, dim=0).to(self.device)
done_batch = torch.Tensor(done_batch).to(self.device)
# Compute the TD error between eval and target
Q_eval = self.critic(cur_state_batch, action_batch)
next_action = F.softmax(self.target_actor(next_state_batch), dim=1)
index = torch.argmax(next_action, dim=1).unsqueeze(1)
next_action = torch.zeros_like(next_action).scatter_(1, index, 1).to(self.device)
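        # Bellman target: r + gamma * Q'(s', a'); (1 - done) zeroes the
        # bootstrapped term on terminal transitions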
Q_target = reward_batch + self.gamma * (1 - done_batch) * self.target_critic(next_state_batch,
next_action).squeeze(1)
loss = self.critic_criterion(Q_eval.squeeze(1), Q_target)
self.critic_optim.zero_grad()
loss.backward()
self.critic_optim.step()
def soft_update(self):
# EMA for both actor and critic network
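        # theta_target <- tau * theta + (1 - tau) * theta_target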
for param, target_param in zip(self.actor.parameters(), self.target_actor.parameters()):
target_param.data.copy_(self.tau * param.data + (1 - self.tau) * target_param.data)
for param, target_param in zip(self.critic.parameters(), self.target_critic.parameters()):
target_param.data.copy_(self.tau * param.data + (1 - self.tau) * target_param.data)
```
#### File: src/DDPG_algo/train.py
```python
import torch
import torchvision.transforms as transforms
import gym
from src.DDPG_algo.ddpg import DDPG
def train(max_time_steps, max_iter, memory_size,
num_actions, actor_lr, critic_lr,
gamma, tau, device, batch_size):
env = gym.make("snake:snake-v0", mode="hardworking")
# Set up model training
img_transforms = transforms.Compose([
transforms.ToTensor(),
transforms.Resize((64, 64))
])
ddpg = DDPG(
memory_size, num_actions,
actor_lr, critic_lr, gamma,
tau, device, img_transforms
)
max_reward = 0
running_reward = 0
running_episodes = 0
time_step = 0
print_freq = max_iter * 2
while time_step < max_time_steps:
state = env.reset()
current_ep_reward = 0
for _ in range(max_iter):
# Get reward and state
actions = ddpg.choose_action(state["frame"], 0.1)
new_state, reward, done, _ = env.step(actions)
current_ep_reward += reward
ddpg.memory.store_experience(state["frame"], reward, actions, new_state["frame"], done)
state = new_state
if done:
break
# Wait for updating
if ddpg.memory.size() < batch_size:
continue
batch_data = ddpg.memory.sample(batch_size)
ddpg.critic_update(batch_data)
ddpg.actor_update(batch_data)
ddpg.soft_update()
time_step += 1
if time_step % print_freq == 0:
avg_reward = running_reward / running_episodes
print(f"Iteration:{running_episodes}, get average reward: {avg_reward:.2f}")
running_reward = 0
running_episodes = 0
if avg_reward > max_reward:
max_reward = avg_reward
torch.save(ddpg.actor.state_dict(), "actor_best.pt")
torch.save(ddpg.critic.state_dict(), "critic_best.pt")
running_reward += current_ep_reward
running_episodes += 1
```
#### File: src/DQN_algo/dqn.py
```python
import math
import random
from collections import deque
import numpy as np
import torch
import torchvision.transforms as transforms
from torchvision.transforms import InterpolationMode
from src.DQN_algo.model import DQN as DQNNet
def update(model, batch_size,
optimizer, criterion,
tau=0.3,
gamma=0.9):
# Set up the device same as model
used_device = model.device
# Get the data from the experience
batch_data = random.sample(model.replay,
batch_size)
# Separate the data into groups
cur_state_batch = []
reward_batch = []
action_batch = []
next_state_batch = []
done_batch = []
for cur_state, reward, action, next_state, done in batch_data:
cur_state_batch.append(model.transforms(cur_state).unsqueeze(0))
reward_batch.append(reward)
action_batch.append(action)
next_state_batch.append(model.transforms(next_state).unsqueeze(0))
done_batch.append(done)
cur_state_batch = torch.cat(cur_state_batch, dim=0).to(used_device)
reward_batch = torch.FloatTensor(reward_batch).to(used_device)
action_batch = torch.FloatTensor(action_batch).to(used_device)
next_state_batch = torch.cat(next_state_batch, dim=0).to(used_device)
done_batch = torch.Tensor(done_batch).to(used_device)
# Compute the error between eval and target net
Q_eval = model.eval_net(cur_state_batch).gather(
dim=1,
index=action_batch.long().unsqueeze(1)
).squeeze(1)
# Detach from target net to avoid computing the gradient
Q_next = model.target_net(next_state_batch).detach()
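    # Bellman target uses the greedy action value from the target net;
    # (1 - done) masks the bootstrap term on terminal states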
Q_target = reward_batch + gamma * (1 - done_batch) * torch.max(Q_next, dim=1)[0]
# Compute loss and update the model
loss = criterion(Q_eval, Q_target)
optimizer.zero_grad()
loss.backward()
optimizer.step()
    # Increment the update-step counter
model.step_counter += 1
    # Every replace_iter updates, soft-blend the eval net into the target net
if model.step_counter == model.replace_iter:
model.step_counter = 0
for eval_parameters, target_parameters in zip(model.eval_net.parameters(),
model.target_net.parameters()):
target_parameters.data.copy_(tau * eval_parameters.data + \
(1.0 - tau) * target_parameters.data)
return loss.item()
class DQN:
def __init__(self, num_actions, device,
replace_iter=150, max_len=100,
EPS_START=0.9, EPS_END=0.05, EPS_DECAY=200):
# Create network for target and evaluation
        # The agent class below shadows the imported model name, so the
        # network class is aliased as DQNNet to avoid the clash
        self.eval_net = DQNNet(num_actions=num_actions).to(device)
        self.target_net = DQNNet(num_actions=num_actions).to(device)
# Set up the replay experience
self.replay = deque(maxlen=max_len)
        # Transform the image; the model's final linear layer expects
        # 64x64 inputs (see the shape comments in model.py)
        self.transforms = transforms.Compose([
            transforms.ToTensor(),
            transforms.Resize(
                (64, 64), interpolation=InterpolationMode.BICUBIC
            )
        ])
# Set up the counter to update target from eval
self.target_counter = 0
# Set up hyper-parameters
self.device = device
self.num_actions = num_actions
self.replace_iter = replace_iter
self.step_counter = 0
# For exploration probability
self.EPS_START = EPS_START
self.EPS_END = EPS_END
self.EPS_DECAY = EPS_DECAY
self.step_total_count = 0
    def choose_action(self, cur_state):
        """Choose the action using an epsilon-greedy policy."""
        # Switch to evaluation mode
self.eval_net.eval()
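        # Exponentially decay epsilon from EPS_START to EPS_END with time
        # constant EPS_DECAY (measured in action-selection steps)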
eps_threshold = self.EPS_END + (self.EPS_START - self.EPS_END) \
* math.exp(-1. * self.step_total_count / self.EPS_DECAY)
self.step_total_count += 1
"""Choose the action using epsilon greedy policy"""
# Exploration
if np.random.uniform() < eps_threshold:
action = np.random.randint(0, self.num_actions)
else: # Exploitation
cur_state = self.transforms(cur_state).to(self.device).unsqueeze(0)
action_list = self.eval_net(cur_state)
action = torch.argmax(action_list, dim=-1).item()
        # Switch back to training mode
self.eval_net.train()
return action
def store_experience(self, state, reward,
action, next_state,
done):
"""Record the play experience into deque
The format of the experience:
[state, reward, action, next_state, done]
"""
self.replay.append([state, reward, action, next_state, done])
```
#### File: src/DQN_algo/model.py
```python
import torch.nn as nn
class DQN(nn.Module):
def __init__(self, num_actions, in_channels=3):
super(DQN, self).__init__()
# Create the layers for the model
self.layers = nn.Sequential(
            nn.Conv2d(
                in_channels=in_channels, out_channels=16,
                kernel_size=5, padding=2, stride=2
            ), # (16, 32, 32)
nn.BatchNorm2d(16),
nn.ReLU(inplace=True),
nn.Conv2d(
in_channels=16, out_channels=32,
kernel_size=3, padding=1, stride=2
), # (32, 16, 16)
nn.BatchNorm2d(32),
nn.Conv2d(
in_channels=32, out_channels=64,
kernel_size=3, padding=1, stride=2
), # (64, 8, 8)
nn.BatchNorm2d(64),
nn.Conv2d(
in_channels=64, out_channels=128,
kernel_size=3, padding=1, stride=2
), # (128, 4, 4)
nn.BatchNorm2d(128),
nn.ReLU(inplace=True),
nn.Flatten(start_dim=1),
nn.Linear(128 * 4 * 4, num_actions)
)
def forward(self, x):
return self.layers(x)
```
#### File: test/models/test_models.py
```python
import os
import torch
from src.DQN_algo import DQN
from src.A2C_algo.model import ActorCritic as AC
from src.DDPG_algo import Actor as DDPG_A
from src.DDPG_algo import Critic as DDPG_C
from src.PPO_algo.model import ActorCritic as PPO_AC
NUM_ACTIONS = 4
os.environ['KMP_DUPLICATE_LIB_OK'] = 'True'
def test_dqn():
random_tensor = torch.rand([1, 3, 64, 64])
model = DQN(NUM_ACTIONS)
model.eval()
q_value = model(random_tensor)
assert q_value.shape == (1, NUM_ACTIONS)
def test_actor_critic():
random_tensor = torch.rand([1, 3, 64, 64])
model = AC(NUM_ACTIONS)
model.eval()
actor, critic = model(random_tensor)
assert actor.shape == (1, NUM_ACTIONS) and critic.shape == (1, 1)
def test_ddpg():
random_tensor = torch.rand([1, 3, 64, 64])
actor = DDPG_A(4).eval()
critic = DDPG_C(4).eval()
act_res = actor(random_tensor)
cri_res = critic(random_tensor, act_res)
assert act_res.shape == (1, 4)
assert cri_res.shape == (1, 1)
def test_ppo():
random_tensor = torch.rand([1, 3, 64, 64])
model = PPO_AC(NUM_ACTIONS)
model.eval()
act_prob, act_logprob = model.act(random_tensor)
act_logprobs, state_values, dist_entropy = model.evaluate(random_tensor, torch.tensor([0]))
assert act_prob.shape == (1,)
assert act_logprob.shape == (1,)
assert act_logprobs.shape == (1,)
assert state_values.shape == (1, 1)
assert dist_entropy.shape == (1,)
``` |
{
"source": "JoyPang123/Textmage",
"score": 2
} |
#### File: JoyPang123/Textmage/app.py
```python
import os
from pathlib import Path
import io
import base64
# For website
from flask import (
Flask,
request,
render_template,
send_from_directory,
)
from flask_mail import (
Mail,
Message,
)
# For loading the .env file
from dotenv import load_dotenv
# DL package
import torch
import torchvision.transforms as transforms
# Model to use
from dalle_pytorch import DiscreteVAE, VQGanVAE1024, DALLE
from dalle_pytorch.tokenizer import tokenizer
# Progress bar
from tqdm import tqdm
# Tensor operation
from einops import repeat
# Load env file
load_dotenv()
# Configure Flask-Mail from the environment variables loaded above
app = Flask(__name__)
app.config.update(
    MAIL_SERVER="smtp.gmail.com",
    MAIL_PORT=465,
    MAIL_USE_SSL=True,
    MAIL_DEFAULT_SENDER=("Text to image", os.environ.get("MAIL_USERNAME")),
    MAIL_USERNAME=os.environ.get("MAIL_USERNAME"),
    # The password value was redacted in the source; reading it from the
    # environment mirrors the MAIL_USERNAME line above
    MAIL_PASSWORD=os.environ.get("MAIL_PASSWORD"),
)
mail = Mail(app)
APP_ROOT = os.path.dirname(os.path.abspath(__file__))
ALLOWED_EXTENSIONS = ("png", "jpg", "jpeg", "gif")
# Model Hyperparameters
REAL_DALLE_PATH = Path("weight/real-image.pt")
ICON_DALLE_PATH = Path("weight/icon.pt")
NUM_IMAGES = 24
BATCH_SIZE = 4
TOP_K = 0.9
device = "cuda" if torch.cuda.is_available() else "cpu"
def load_model(path, taming=False):
"""Load in the pretrained model"""
load_obj = torch.load(str(path),
map_location=torch.device(device))
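    # The checkpoint bundles DALLE hyper-parameters, VAE parameters and the
    # trained weights in a single dict; unpack them individually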
dalle_params, vae_params, weights = \
load_obj.pop("hparams"), load_obj.pop("vae_params"), load_obj.pop("weights")
dalle_params.pop("vae", None)
if taming:
vae = VQGanVAE1024()
else:
vae = DiscreteVAE(**vae_params)
dalle = DALLE(vae=vae,
**dalle_params).to(device)
dalle.load_state_dict(weights)
return dalle
# Build the model
real_dalle = load_model(REAL_DALLE_PATH,
taming=True)
icon_dalle = load_model(ICON_DALLE_PATH)
@app.route("/")
def index():
return render_template("index.html",
show_gallery="none",
images="none",
display_success="none")
@app.route("/icon")
def icon_index():
return render_template("icon.html",
show_gallery="none",
images="none",
display_success="none")
def allowed_file(filename):
return filename.endswith(ALLOWED_EXTENSIONS)
@app.route("/send", methods=["POST"])
def send_message():
msg_title = "Feed back for text to Image"
msg_body = "From " + request.form.get("email") + "<br>" + request.form.get("message")
msg = Message(subject=msg_title,
recipients=[app.config.get("MAIL_USERNAME")],
html=msg_body)
mail.send(msg)
return render_template("index.html",
show_gallery="none",
images="none",
display_success="block")
@app.route("/upload/#")
def send_image(filename):
return send_from_directory("images", filename)
def make_images(category, text):
    # Helpers to undo normalization and rescale the image into [0, 1]
def norm_ip(img, low, high):
img.clamp_(min=low, max=high)
img.sub_(low).div_(max(high - low, 1e-5))
def norm_range(t):
norm_ip(t, float(t.min()), float(t.max()))
if category == "icon":
dalle_model = icon_dalle
else:
dalle_model = real_dalle
# Generate images
text = tokenizer.tokenize([text], dalle_model.text_seq_len).to(device)
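    # Tile the tokenized prompt NUM_IMAGES times, then generate candidates
    # in chunks of BATCH_SIZE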
text = repeat(text, "() n -> b n",
b=NUM_IMAGES)
# Generate images
outputs = []
for text_chunk in tqdm(text.split(BATCH_SIZE),
desc=f"generating images for - {text}"):
output = dalle_model.generate_images(text_chunk,
filter_thres=TOP_K)
outputs.append(output)
outputs = torch.cat(outputs)
to_pil = transforms.ToPILImage()
html_images = []
for output in outputs:
output = output.clone()
norm_range(output)
# Convert to pillow
pil_image = to_pil(output).convert("RGB")
        # Encode as base64 so the image can be embedded directly in the HTML
output_buffer = io.BytesIO()
pil_image.save(output_buffer, format="PNG")
byte_data = output_buffer.getvalue()
base64_str = base64.b64encode(byte_data).decode("ascii")
html_images.append(base64_str)
return html_images
@app.route("/generate-icon", methods=["GET", "POST"])
def generate_icon():
show_gallery = "block"
# Get in the text
text = str(request.values.get("text"))
# Generate images
html_images = make_images("icon", text)
return render_template("icon.html",
images=html_images,
show_gallery=show_gallery,
display_success="none")
@app.route("/generate", methods=["GET", "POST"])
def generate():
show_gallery = "block"
# Get in the text
text = str(request.values.get("text"))
# Generate images
html_images = make_images("real", text)
return render_template("index.html",
images=html_images,
show_gallery=show_gallery,
display_success="none")
if __name__ == "__main__":
app.run(host="0.0.0.0", port=5000, debug=True)
``` |
{
"source": "joyrahman/zerocloud",
"score": 3
} |
#### File: contrib/vagrant/configure_swift.py
```python
import shutil
from ConfigParser import ConfigParser
def inject_before(some_list, item, target):
# make a copy
some_list = list(some_list)
for i, each in enumerate(some_list):
if each == target:
some_list.insert(i, item)
break
else:
raise RuntimeError("'%s' not found in pipeline" % target)
return some_list
def config_add_filter(cp, filter_name, func_name, inject_b4,
egg_name='zerocloud', extras=None):
"""
:param cp:
:class:`ConfigParser.ConfigParser` object
:param filter_name:
Name of the filter. This is the name that will be used to reference the
filter in the pipeline configuration.
:param func_name:
Middleware function name.
:param inject_b4:
When inserting a filter into the pipeline, place the filter (indicated
by `filter_name`) before `inject_b4`.
        If `None`, don't modify the pipeline.
    :param egg_name:
        Name of the egg that provides the middleware entry point
        (defaults to 'zerocloud').
    :param extras:
        Optional dict of extra options to set on the new filter section.
    """
filt = 'filter:%s' % filter_name
cp.add_section(filt)
cp.set(filt, 'use', 'egg:%(egg)s#%(func)s' % dict(egg=egg_name,
func=func_name))
if extras is not None:
for k, v in extras.items():
cp.set(filt, k, v)
if inject_b4 is not None:
pipeline = cp.get('pipeline:main', 'pipeline').split()
pipeline = inject_before(pipeline, filter_name, inject_b4)
cp.set('pipeline:main', 'pipeline', value=' '.join(pipeline))
def back_up(filename):
"""Make a copy of ``filename`` with the a .bak extension.
"""
shutil.copyfile(filename, '%s.bak' % filename)
if __name__ == '__main__':
obj_server = '/etc/swift/object-server/1.conf'
proxy_server = '/etc/swift/proxy-server.conf'
cont_server = '/etc/swift/container-server/1.conf'
back_up(obj_server)
back_up(proxy_server)
back_up(cont_server)
# Object server:
cp = ConfigParser()
cp.read(obj_server)
# basic ZeroVM object server config
config_add_filter(
cp,
'zerocloud-object-query',
'object_query',
'object-server',
extras={
'zerovm_sysimage_devices': 'python2.7 /usr/share/zerovm/python.tar',
'zerovm_timeout': '30',
}
)
# Set verbose logging on the object server
cp.set('DEFAULT', 'log_level', 'DEBUG')
with open(obj_server, 'w') as fp:
cp.write(fp)
# Proxy server:
cp = ConfigParser()
cp.read(proxy_server)
# basic ZeroVM proxy server config
config_add_filter(
cp,
'zerocloud-proxy-query',
'proxy_query',
'proxy-server',
extras={
'zerovm_sysimage_devices': ('python2.7 '
'/usr/share/zerovm/python.tar'),
'set log_name': 'zerocloud-proxy-query',
'zerovm_timeout': '30',
}
)
# proxy server job chaining middleware
config_add_filter(
cp,
'zerocloud-job-chain',
'job_chain',
'zerocloud-proxy-query',
extras={
'set log_name': 'zerocloud-job-chain',
'chain_timeout': '60',
}
)
# install swauth
config_add_filter(
cp,
'swauth',
'swauth',
None,
egg_name='swauth',
extras={
'set log_name': 'swauth',
'super_admin_key': 'swauthkey',
}
)
# replace tempauth with swauth
pipeline = cp.get('pipeline:main', 'pipeline')
pipeline = pipeline.replace('tempauth', 'swauth')
cp.set('pipeline:main', 'pipeline', pipeline)
# allow account management (needed for swauth)
cp.set('app:proxy-server', 'allow_account_management', 'true')
with open(proxy_server, 'w') as fp:
cp.write(fp)
# Container server:
cp = ConfigParser()
cp.read(cont_server)
config_add_filter(
cp,
'zerocloud-object-query',
'object_query',
'container-server',
extras={
'zerovm_sysimage_devices': 'python2.7 /usr/share/zerovm/python.tar',
'zerovm_timeout': '30',
}
)
with open(cont_server, 'w') as fp:
cp.write(fp)
```
#### File: test/unit/test_objectquery.py
```python
from contextlib import contextmanager
from StringIO import StringIO
import logging
from posix import rmdir
import unittest
import os
from time import time
from eventlet import GreenPool
from hashlib import md5
from tempfile import mkstemp, mkdtemp
from shutil import rmtree
from copy import copy
import math
import tarfile
from eventlet.wsgi import Input
from zerocloud import objectquery
from swift.common import utils
from test.unit import FakeLogger, create_random_numbers, get_sorted_numbers, \
create_tar
from test.unit import trim
from swift.common.swob import Request
from swift.common.utils import mkdirs, normalize_timestamp, get_logger
from swift.obj.server import ObjectController
from test_proxyquery import ZEROVM_DEFAULT_MOCK
from zerocloud.common import ACCESS_READABLE, ACCESS_WRITABLE, ACCESS_CDR, \
parse_location, ACCESS_RANDOM
from zerocloud import TAR_MIMES
from zerocloud.configparser import ZvmNode
from zerocloud.thread_pool import WaitPool, Zuid
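# Expose PAX extended headers as HTTP-style headers on tar members;
# monkey-patched onto TarInfo below so the tests can inspect per-file
# response metadata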
def get_headers(self):
headers = {}
for key, value in self.pax_headers.items():
if isinstance(key, unicode):
key = key.encode('utf-8')
if isinstance(value, unicode):
value = value.encode('utf-8')
headers[key.title()] = value
return headers
tarfile.TarInfo.get_headers = get_headers
class FakeLoggingHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.reset()
logging.Handler.__init__(self, *args, **kwargs)
def emit(self, record):
self.messages[record.levelname.lower()].append(record.getMessage())
def reset(self):
self.messages = {
'debug': [],
'info': [],
'warning': [],
'error': [],
'critical': [],
}
class FakeApp(ObjectController):
def __init__(self, conf):
ObjectController.__init__(self, conf)
self.bytes_per_sync = 1
self.fault = False
def __call__(self, env, start_response):
if self.fault:
raise Exception
ObjectController.__call__(self, env, start_response)
class OsMock():
def __init__(self):
self.closed = False
self.unlinked = False
self.path = os.path
self.SEEK_SET = os.SEEK_SET
def close(self, fd):
self.closed = True
raise OSError
def unlink(self, fd):
self.unlinked = True
raise OSError
def write(self, fd, str):
return os.write(fd, str)
def read(self, fd, bufsize):
return os.read(fd, bufsize)
def lseek(self, fd, pos, how):
return os.lseek(fd, pos, how)
class TestObjectQuery(unittest.TestCase):
def setUp(self):
utils.HASH_PATH_SUFFIX = 'endcap'
self.testdir = \
os.path.join(mkdtemp(), 'tmp_test_object_server_ObjectController')
mkdirs(os.path.join(self.testdir, 'sda1', 'tmp'))
self.conf = {
'devices': self.testdir,
'mount_check': 'false',
'disable_fallocate': 'true',
'zerovm_sysimage_devices': ('sysimage1 /opt/zerovm/sysimage1 '
'sysimage2 /opt/zerovm/sysimage2')
}
self.obj_controller = FakeApp(self.conf)
self.app = objectquery.ObjectQueryMiddleware(
self.obj_controller, self.conf, logger=FakeLogger())
self.app.zerovm_maxoutput = 1024 * 1024 * 10
self.zerovm_mock = None
self.uid_generator = Zuid()
def tearDown(self):
""" Tear down for testing swift.object_server.ObjectController """
rmtree(os.path.dirname(self.testdir))
if self.zerovm_mock:
os.unlink(self.zerovm_mock)
def setup_zerovm_query(self, mock=None):
# ensure that python executable is used
zerovm_mock = ZEROVM_DEFAULT_MOCK
if mock:
fd, zerovm_mock = mkstemp()
os.write(fd, mock)
os.close(fd)
self.zerovm_mock = zerovm_mock
self.app.zerovm_exename = ['python', zerovm_mock]
        # do not set it lower than 2 * BLOCKSIZE (2 * 512);
        # doing so breaks the tar RPC protocol
self.app.app.network_chunk_size = 2 * 512
randomnumbers = create_random_numbers(10)
self.create_object(randomnumbers)
self._nexescript = 'return pickle.dumps(sorted(id))'
self._sortednumbers = get_sorted_numbers()
self._randomnumbers_etag = md5()
self._randomnumbers_etag.update(randomnumbers)
self._randomnumbers_etag = self._randomnumbers_etag.hexdigest()
self._sortednumbers_etag = md5()
self._sortednumbers_etag.update(self._sortednumbers)
self._sortednumbers_etag = self._sortednumbers_etag.hexdigest()
self._nexescript_etag = md5()
self._nexescript_etag.update(self._nexescript)
self._nexescript_etag = self._nexescript_etag.hexdigest()
self._stderr = '\nfinished\n'
self._emptyresult = '(l.'
self._emptyresult_etag = md5()
self._emptyresult_etag.update(self._emptyresult)
self._emptyresult_etag = self._emptyresult_etag.hexdigest()
def create_object(self, body, path='/sda1/p/a/c/o'):
timestamp = normalize_timestamp(time())
headers = {'X-Timestamp': timestamp,
'Content-Type': 'application/octet-stream'}
req = Request.blank(path,
environ={'REQUEST_METHOD': 'PUT'}, headers=headers)
req.body = body
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 201)
def zerovm_object_request(self):
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'application/x-gtar',
'x-zerovm-execute': '1.0',
'x-account-name': 'a',
'x-zerovm-access': 'GET'})
req.headers['x-zerocloud-id'] = self.uid_generator.get()
return req
def zerovm_free_request(self):
req = Request.blank('/sda1/p/a',
environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'application/x-gtar',
'x-zerovm-execute': '1.0',
'x-account-name': 'a',
'x-zerovm-access': ''})
req.headers['x-zerocloud-id'] = self.uid_generator.get()
return req
def test_tmpdir_mkstemp_creates_dir(self):
tmpdir = os.path.join(self.testdir, 'sda1', 'tmp')
os.rmdir(tmpdir)
with objectquery.TmpDir(tmpdir, 'sda1').mkstemp():
self.assert_(os.path.exists(tmpdir))
def __test_QUERY_realzvm(self):
orig_exe = self.app.zerovm_exename
orig_sysimages = self.app.zerovm_sysimage_devices
try:
self.app.zerovm_sysimage_devices['python-image'] = (
'/media/40G/zerovm-samples/zshell/zpython2/python.tar'
)
self.setup_zerovm_query()
self.app.zerovm_exename = ['/opt/zerovm/bin/zerovm']
req = self.zerovm_free_request()
req.headers['x-zerovm-daemon'] = 'asdf'
conf = ZvmNode(1, 'python', parse_location(
'file://python-image:python'), args='hello.py')
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf.add_new_channel(
'python-image', ACCESS_READABLE | ACCESS_RANDOM)
conf.add_new_channel('image', ACCESS_CDR, removable='yes')
conf = conf.dumps()
sysmap = StringIO(conf)
image = open('/home/kit/python-script.tar', 'rb')
with self.create_tar({'sysmap': sysmap, 'image': image}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
resp = req.get_response(self.app)
print ['x-zerovm-daemon', resp.headers.get('x-zerovm-daemon',
'---')]
print ['x-nexe-cdr-line', resp.headers['x-nexe-cdr-line']]
if resp.content_type in TAR_MIMES:
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
for n, m in zip(names, members):
print [n, tar.extractfile(m).read()]
else:
print resp.body
finally:
self.app.zerovm_exename = orig_exe
self.app.zerovm_sysimage_devices = orig_sysimages
def test_QUERY_sort(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(members[-1].size, len(self._sortednumbers))
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), self._sortednumbers)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
# self.assertEqual(self.app.logger.log_dict['info'][0][0][0],
# 'Zerovm CDR: 0 0 0 0 1 46 2 56 0 0 0 0')
def test_QUERY_sort_textout(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO('return str(sorted(id))')
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
# self.assertEqual(self.app.logger.log_dict['info'][0][0][0],
# 'Zerovm CDR: 0 0 0 0 1 46 2 40 0 0 0 0')
def test_QUERY_http_message(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel(
'stdout', ACCESS_WRITABLE, content_type='message/http')
conf = conf.dumps()
sysmap = StringIO(conf)
nexefile = StringIO(trim(r'''
resp = '\n'.join([
'HTTP/1.1 200 OK',
'Content-Type: application/json',
'X-Object-Meta-Key1: value1',
'X-Object-Meta-Key2: value2',
'', ''
])
out = str(sorted(id))
return resp + out
'''))
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
stdout_headers = members[-1].get_headers()
self.assertEqual(stdout_headers['Content-Type'],
'application/json')
self.assertEqual(stdout_headers['X-Object-Meta-Key1'],
'value1')
self.assertEqual(stdout_headers['X-Object-Meta-Key2'],
'value2')
def test_QUERY_cgi_message(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel(
'stdout', ACCESS_WRITABLE, content_type='message/cgi')
conf = conf.dumps()
sysmap = StringIO(conf)
nexefile = StringIO(trim(r'''
resp = '\n'.join([
'Content-Type: application/json',
'X-Object-Meta-Key1: value1',
'X-Object-Meta-Key2: value2',
'', ''
])
out = str(sorted(id))
return resp + out
'''))
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), '[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
stdout_headers = members[-1].get_headers()
self.assertEqual(stdout_headers['Content-Type'],
'application/json')
self.assertEqual(stdout_headers['X-Object-Meta-Key1'],
'value1')
self.assertEqual(stdout_headers['X-Object-Meta-Key2'],
'value2')
def test_QUERY_invalid_http_message(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel(
'stdout', ACCESS_WRITABLE, content_type='message/http')
conf = conf.dumps()
sysmap = StringIO(conf)
nexefile = StringIO(trim('''
resp = '\\n'.join(['Status: 200 OK',
'Content-Type: application/json', '', ''])
out = str(sorted(id))
return resp + out
'''))
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
file = tar.extractfile(members[-1])
self.assertEqual(file.read(),
'Status: 200 OK\n'
'Content-Type: application/json\n\n'
'[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]')
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEqual(
resp.headers['content-type'], 'application/x-gtar')
stdout_headers = members[-1].get_headers()
self.assertEqual(stdout_headers['Content-Type'], 'message/http')
def test_QUERY_invalid_nexe(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO('INVALID')
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(members[-1].size, 0)
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), '')
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'nexe is invalid')
self.assertEqual(resp.headers['x-nexe-validation'], '1')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEqual(
resp.headers['content-type'], 'application/x-gtar')
# self.assertEqual(self.app.logger.log_dict['info'][0][0][0],
# 'Zerovm CDR: 0 0 0 0 0 0 0 0 0 0 0 0')
def test_QUERY_freenode(self):
# running code without input file
self.setup_zerovm_query()
rmdir(os.path.join(self.testdir, 'sda1', 'tmp'))
req = self.zerovm_free_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(members[-1].size, len(self._emptyresult))
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), self._emptyresult)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEqual(
resp.headers['content-type'], 'application/x-gtar')
# self.assertEqual(self.app.logger.log_dict['info'][0][0][0],
# 'Zerovm CDR: 0 0 0 0 1 0 2 13 0 0 0 0')
def test_QUERY_write_only(self):
# running the executable creates a new object in-place
self.setup_zerovm_query()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
meta = {'key1': 'value1',
'key2': 'value2'}
content_type = 'application/x-pickle'
conf.add_new_channel('stdout',
ACCESS_WRITABLE,
parse_location('swift://a/c/out'),
meta_data=meta,
content_type=content_type)
conf = conf.dumps()
sysmap = StringIO(conf)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/out',
environ={'REQUEST_METHOD': 'POST'},
headers={
'Content-Type': 'application/x-gtar',
'x-zerovm-execute': '1.0',
'x-zerocloud-id': self.uid_generator.get(),
'x-timestamp': timestamp,
'x-zerovm-access': 'PUT'
})
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 0)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
req = Request.blank('/sda1/p/a/c/out')
resp = self.obj_controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, len(self._emptyresult))
self.assertEqual(resp.body, self._emptyresult)
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEqual(resp.content_type, content_type)
for k, v in meta.iteritems():
self.assertEqual(resp.headers['x-object-meta-%s' % k], v)
def test_QUERY_write_and_report(self):
# running the executable creates a new object from stdout
# and sends stderr output to the user
self.setup_zerovm_query()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
meta = {'key1': 'value1',
'key2': 'value2'}
content_type = 'application/x-pickle'
conf.add_new_channel('stdout',
ACCESS_WRITABLE,
parse_location('swift://a/c/out'),
meta_data=meta,
content_type=content_type)
conf.add_new_channel('stderr', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
timestamp = normalize_timestamp(time())
req = Request.blank('/sda1/p/a/c/out',
environ={'REQUEST_METHOD': 'POST'},
headers={
'Content-Type': 'application/x-gtar',
'x-zerovm-execute': '1.0',
'x-zerocloud-id': self.uid_generator.get(),
'x-timestamp': timestamp,
'x-zerovm-access': 'PUT'
})
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stderr', names)
self.assertEqual(names[-1], 'stderr')
f = tar.extractfile(members[-1])
self.assertEqual(f.read(), self._stderr)
req = Request.blank('/sda1/p/a/c/out')
resp = self.obj_controller.GET(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, len(self._emptyresult))
self.assertEqual(resp.body, self._emptyresult)
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEqual(resp.content_type, content_type)
for k, v in meta.iteritems():
self.assertEqual(resp.headers['x-object-meta-%s' % k], v)
def test_QUERY_OsErr(self):
def mock(*args):
raise Exception('Mock lseek failed')
self.app.os_interface = OsMock()
self.setup_zerovm_query()
req = self.zerovm_free_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(members[-1].size, len(self._emptyresult))
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), self._emptyresult)
del self.app.parser_config['limits']['wbytes']
self.setup_zerovm_query()
req = self.zerovm_free_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 500)
self.setup_zerovm_query()
req = self.zerovm_free_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', '/c/exe')
conf.add_new_channel('stderr', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 500)
def test_QUERY_nexe_environment(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf.args = 'aaa bbb'
conf.env = {'KEY_A': 'value_a', 'KEY_B': 'value_b'}
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 200)
def test_QUERY_multichannel(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'input', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel(
'output', ACCESS_WRITABLE, parse_location('swift://a/c/o2'))
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 200)
def test_QUERY_std_list(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel(
'stdout', ACCESS_WRITABLE, parse_location('swift://a/c/o2'))
conf.add_new_channel('stderr', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 200)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stderr', names)
self.assertEqual(names[-1], 'stderr')
self.assertEqual(members[-1].size, len(self._stderr))
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), self._stderr)
self.assertIn('stdout', names)
self.assertEqual(names[0], 'stdout')
self.assertEqual(members[0].size, len(self._sortednumbers))
file = tar.extractfile(members[0])
self.assertEqual(file.read(), self._sortednumbers)
def test_QUERY_logger(self):
# check logger assignment
logger = get_logger({}, log_route='obj-query-test')
self.app = objectquery.ObjectQueryMiddleware(
self.obj_controller, self.conf, logger)
self.assertIs(logger, self.app.logger)
def test_QUERY_object_not_exists(self):
        # check that querying a non-existent object returns 404
req = self.zerovm_object_request()
nexefile = StringIO('SCRIPT')
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 404)
def test_QUERY_invalid_path(self):
        # check that querying just a container (no object) fails
req = Request.blank('/sda1/p/a/c',
environ={'REQUEST_METHOD': 'POST'},
headers={
'x-zerovm-execute': '1.0',
'x-zerocloud-id': self.uid_generator.get()
})
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 400)
def test_QUERY_max_upload_time(self):
class SlowBody():
def __init__(self, body):
self.body = body
def read(self, size=-1):
return self.body.read(10)
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
fp = open(tar, 'rb')
length = os.path.getsize(tar)
req.body_file = Input(SlowBody(fp), length)
req.content_length = length
resp = req.get_response(self.app)
fp.close()
self.assertEquals(resp.status_int, 200)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar_out = tarfile.open(name)
names = tar_out.getnames()
members = tar_out.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[0], 'stdout')
self.assertEqual(members[0].size, len(self._sortednumbers))
file = tar_out.extractfile(members[0])
self.assertEqual(file.read(), self._sortednumbers)
orig_max_upload_time = self.app.max_upload_time
self.app.max_upload_time = 0.001
fp = open(tar, 'rb')
length = os.path.getsize(tar)
req.body_file = Input(SlowBody(fp), length)
req.content_length = length
resp = req.get_response(self.app)
fp.close()
self.app.max_upload_time = orig_max_upload_time
self.assertEquals(resp.status_int, 408)
def test_QUERY_no_content_type(self):
req = self.zerovm_object_request()
del req.headers['Content-Type']
req.body = 'SCRIPT'
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 400)
self.assert_('No content type' in resp.body)
def test_QUERY_invalid_content_type(self):
req = self.zerovm_object_request()
req.headers['Content-Type'] = 'application/blah-blah-blah'
req.body = 'SCRIPT'
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 400)
self.assert_('Invalid Content-Type' in resp.body)
def test_QUERY_invalid_path_encoding(self):
req = Request.blank('/sda1/p/a/c/o'.encode('utf-16'),
environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'application/x-gtar',
'x-zerovm-execute': '1.0',
'x-account-name': 'a'})
req.body = 'SCRIPT'
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 412)
self.assert_('Invalid UTF8' in resp.body)
def test_QUERY_error_upstream(self):
self.obj_controller.fault = True
req = Request.blank('/sda1/p/a/c/o',
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type': 'application/x-gtar'})
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 500)
self.assert_('Traceback' in resp.body)
def __test_QUERY_script_invalid_etag(self):
# we cannot etag the tar stream because we mangle it while
# transferring, on the fly
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', '/c/exe')
conf.add_new_channel('stdin', ACCESS_READABLE, '/c/o')
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
fp = open(tar, 'rb')
etag = md5()
etag.update(fp.read())
fp.close()
req.headers['etag'] = etag.hexdigest()
req.body_file = open(tar, 'rb')
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 200)
etag = md5()
etag.update('blah-blah')
req.headers['etag'] = etag.hexdigest()
req.body_file = open(tar, 'rb')
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 422)
def test_QUERY_short_body(self):
# This test exercises a case where a request is submitted with a
# content length of X, but the actual amount of data sent in the body
# is _less_ than X.
# This is interpreted as a "499 Client Closed Request" (prematurely).
class ShortBody():
def __init__(self):
self.sent = False
def read(self, size=-1):
if not self.sent:
self.sent = True
return ' ' * 3
return ''
self.setup_zerovm_query()
req = Request.blank('/sda1/p/a/c/o',
environ={
'REQUEST_METHOD': 'POST',
'wsgi.input': Input(ShortBody(), 4)
},
headers={
'X-Timestamp': normalize_timestamp(time()),
'x-zerovm-execute': '1.0',
'x-zerocloud-id': self.uid_generator.get(),
'Content-Length': '4',
'Content-Type': 'application/x-gtar'
})
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 499)
def test_QUERY_long_body(self):
# This test exercises a case where a request is submitted with a
# content length of X, but the actual amount of data sent in the body
# is _greater_ than X.
# This would indicate that the `Content-Length` is wrong, and thus
# results in a "400 Bad Request".
class LongBody():
def __init__(self):
self.sent = False
def read(self, size=-1):
if not self.sent:
self.sent = True
return ' ' * 5
return ''
self.setup_zerovm_query()
req = Request.blank('/sda1/p/a/c/o',
environ={
'REQUEST_METHOD': 'POST',
'wsgi.input': Input(LongBody(), 4)
},
headers={
'X-Timestamp': normalize_timestamp(time()),
'x-zerovm-execute': '1.0',
'x-zerocloud-id': self.uid_generator.get(),
'Content-Length': '4',
'Content-Type': 'application/x-gtar'
})
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 400)
def test_QUERY_zerovm_stderr(self):
self.setup_zerovm_query(trim(r'''
import sys
sys.stderr.write('some shit happened\n')
'''))
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 500)
self.assertIn('ERROR OBJ.QUERY retcode=OK, '
'zerovm_stdout=some shit happened',
resp.body)
self.setup_zerovm_query(trim(r'''
import sys
import time
sys.stdout.write('0\n\nok.\n')
for i in range(20):
time.sleep(0.1)
sys.stderr.write(''.zfill(4096))
'''))
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 500)
self.assertIn('ERROR OBJ.QUERY retcode=Output too long', resp.body)
self.setup_zerovm_query(trim(r'''
import sys, time, signal
signal.signal(signal.SIGTERM, signal.SIG_IGN)
time.sleep(0.9)
sys.stdout.write('0\n\nok.\n')
sys.stderr.write(''.zfill(4096*20))
'''))
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
req.headers['x-zerovm-timeout'] = 1
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 500)
self.assertIn(
'ERROR OBJ.QUERY retcode=Output too long', resp.body)
def test_QUERY_zerovm_term_timeouts(self):
self.setup_zerovm_query(trim(r'''
from time import sleep
sleep(10)
'''))
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
req.headers['x-zerovm-timeout'] = 1
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 500)
self.assertIn('ERROR OBJ.QUERY retcode=Timed out', resp.body)
def test_QUERY_zerovm_kill_timeouts(self):
self.setup_zerovm_query(trim(r'''
import signal, time
signal.signal(signal.SIGTERM, signal.SIG_IGN)
time.sleep(10)
'''))
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
orig_kill_timeout = self.app.zerovm_kill_timeout
try:
self.app.zerovm_kill_timeout = 0.1
req.headers['x-zerovm-timeout'] = 1
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 500)
self.assertIn('ERROR OBJ.QUERY retcode=Killed', resp.body)
finally:
self.app.zerovm_kill_timeout = orig_kill_timeout
    def test_QUERY_simultaneous_running_zerovm_limits(self):
self.setup_zerovm_query()
nexefile = StringIO('return sleep(.2)')
conf = ZvmNode(1, 'sleep', parse_location('swift://a/c/exe'))
conf = conf.dumps()
sysmap = StringIO(conf)
maxreq_factor = 2
r = range(0, maxreq_factor * 5)
req = copy(r)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
orig_zerovm_threadpools = self.app.zerovm_thread_pools
try:
pool = GreenPool()
t = copy(r)
def make_requests_storm(queue_factor, pool_factor):
for i in r:
req[i] = self.zerovm_free_request()
req[i].body_file = Input(open(tar, 'rb'), length)
req[i].content_length = length
req[i].headers['x-zerovm-timeout'] = 5
size = int(maxreq_factor * pool_factor * 5)
queue = int(maxreq_factor * queue_factor * 5)
self.app.zerovm_thread_pools[
'default'] = WaitPool(size, queue)
spil_over = size + queue
for i in r:
t[i] = pool.spawn(req[i].get_response, self.app)
pool.waitall()
resp = copy(r)
for i in r[:spil_over]:
resp[i] = t[i].wait()
# print 'expecting ok #%s: %s' % (i, resp[i])
self.assertEqual(resp[i].status_int, 200)
for i in r[spil_over:]:
resp[i] = t[i].wait()
# print 'expecting fail #%s: %s' % (i, resp[i])
self.assertEqual(resp[i].status_int, 503)
self.assertEqual(resp[i].body, 'Slot not available')
make_requests_storm(0.2, 0.4)
make_requests_storm(0, 1)
make_requests_storm(0.4, 0.6)
make_requests_storm(0, 0.1)
finally:
self.app.zerovm_thread_pools = orig_zerovm_threadpools
def test_QUERY_max_input_size(self):
self.setup_zerovm_query()
orig_maxinput = self.app.parser_config['limits']['rbytes']
try:
self.app.parser_config['limits']['rbytes'] = 0
req = self.zerovm_object_request()
req.body = 'xxxxxxxxx'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 413)
self.assertEqual(resp.body, 'RPC request too large')
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
self.create_object(
create_random_numbers(os.path.getsize(tar) + 2))
self.app.parser_config['limits'][
'rbytes'] = os.path.getsize(tar) + 1
req = self.zerovm_object_request()
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 413)
self.assertEqual(resp.body, 'Data object too large')
finally:
self.create_object(create_random_numbers(10))
self.app.parser_config['limits']['rbytes'] = orig_maxinput
def test_QUERY_max_nexe_size(self):
self.setup_zerovm_query()
orig_maxnexe = getattr(self.app, 'zerovm_maxnexe')
try:
setattr(self.app, 'zerovm_maxnexe', 0)
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 200)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
finally:
setattr(self.app, 'zerovm_maxnexe', orig_maxnexe)
def test_QUERY_bad_system_map(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = '{""}'
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual(resp.body, 'Cannot parse system map')
with create_tar({'boot': nexefile}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual(resp.body, 'No system map found in request')
def test_QUERY_sysimage(self):
self.setup_zerovm_query()
req = self.zerovm_free_request()
for dev, path in self.app.parser.sysimage_devices.items():
script = 'return mnfst.channels["/dev/%s"]["path"]'\
' + "\\n" + ' \
'open(mnfst.channels["/dev/nvram"]["path"]).read()' \
% dev
nexefile = StringIO(script)
conf = ZvmNode(
1, 'sysimage-test', parse_location('swift://a/c/exe'))
conf.add_new_channel(dev, ACCESS_CDR)
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
self.assertEqual(resp.status_int, 200)
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
data = tar.extractfile(members[-1]).read()
self.assertTrue('%s\n' % path in data)
self.assertTrue('channel=/dev/%s, mountpoint=/, access=ro, '
'removable=no\n' % dev in data)
self.assertTrue('channel=/dev/%s, mode=file\n' % dev in data)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(
resp.headers['x-nexe-system'], 'sysimage-test')
def test_QUERY_use_image_file(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', 'file://usr/bin/sort')
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf.add_new_channel('image', ACCESS_CDR)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'usr/bin/sort': nexefile}) as image_tar:
with create_tar({'image': open(image_tar, 'rb'),
'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
self.assertEqual(resp.status_int, 200)
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(members[-1].size, len(self._sortednumbers))
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), self._sortednumbers)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(
math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp))
)
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
# self.assertEqual(self.app.logger.log_dict['info'][0][0][0],
# 'Zerovm CDR: 0 0 0 0 1 46 2 56 0 0 0 0')
def test_QUERY_use_gzipped_image(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', 'file://usr/bin/sort')
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf.add_new_channel('image', ACCESS_CDR)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'usr/bin/sort': nexefile}) as image_tar:
import gzip
image_tar_gz = image_tar + '.gz'
try:
t = open(image_tar, 'rb')
gz = gzip.open(image_tar_gz, 'wb')
gz.writelines(t)
gz.close()
t.close()
with create_tar({'image.gz': open(image_tar_gz, 'rb'),
'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
self.assertEqual(resp.status_int, 200)
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(
os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(
members[-1].size, len(self._sortednumbers))
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), self._sortednumbers)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(
math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp))
)
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
finally:
try:
os.unlink(image_tar_gz)
except OSError:
pass
def test_QUERY_bypass_image_file(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf.add_new_channel('image', ACCESS_CDR)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'usr/bin/sort': StringIO('bla-bla')}) as image_tar:
with create_tar({'image': open(image_tar, 'rb'),
'sysmap': sysmap, 'boot': nexefile}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
self.assertEqual(resp.status_int, 200)
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(members[-1].size, len(self._sortednumbers))
file = tar.extractfile(members[-1])
self.assertEqual(file.read(), self._sortednumbers)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(
math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp))
)
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
# self.assertEqual(self.app.logger.log_dict['info'][0][0][0],
# 'Zerovm CDR: 0 0 0 0 1 46 1 46 0 0 0 0')
def test_QUERY_bad_channel_path(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel('stdin', ACCESS_READABLE, 'bla-bla')
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'sysmap': sysmap, 'boot': nexefile}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual(
resp.body, 'Could not resolve channel path "bla-bla" for '
'device: stdin')
def test_QUERY_filter_factory(self):
app = objectquery.filter_factory(self.conf)(FakeApp(self.conf))
self.assertIsInstance(app, objectquery.ObjectQueryMiddleware)
def test_QUERY_prevalidate(self):
self.setup_zerovm_query()
req = Request.blank('/sda1/p/a/c/exe',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'x-zerovm-validate': 'true',
'Content-Type': 'application/octet-stream'
})
req.body = self._nexescript
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-zerovm-valid'], 'true')
req = Request.blank('/sda1/p/a/c/exe',
headers={'x-zerovm-valid': 'true'})
req.body = self._nexescript
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 200)
self.assertEquals(resp.headers['x-zerovm-valid'], 'true')
req = Request.blank('/sda1/p/a/c/exe',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'
})
req.body = self._nexescript
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 201)
self.assertNotIn('x-zerovm-valid', resp.headers)
req = Request.blank('/sda1/p/a/c/exe',
headers={'x-zerovm-valid': 'true'})
req.body = self._nexescript
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 200)
self.assertNotIn('x-zerovm-valid', resp.headers)
req = Request.blank('/sda1/p/a/c/exe',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/x-nexe'
})
req.body = self._nexescript
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 201)
self.assertEquals(resp.headers['x-zerovm-valid'], 'true')
req = Request.blank('/sda1/p/a/c/exe',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'Content-Type': 'application/octet-stream'
})
req.body = 'INVALID'
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 201)
self.assertNotIn('x-zerovm-valid', resp.headers)
req = Request.blank('/sda1/p/a/c/exe',
environ={'REQUEST_METHOD': 'PUT'},
headers={
'X-Timestamp': normalize_timestamp(time()),
'x-zerovm-validate': 'true',
'Content-Type': 'application/octet-stream'
})
req.body = 'INVALID'
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 201)
self.assertNotIn('x-zerovm-valid', resp.headers)
def test_QUERY_execute_prevalidated(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
req.headers['x-zerovm-valid'] = 'true'
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar_result = tarfile.open(name)
names = tar_result.getnames()
members = tar_result.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(members[-1].size, len(self._sortednumbers))
file = tar_result.extractfile(members[-1])
self.assertEqual(file.read(), self._sortednumbers)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '2')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
req.headers['x-zerovm-valid'] = 'false'
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar_result = tarfile.open(name)
names = tar_result.getnames()
members = tar_result.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
self.assertEqual(members[-1].size, len(self._sortednumbers))
file = tar_result.extractfile(members[-1])
self.assertEqual(file.read(), self._sortednumbers)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
timestamp = normalize_timestamp(time())
self.assertEqual(math.floor(float(resp.headers['X-Timestamp'])),
math.floor(float(timestamp)))
self.assertEquals(
resp.headers['content-type'], 'application/x-gtar')
def test_zerovm_bad_exit_code(self):
@contextmanager
def save_zerovm_exename():
exename = self.app.zerovm_exename
try:
yield True
finally:
self.app.zerovm_exename = exename
self.setup_zerovm_query()
with save_zerovm_exename():
(zfd, zerovm) = mkstemp()
os.write(zfd, trim(r'''
from sys import exit
exit(255)
'''))
os.close(zfd)
self.app.zerovm_exename = ['python', zerovm]
req = self.zerovm_object_request()
nexefile = StringIO(self._nexescript)
conf = ZvmNode(1, 'exit', parse_location('swift://a/c/exe'))
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEquals(resp.status_int, 500)
self.assertIn(
'ERROR OBJ.QUERY retcode=Error, zerovm_stdout=',
resp.body
)
os.unlink(zerovm)
def test_zerovm_bad_retcode(self):
self.setup_zerovm_query()
req = self.zerovm_object_request()
nexe = trim(r'''
global error_code
error_code = 4
return pickle.dumps(sorted(id))
''')
nexefile = StringIO(nexe)
conf = ZvmNode(1, 'sort', parse_location('swift://a/c/exe'))
conf.add_new_channel(
'stdin', ACCESS_READABLE, parse_location('swift://a/c/o'))
conf.add_new_channel('stdout', ACCESS_WRITABLE)
conf = conf.dumps()
sysmap = StringIO(conf)
with create_tar({'boot': nexefile, 'sysmap': sysmap}) as tar:
length = os.path.getsize(tar)
req.body_file = Input(open(tar, 'rb'), length)
req.content_length = length
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['x-nexe-retcode'], '0')
self.assertEqual(resp.headers['x-nexe-status'], 'ok.')
self.assertEqual(resp.headers['x-nexe-validation'], '0')
self.assertEqual(resp.headers['x-nexe-system'], 'sort')
self.assertEqual(resp.headers['x-nexe-error'], 'bad return code')
fd, name = mkstemp()
for chunk in resp.app_iter:
os.write(fd, chunk)
os.close(fd)
self.assertEqual(os.path.getsize(name), resp.content_length)
tar = tarfile.open(name)
names = tar.getnames()
members = tar.getmembers()
self.assertIn('stdout', names)
self.assertEqual(names[-1], 'stdout')
fh = tar.extractfile(members[-1])
self.assertEqual(fh.read(), self._sortednumbers)
self.assertEqual(
resp.headers['content-type'], 'application/x-gtar')
class TestUtils(unittest.TestCase):
"""
Tests for misc. utilities in :mod:`zerocloud.objectquery`.
"""
def test_get_zerovm_sysimage_devices(self):
conf = dict(zerovm_sysimage_devices='image1 path1 image2 path2')
exp = dict(image1='path1', image2='path2')
self.assertEqual(exp, objectquery.get_zerovm_sysimage_devices(conf))
# If there are any trailing items in the list (that is, an odd number
# of list items), just ignore them:
conf = dict(zerovm_sysimage_devices='image1 path1 image2')
exp = dict(image1='path1')
self.assertEqual(exp, objectquery.get_zerovm_sysimage_devices(conf))
if __name__ == '__main__':
unittest.main()
```
#### File: zerocloud/zerocloud/common.py
```python
import re
ACCESS_READABLE = 0x1
ACCESS_WRITABLE = 0x1 << 1
ACCESS_RANDOM = 0x1 << 2
ACCESS_NETWORK = 0x1 << 3
ACCESS_CDR = 0x1 << 4
ACCESS_CHECKPOINT = 0x1 << 5
DEVICE_MAP = {
'stdin': ACCESS_READABLE,
'stdout': ACCESS_WRITABLE,
'stderr': ACCESS_WRITABLE,
'input': ACCESS_RANDOM | ACCESS_READABLE,
'output': ACCESS_RANDOM | ACCESS_WRITABLE,
'debug': ACCESS_NETWORK,
'image': ACCESS_CDR,
'db': ACCESS_CHECKPOINT,
'script': ACCESS_RANDOM | ACCESS_READABLE,
}
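# Illustration of the flag arithmetic above (hypothetical usage, not part of
# the original module): the ACCESS_* values are single-bit flags, so device
# capabilities compose with bitwise OR and test with bitwise AND:
#   DEVICE_MAP['input'] == ACCESS_RANDOM | ACCESS_READABLE  # -> True
#   bool(DEVICE_MAP['stdout'] & ACCESS_READABLE)            # -> False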
CLUSTER_CONFIG_FILENAME = 'boot/cluster.map'
NODE_CONFIG_FILENAME = 'boot/system.map'
ACCOUNT_HOME_PATH = ['.', '~']
RE_ILLEGAL = u'([\u0000-\u0008\u000b-\u000c\u000e-\u001f\ufffe-\uffff])' + \
u'|' + \
u'([%s-%s][^%s-%s])|([^%s-%s][%s-%s])|([%s-%s]$)|(^[%s-%s])' % \
(unichr(0xd800), unichr(0xdbff), unichr(0xdc00), unichr(0xdfff),
unichr(0xd800), unichr(0xdbff), unichr(0xdc00), unichr(0xdfff),
unichr(0xd800), unichr(0xdbff), unichr(0xdc00), unichr(0xdfff),)
def split_path(path, minsegs=1, maxsegs=None, rest_with_last=False):
"""
Validate and split the given HTTP request path.
**Examples**::
['a'] = split_path('/a')
['a', None] = split_path('/a', 1, 2)
['a', 'c'] = split_path('/a/c', 1, 2)
['a', 'c', 'o/r'] = split_path('/a/c/o/r', 1, 3, True)
:param path: HTTP Request path to be split
:param minsegs: Minimum number of segments to be extracted
:param maxsegs: Maximum number of segments to be extracted
:param rest_with_last: If True, trailing data will be returned as part
of last segment. If False, and there is
trailing data, raises ValueError.
:returns: list of segments with a length of maxsegs (non-existent
segments will return as None)
:raises: ValueError if given an invalid path
"""
if not maxsegs:
maxsegs = minsegs
if minsegs > maxsegs:
raise ValueError('minsegs > maxsegs: %d > %d' % (minsegs, maxsegs))
if rest_with_last:
segs = path.split('/', maxsegs)
minsegs += 1
maxsegs += 1
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs or
'' in segs[1:minsegs]):
raise ValueError('Invalid path: %s' % path)
else:
minsegs += 1
maxsegs += 1
segs = path.split('/', maxsegs)
count = len(segs)
if (segs[0] or count < minsegs or count > maxsegs + 1 or
'' in segs[1:minsegs] or
(count == maxsegs + 1 and segs[maxsegs])):
raise ValueError('Invalid path: %s' % path)
segs = segs[1:maxsegs]
segs.extend([None] * (maxsegs - 1 - len(segs)))
return segs
def has_control_chars(line):
if line:
if re.search(RE_ILLEGAL, line):
return True
if re.search(r"[\x01-\x1F\x7F]", line):
return True
return False
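# Hypothetical checks against the patterns above (values chosen only to
# illustrate; not from the original source):
#   has_control_chars('plain text')    # -> False
#   has_control_chars('bad\x01text')   # -> True (C0 control character)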
class ObjPath:
def __init__(self, url, path):
self.url = url
self.path = path
def __eq__(self, other):
if not isinstance(other, ObjPath):
return False
if self.url == other.url:
return True
return False
def __ne__(self, other):
if not isinstance(other, ObjPath):
return True
if self.url != other.url:
return True
return False
class SwiftPath(ObjPath):
def __init__(self, url):
(_junk, path) = url.split('swift:/')
ObjPath.__init__(self, url, path)
(account, container, obj) = split_path(path, 1, 3, True)
self.account = account
self.container = container
self.obj = obj
@classmethod
def create_url(cls, account, container, obj):
if not account:
return None
return 'swift://' + \
'/'.join(filter(None,
(account, container, obj)))
@classmethod
def init(cls, account, container, obj):
if not account:
return None
return cls(SwiftPath.create_url(account, container, obj))
def expand_account(self, account_name):
if self.account in ACCOUNT_HOME_PATH:
self.account = account_name
self.url = SwiftPath.create_url(account_name, self.container,
self.obj)
self.path = self.url.split('swift:/')[1]
class ImagePath(ObjPath):
def __init__(self, url):
(_junk, path) = url.split('file://')
ObjPath.__init__(self, url, path)
parts = path.split(':', 1)
if len(parts) > 1:
self.image = parts[0]
self.path = parts[1]
else:
self.image = 'image'
class ZvmPath(ObjPath):
def __init__(self, url):
(_junk, path) = url.split('zvm://')
ObjPath.__init__(self, url, path)
(host, device) = path.split(':', 1)
self.host = host
if device.startswith('/dev/'):
self.device = device
else:
self.device = '/dev/%s' % device
class CachePath(ObjPath):
def __init__(self, url):
(_junk, path) = url.split('cache:/')
ObjPath.__init__(self, url, path)
(etag, account, container, obj) = split_path(path, 1, 4, True)
self.etag = etag
self.account = account
self.container = container
self.obj = obj
self.path = '/%s/%s/%s' % (account, container, obj)
class NetPath(ObjPath):
def __init__(self, url):
(proto, path) = url.split('://')
ObjPath.__init__(self, url, '%s:%s' % (proto, path))
def parse_location(url):
if not url:
return None
if url.startswith('swift://'):
return SwiftPath(url)
elif url.startswith('file://'):
return ImagePath(url)
elif url.startswith('zvm://'):
return ZvmPath(url)
elif url.startswith('cache://'):
return CachePath(url)
elif url.startswith('tcp://') or url.startswith('udp://'):
return NetPath(url)
return None
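# Hypothetical dispatch examples for parse_location (illustration only; the
# URLs are made up):
#   loc = parse_location('swift://a/c/o')            # SwiftPath
#   (loc.account, loc.container, loc.obj)            # -> ('a', 'c', 'o')
#   parse_location('zvm://node1:stdout').device      # -> '/dev/stdout'
#   parse_location('file://sysimg:/bin/sort').image  # -> 'sysimg'
#   parse_location('ftp://host/obj')                 # -> None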
class ZvmChannel(object):
def __init__(self, device, access, path=None,
content_type=None, meta_data=None,
mode=None, removable='no', mountpoint='/', min_size=0):
self.device = device
self.access = access
self.path = path
self.content_type = content_type
self.meta = meta_data if meta_data else {}
self.mode = mode
self.removable = removable
self.mountpoint = mountpoint
self.min_size = min_size
```
#### File: zerocloud/zerocloud/configparser.py
```python
try:
import simplejson as json
except ImportError:
import json
import re
import traceback
from collections import OrderedDict
from copy import deepcopy
from zerocloud.common import parse_location, ZvmPath
from zerocloud.common import SwiftPath
from zerocloud.common import ObjPath
from zerocloud.common import ZvmChannel
from zerocloud.common import ACCESS_READABLE
from zerocloud.common import ACCESS_CDR
from zerocloud.common import ACCESS_WRITABLE
from zerocloud.common import ACCESS_RANDOM
from zerocloud.common import ACCESS_NETWORK
from zerocloud.common import DEVICE_MAP
from zerocloud.common import has_control_chars
CHANNEL_TYPE_MAP = {
'stdin': 0,
'stdout': 0,
'stderr': 0,
'input': 3,
'output': 3,
'debug': 0,
'image': 3,
'sysimage': 3,
'script': 3
}
ENV_ITEM = 'name=%s, value=%s\n'
STD_DEVICES = ['stdin', 'stdout', 'stderr']
# quotes commas as \x2c for [env] stanza in nvram file
# see ZRT docs
def quote_for_env(val):
return re.sub(r',', '\\x2c', str(val))
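# For example (illustration only): quote_for_env('text/html, charset=utf-8')
# returns 'text/html\x2c charset=utf-8', so a comma in a value cannot break
# the comma-separated nvram syntax.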
class ConfigFetcher(object):
def __init__(self, *args):
self.arg_list = args
def fetch_from(self, config):
for key in self.arg_list:
if key in config:
return config.get(key)
return None
FILE_LIST = ConfigFetcher('file_list', 'devices')
DEVICE = ConfigFetcher('device', 'name')
class ClusterConfigParsingError(Exception):
def __init__(self, msg):
self.msg = msg
def __str__(self):
return str(self.msg)
def _tcp_string(replication_level, destination_id, node_count, access_type):
if access_type & ACCESS_READABLE:
suffix = '0'
else:
suffix = ''
proto = ';'.join(map(
lambda i: 'tcp:%d:%s' % ((destination_id + i * node_count), suffix),
range(replication_level)
))
return proto
def _opaque_string(replication_level, cluster_id, node_count,
source_id, destination_id, access_type):
inbound = True
if access_type & ACCESS_READABLE:
suffix = ''
else:
suffix = '>'
inbound = False
fmt_func = lambda i: (
'opaque:local|%s%s-%d-%d' %
(suffix,
cluster_id,
source_id if inbound else (destination_id + i * node_count),
(destination_id + i * node_count) if inbound else source_id)
)
proto = ';'.join(map(fmt_func, range(replication_level)))
return proto
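# Hypothetical examples for the two builders above (all ids made up):
#   _tcp_string(2, 3, 5, ACCESS_READABLE)
#   # -> 'tcp:3:0;tcp:8:0'            (replica ids step by node_count)
#   _opaque_string(1, 'cid', 5, 2, 3, ACCESS_WRITABLE)
#   # -> 'opaque:local|>cid-3-2'      ('>' marks the outbound side)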
class ClusterConfigParser(object):
def __init__(self, sysimage_devices, default_content_type,
parser_config,
list_account_callback, list_container_callback,
network_type='tcp'):
"""
Create a new parser instance
:param sysimage_devices: dict of known system image devices
:param default_content_type: default content type
to use for writable objects
:param parser_config: configuration dictionary
:param list_account_callback: callback function that can be called with
(account_name, mask) to get a list of container names
that match the mask regex in an account
:param list_container_callback: callback function that can be called
with (account_name, container_name, mask) to get a list
of object names in a container that match the mask regex
"""
self.sysimage_devices = sysimage_devices
self.default_content_type = default_content_type
self.parser_config = parser_config
self.list_account = list_account_callback
self.list_container = list_container_callback
self.network_type = network_type
self.nodes = OrderedDict()
self._node_id = 1
self.total_count = 0
def find_objects(self, path, **kwargs):
"""
Find all objects in SwiftPath with wildcards
:param path: SwiftPath object that has wildcards in url string
:param **kwargs: optional arguments for list_container,
list_account callbacks
:returns list of object names
        :raises ClusterConfigParsingError: on an empty result list or
            any other error
"""
temp_list = []
if '*' in path.account:
raise ClusterConfigParsingError('Invalid path: %s'
% path.url)
if '*' in path.container:
mask = re.compile(re.escape(path.container).replace('\\*', '.*'))
try:
containers = self.list_account(path.account,
mask=mask,
**kwargs)
except Exception:
raise ClusterConfigParsingError(
'Error querying object server '
'for account: %s' % path.account)
if path.obj:
obj = path.obj
if '*' in obj:
obj = re.escape(obj).replace('\\*', '.*')
mask = re.compile(obj)
else:
mask = None
for container in containers:
if mask:
try:
obj_list = self.list_container(path.account,
container,
mask=mask, **kwargs)
except Exception:
raise ClusterConfigParsingError(
'Error querying object server '
'for container: %s' % container)
for obj in obj_list:
temp_list.append(SwiftPath.init(path.account,
container,
obj))
else:
temp_list.append(SwiftPath.init(path.account,
container,
None))
else:
obj = re.escape(path.obj).replace('\\*', '.*')
mask = re.compile(obj)
try:
for obj in self.list_container(path.account,
path.container,
mask=mask, **kwargs):
temp_list.append(SwiftPath.init(path.account,
path.container,
obj))
except Exception:
raise ClusterConfigParsingError(
'Error querying object server '
'for container: %s' % path.container)
if not temp_list:
raise ClusterConfigParsingError('No objects found in path %s'
% path.url)
return temp_list
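    # Sketch of the wildcard translation used above (illustration only):
    #   mask = re.compile(re.escape('obj*').replace('\\*', '.*'))
    #   bool(mask.match('obj123'))  # -> True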
def _get_or_create_node(self, zvm_node, index=0):
if index == 0:
new_name = zvm_node.name
else:
new_name = _create_node_name(zvm_node.name, index)
new_node = self.nodes.get(new_name)
if not new_node:
new_node = zvm_node.copy(self._node_id, new_name)
self.nodes[new_name] = new_node
self._node_id += 1
return new_node
def _add_all_connections(self, node_name, connections, source_devices):
if self.nodes.get(node_name):
connect_node = self.nodes.get(node_name)
for bind_name in connections:
src_dev = None
dst_dev = None
if source_devices:
devices = source_devices.get(bind_name, None)
if devices:
(src_dev, dst_dev) = devices
self._add_connection(connect_node, bind_name,
src_dev, dst_dev)
elif self.nodes.get(node_name + '-1'):
j = 1
connect_node = self.nodes.get(_create_node_name(node_name, j))
while connect_node:
for bind_name in connections:
src_dev = None
dst_dev = None
if source_devices:
devices = source_devices.get(bind_name, None)
if devices:
(src_dev, dst_dev) = devices
self._add_connection(connect_node, bind_name,
src_dev, dst_dev)
j += 1
connect_node = self.nodes.get(
_create_node_name(node_name, j))
else:
raise ClusterConfigParsingError(
'Non existing node in connect string for node %s'
% node_name)
def parse(self, cluster_config, add_user_image, account_name=None,
replica_resolver=None, **kwargs):
"""
Parse deserialized config and build separate job configs per node
:param cluster_config: deserialized JSON cluster map
:param add_user_image: True if we need to add user image channel
to all nodes
        :param account_name: account making the request; used to expand
            '~' and '.' home-path references in swift urls
        :param replica_resolver: optional callable returning the replica
            count for the (account, container) of writable channels
        :param **kwargs: optional arguments for list_container and
            list_account callbacks
:raises ClusterConfigParsingError: on all errors
"""
self.nodes = OrderedDict()
self._node_id = 1
try:
connect_devices = {}
for node in cluster_config:
zvm_node = ZvmNode.fromdict(node)
if isinstance(zvm_node.exe, SwiftPath):
zvm_node.exe.expand_account(account_name)
node_count = node.get('count', 1)
if isinstance(node_count, int) and node_count > 0:
pass
else:
raise ClusterConfigParsingError(
'Invalid node count: %s' % str(node_count))
file_list = FILE_LIST.fetch_from(node)
read_list = []
write_list = []
other_list = []
if file_list:
for f in file_list:
channel = _create_channel(
f, zvm_node,
default_content_type=self.default_content_type)
if isinstance(channel.path, ZvmPath):
_add_connected_device(connect_devices,
channel,
zvm_node)
continue
if isinstance(channel.path, SwiftPath):
channel.path.expand_account(account_name)
if not channel.path.obj \
and not channel.access & ACCESS_READABLE:
raise ClusterConfigParsingError(
'Container path must be read-only')
if channel.access & ACCESS_READABLE:
read_list.insert(0, channel)
elif channel.access & ACCESS_CDR:
read_list.append(channel)
elif channel.access & ACCESS_WRITABLE:
write_list.append(channel)
else:
other_list.append(channel)
read_group = False
for chan in read_list:
needs_data_in = (not chan.path
and chan.device == 'stdin')
if isinstance(chan.path, SwiftPath) \
and '*' in chan.path.path:
read_group = True
object_list = self.find_objects(chan.path,
**kwargs)
read_mask = \
re.escape(chan.path.path).replace('\\*',
'(.*)')
read_mask = re.compile(read_mask)
node_count = len(object_list)
for i in range(node_count):
new_path = object_list[i]
new_node = self._get_or_create_node(
zvm_node, index=(i + 1))
new_node.add_channel(channel=chan,
path=new_path)
new_node.store_wildcards(new_path, read_mask)
else:
if node_count > 1 or read_group:
for i in range(1, node_count + 1):
new_node = self._get_or_create_node(
zvm_node, index=i)
new_node.add_channel(channel=chan)
if needs_data_in:
new_node.data_in = True
else:
new_node = self._get_or_create_node(zvm_node)
new_node.add_channel(channel=chan)
if needs_data_in:
new_node.data_in = True
for chan in write_list:
if chan.path and isinstance(chan.path, SwiftPath):
if '*' in chan.path.url:
if read_group:
self._add_to_group(node_count, zvm_node,
chan)
else:
if node_count > 1:
read_group = True
self._create_new_group(node_count,
zvm_node, chan)
else:
if node_count > 1:
raise ClusterConfigParsingError(
'Single path %s for multiple node '
'definition: %s, please use wildcard'
% (chan.path.url, zvm_node.name))
new_node = self._get_or_create_node(zvm_node)
new_node.add_channel(channel=chan)
else:
if 'stdout' not in chan.device \
and 'stderr' not in chan.device:
raise ClusterConfigParsingError(
'Immediate response is not available '
'for device %s' % chan.device)
if node_count > 1 or read_group:
for i in range(1, node_count + 1):
new_node = self._get_or_create_node(
zvm_node, index=i)
new_node.add_channel(channel=chan)
else:
new_node = self._get_or_create_node(zvm_node)
new_node.add_channel(channel=chan)
for chan in other_list:
if not chan.path:
chan.access = ACCESS_RANDOM | ACCESS_READABLE
if node_count > 1 or read_group:
for i in range(1, node_count + 1):
new_node = self._get_or_create_node(
zvm_node, index=i)
new_node.add_channel(channel=chan)
else:
new_node = self._get_or_create_node(zvm_node)
new_node.add_channel(channel=chan)
if not any(read_list + write_list + other_list):
self._get_or_create_node(zvm_node)
except ClusterConfigParsingError:
raise
except Exception:
print traceback.format_exc()
raise ClusterConfigParsingError('Config parser internal error')
if not self.nodes:
raise ClusterConfigParsingError('Config parser cannot resolve '
'any job nodes')
for node in cluster_config:
connection_list = node.get('connect')
node_name = node.get('name')
src_devices = connect_devices.get(node_name, None)
if not connection_list:
if src_devices:
connection_list = [connected_node for connected_node in
src_devices.iterkeys()]
else:
continue
self._add_all_connections(node_name, connection_list, src_devices)
if add_user_image:
for node in self.nodes.itervalues():
node.add_new_channel('image', ACCESS_CDR, removable='yes')
if account_name:
self.resolve_path_info(account_name, replica_resolver)
self.total_count = 0
for n in self.nodes.itervalues():
self.total_count += n.replicate
return ClusterConfig(self.nodes, self.total_count)
def _add_to_group(self, node_count, zvm_node, chan):
for i in range(1, node_count + 1):
new_node = self.nodes.get(_create_node_name(zvm_node.name, i))
new_url = _extract_stored_wildcards(chan.path, new_node)
new_loc = parse_location(new_url)
new_node.add_channel(channel=chan, path=new_loc)
def _create_new_group(self, node_count, zvm_node, chan):
if node_count == 1:
new_url = chan.path.url.replace('*', zvm_node.name)
new_loc = parse_location(new_url)
new_node = self._get_or_create_node(zvm_node)
new_node.add_channel(channel=chan, path=new_loc)
new_node.wildcards = [zvm_node.name] * chan.path.url.count('*')
return
for i in range(1, node_count + 1):
new_name = _create_node_name(zvm_node.name, i)
new_url = chan.path.url.replace('*', new_name)
new_loc = parse_location(new_url)
new_node = self._get_or_create_node(zvm_node, index=i)
new_node.add_channel(channel=chan, path=new_loc)
new_node.wildcards = [new_name] * chan.path.url.count('*')
def _add_connection(self, node, bind_name,
src_device=None,
dst_device=None):
if not dst_device:
dst_device = '/dev/in/' + node.name
else:
dst_device = _resolve_wildcards(node, dst_device)
if self.nodes.get(bind_name):
bind_node = self.nodes.get(bind_name)
if bind_node is node:
raise ClusterConfigParsingError(
'Cannot bind to itself: %s' % bind_name)
bind_node.bind.append((node.name, dst_device))
if not src_device:
node.connect.append((bind_name,
'/dev/out/%s' % bind_name))
else:
src_device = _resolve_wildcards(bind_node, src_device)
node.connect.append((bind_name, src_device))
elif self.nodes.get(bind_name + '-1'):
i = 1
bind_node = self.nodes.get(bind_name + '-1')
while bind_node:
if bind_node is not node:
bind_node.bind.append((node.name, dst_device))
if not src_device:
node.connect.append(('%s-%d' % (bind_name, i),
'/dev/out/%s-%d'
% (bind_name, i)))
else:
src_device = _resolve_wildcards(bind_node, src_device)
node.connect.append(('%s-%d' % (bind_name, i),
src_device))
i += 1
bind_node = self.nodes.get(bind_name + '-' + str(i))
else:
raise ClusterConfigParsingError(
'Non-existing node in connect %s' % bind_name)
def build_connect_string(self, node, cluster_id=''):
"""
Builds connect strings from connection information stored in job config
:param node: ZvmNode object we build strings for
"""
if not self.nodes:
return
node_count = len(self.nodes)
tmp = []
for (dst, dst_dev) in node.bind:
dst_id = self.nodes.get(dst).id
dst_repl = self.nodes.get(dst).replicate
if self.network_type == 'opaque':
proto = _opaque_string(dst_repl, cluster_id, node_count,
node.id, dst_id, ACCESS_READABLE)
else:
proto = _tcp_string(dst_repl, dst_id, node_count,
ACCESS_READABLE)
tmp.append(
','.join([proto,
dst_dev,
'0,0', # type = 0, sequential, etag = 0, not needed
str(self.parser_config['limits']['reads']),
str(self.parser_config['limits']['rbytes']),
'0,0'])
)
node.bind = tmp
tmp = []
for (dst, dst_dev) in node.connect:
dst_id = self.nodes.get(dst).id
dst_repl = self.nodes.get(dst).replicate
if self.network_type == 'opaque':
proto = _opaque_string(dst_repl, cluster_id, node_count,
node.id, dst_id, ACCESS_WRITABLE)
else:
proto = _tcp_string(dst_repl, dst_id, node_count,
ACCESS_WRITABLE)
tmp.append(
','.join([proto,
dst_dev,
'0,0', # type = 0, sequential, etag = 0, not needed
'0,0',
str(self.parser_config['limits']['writes']),
str(self.parser_config['limits']['wbytes'])])
)
node.connect = tmp
def is_sysimage_device(self, device_name):
"""
Checks if the particular device name is in sysimage devices dict
:param device_name: name of the device
:returns True if device is in dict, False otherwise
"""
return device_name in self.sysimage_devices.keys()
def get_sysimage(self, device_name):
"""
Gets real file path for particular sysimage device name
:param device_name: name of the device
:returns file path if device is in dict, None otherwise
"""
return self.sysimage_devices.get(device_name, None)
def prepare_for_daemon(self, config, nvram_file, zerovm_nexe,
local_object, daemon_sock, timeout=None):
return self.prepare_zerovm_files(config, nvram_file,
local_object=local_object,
zerovm_nexe=zerovm_nexe,
use_dev_self=False,
job=daemon_sock,
timeout=timeout)
def prepare_for_forked(self, config, nvram_file, local_object,
timeout=None):
return self.prepare_zerovm_files(config, nvram_file,
local_object=local_object,
zerovm_nexe=None,
use_dev_self=False,
job=None, timeout=timeout)
def prepare_for_standalone(self, config, nvram_file, zerovm_nexe,
local_object, timeout=None):
return self.prepare_zerovm_files(config, nvram_file,
local_object=local_object,
zerovm_nexe=zerovm_nexe,
use_dev_self=True,
job=None, timeout=timeout)
def prepare_zerovm_files(self, config, nvram_file, local_object=None,
zerovm_nexe=None, use_dev_self=True, job=None,
timeout=None):
"""
Prepares all the files needed for zerovm session run
:param config: single node config in deserialized format
:param nvram_file: nvram file name to write nvram data to
:param local_object: specific channel object from config
that is a local channel, can be None
:param zerovm_nexe: path to nexe binary file
        :param use_dev_self: whether we map nexe binary as /dev/self or not
        :param job: daemon socket path, emitted as a Job= manifest line
        :param timeout: session timeout; defaults to the configured
            manifest timeout
        :returns zerovm manifest data as string
"""
if not timeout:
timeout = self.parser_config['manifest']['Timeout']
zerovm_inputmnfst = (
'Version=%s\n'
'Program=%s\n'
'Timeout=%s\n'
'Memory=%s,0\n'
% (
self.parser_config['manifest']['Version'],
zerovm_nexe or '/dev/null',
timeout,
self.parser_config['manifest']['Memory']
))
if job:
zerovm_inputmnfst += 'Job=%s\n' % job
mode_mapping = {}
fstab = None
def add_to_fstab(fstab_string, device, access, removable='no',
mountpoint='/'):
if not fstab_string:
fstab_string = '[fstab]\n'
fstab_string += \
'channel=/dev/%s, mountpoint=%s, ' \
'access=%s, removable=%s\n' \
% (device, mountpoint, access, removable)
return fstab_string
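        # Illustration with hypothetical values: add_to_fstab(None, 'image',
        # 'ro') yields '[fstab]\nchannel=/dev/image, mountpoint=/, access=ro,
        # removable=no\n', the exact line the sysimage tests assert against.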
channels = []
for ch in config['channels']:
device = ch['device']
ch_type = CHANNEL_TYPE_MAP.get(device)
if ch_type is None:
if self.is_sysimage_device(device):
ch_type = CHANNEL_TYPE_MAP.get('sysimage')
else:
continue
access = ch['access']
if self.is_sysimage_device(device):
fstab = add_to_fstab(fstab, device, 'ro')
if access & ACCESS_READABLE:
zerovm_inputmnfst += \
'Channel=%s,/dev/%s,%s,0,%s,%s,0,0\n' % \
(ch['lpath'], device, ch_type,
self.parser_config['limits']['reads'],
self.parser_config['limits']['rbytes'])
elif access & ACCESS_CDR:
zerovm_inputmnfst += \
'Channel=%s,/dev/%s,%s,0,%s,%s,%s,%s\n' % \
(ch['lpath'], device, ch_type,
self.parser_config['limits']['reads'],
self.parser_config['limits']['rbytes'],
self.parser_config['limits']['writes'],
self.parser_config['limits']['wbytes'])
                if device == 'image':
fstab = add_to_fstab(fstab, device, 'ro',
removable=ch['removable'])
elif access & ACCESS_WRITABLE:
tag = '0'
if not ch['path'] or ch is local_object:
tag = '1'
zerovm_inputmnfst += \
'Channel=%s,/dev/%s,%s,%s,0,0,%s,%s\n' % \
(ch['lpath'], device, ch_type, tag,
self.parser_config['limits']['writes'],
self.parser_config['limits']['wbytes'])
elif access & ACCESS_NETWORK:
zerovm_inputmnfst += \
'Channel=%s,/dev/%s,%s,0,0,0,%s,%s\n' % \
(ch['lpath'], device, ch_type,
self.parser_config['limits']['writes'],
self.parser_config['limits']['wbytes'])
mode = ch.get('mode', None)
if mode:
mode_mapping[device] = mode
else:
# map everything to file by default
mode_mapping[device] = 'file'
channels.append(device)
network_devices = []
for conn in config['connect'] + config['bind']:
zerovm_inputmnfst += 'Channel=%s\n' % conn
dev = conn.split(',', 2)[1][5:] # len('/dev/') = 5
if dev in STD_DEVICES:
network_devices.append(dev)
# map everything to file by default
mode_mapping[dev] = 'file'
for dev in STD_DEVICES:
if dev not in channels and dev not in network_devices:
if 'stdin' in dev:
zerovm_inputmnfst += \
'Channel=/dev/null,/dev/stdin,0,0,%s,%s,0,0\n' % \
(self.parser_config['limits']['reads'],
self.parser_config['limits']['rbytes'])
else:
zerovm_inputmnfst += \
'Channel=/dev/null,/dev/%s,0,0,0,0,%s,%s\n' % \
(dev, self.parser_config['limits']['writes'],
self.parser_config['limits']['wbytes'])
if use_dev_self:
zerovm_inputmnfst += \
'Channel=%s,/dev/self,3,0,%s,%s,0,0\n' % \
(zerovm_nexe, self.parser_config['limits']['reads'],
self.parser_config['limits']['rbytes'])
env = None
if config.get('env'):
env = '[env]\n'
if local_object:
if local_object['access'] & (ACCESS_READABLE | ACCESS_CDR):
metadata = local_object['meta']
content_type = metadata.get('Content-Type',
'application/octet-stream')
env += ENV_ITEM % ('LOCAL_CONTENT_LENGTH', local_object[
'size'])
env += ENV_ITEM % ('LOCAL_CONTENT_TYPE',
quote_for_env(content_type))
for k, v in metadata.iteritems():
meta = k.upper()
if meta.startswith('X-OBJECT-META-'):
env += ENV_ITEM \
% ('LOCAL_HTTP_%s' % meta.replace('-', '_'),
quote_for_env(v))
continue
for hdr in ['X-TIMESTAMP', 'ETAG', 'CONTENT-ENCODING']:
if hdr in meta:
env += ENV_ITEM \
% ('LOCAL_HTTP_%s' % meta.replace('-',
'_'),
quote_for_env(v))
break
elif local_object['access'] & ACCESS_WRITABLE:
content_type = local_object.get('content_type',
'application/octet-stream')
env += ENV_ITEM % ('LOCAL_CONTENT_TYPE',
quote_for_env(content_type))
meta = local_object.get('meta', None)
if meta:
for k, v in meta.iteritems():
env += ENV_ITEM \
% ('LOCAL_HTTP_X_OBJECT_META_%s'
% k.upper().replace('-', '_'),
quote_for_env(v))
env += ENV_ITEM % ('LOCAL_DOCUMENT_ROOT',
'/dev/%s'
% local_object['device'])
config['env']['LOCAL_OBJECT'] = 'on'
config['env']['LOCAL_PATH_INFO'] = local_object['path_info']
for k, v in config['env'].iteritems():
if v:
env += ENV_ITEM % (k, quote_for_env(v))
exe_name = config.get('exe_name') or config['name']
args = '[args]\nargs = %s' % exe_name
if config.get('args'):
args += ' %s' % config['args']
args += '\n'
mapping = None
if mode_mapping:
mapping = '[mapping]\n'
for ch_device, mode in mode_mapping.iteritems():
mapping += 'channel=/dev/%s, mode=%s\n' % (ch_device, mode)
fd = open(nvram_file, 'wb')
for chunk in [fstab, args, env, mapping]:
fd.write(chunk or '')
fd.close()
zerovm_inputmnfst += \
'Channel=%s,/dev/nvram,3,0,%s,%s,%s,%s\n' % \
(nvram_file,
self.parser_config['limits']['reads'],
self.parser_config['limits']['rbytes'], 0, 0)
zerovm_inputmnfst += 'Node=%d\n' \
% (config['id'])
if 'name_service' in config:
zerovm_inputmnfst += 'NameServer=%s\n' \
% config['name_service']
return zerovm_inputmnfst
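    # Shape of the manifest text returned above (placeholders, not output
    # from a real run):
    #   Version=<manifest version>
    #   Program=<nexe path or /dev/null>
    #   Timeout=<seconds>
    #   Memory=<bytes>,0
    #   Channel=<lpath>,/dev/<name>,<type>,<tag>,<reads>,<rbytes>,<writes>,<wbytes>
    #   Node=<id>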
def resolve_path_info(self, account_name, replica_resolver):
default_path_info = '/%s' % account_name
for node in self.nodes.itervalues():
top_channel = None
if node.channels:
if node.attach == 'default':
top_channel = node.channels[0]
if top_channel.device == 'script' \
and len(node.channels) > 1:
top_channel = node.channels[1]
else:
for chan in node.channels:
if node.attach == chan.device\
and isinstance(chan.path, SwiftPath):
top_channel = chan
break
if top_channel and isinstance(top_channel.path, SwiftPath):
if top_channel.access & (ACCESS_READABLE | ACCESS_CDR):
node.path_info = top_channel.path.path
node.access = 'GET'
elif top_channel.access & ACCESS_WRITABLE \
and node.replicate > 0:
node.path_info = top_channel.path.path
if replica_resolver:
node.replicate = replica_resolver(
top_channel.path.account,
top_channel.path.container)
node.access = 'PUT'
else:
node.path_info = default_path_info
else:
node.path_info = default_path_info
if node.replicate == 0:
node.replicate = 1
def _add_connected_device(devices, channel, zvm_node):
if not devices.get(zvm_node.name, None):
devices[zvm_node.name] = {}
devices[zvm_node.name][channel.path.host] = (
'/dev/' + channel.device, channel.path.device)
def _create_node_name(node_name, i):
return '%s-%d' % (node_name, i)
def _resolve_wildcards(node, param):
if param.count('*') > 0:
for wc in getattr(node, 'wildcards', []):
param = param.replace('*', wc, 1)
if param.count('*') > 0:
raise ClusterConfigParsingError(
'Cannot resolve wildcard for node %s' % node.name)
return param
def _extract_stored_wildcards(path, node):
new_url = path.url
for wc in node.wildcards:
new_url = new_url.replace('*', wc, 1)
if new_url.count('*') > 0:
raise ClusterConfigParsingError('Wildcards in input cannot be '
'resolved into output path %s'
% path)
return new_url
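# Wildcard substitution, illustrated with made-up values: with
# node.wildcards == ['jan', '01'] both helpers replace one '*' per stored
# value, left to right, and raise ClusterConfigParsingError if any '*'
# remains unresolved:
#   _resolve_wildcards(node, '/dev/out-*-*')  # -> '/dev/out-jan-01'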
def _create_channel(channel, node, default_content_type=None):
device = DEVICE.fetch_from(channel)
if has_control_chars(device):
raise ClusterConfigParsingError(
'Bad device name: %s in %s' % (device, node.name))
path = parse_location(channel.get('path'))
if not device:
raise ClusterConfigParsingError(
'Must specify device for file in %s' % node.name)
access = DEVICE_MAP.get(device, 0)
mode = channel.get('mode', None)
meta = channel.get('meta', {})
min_size = channel.get('min_size', 0)
content_type = channel.get('content_type',
default_content_type if path else 'text/html')
if access & ACCESS_READABLE and path:
if not isinstance(path, SwiftPath):
raise ClusterConfigParsingError(
'Readable device must be a swift object')
if not path.account or not path.container:
raise ClusterConfigParsingError('Invalid path %s in %s'
% (path.url, node.name))
return ZvmChannel(device, access, path=path,
content_type=content_type, meta_data=meta,
mode=mode, min_size=min_size)
class ZvmNode(object):
"""ZeroVM instance.
"""
def __init__(self, id=None, name=None, exe=None, args=None, env=None,
replicate=1, attach=None, exe_name=None, location=None):
self.id = id
self.name = name
self.exe = exe
self.args = args
self.env = env
self.replicate = replicate
self.channels = []
self.connect = []
self.bind = []
self.replicas = []
self.skip_validation = False
self.wildcards = None
self.attach = attach
self.access = ''
self.exe_name = exe_name
self.data_in = False
self.location = location or None
@classmethod
def fromdict(cls, node_config):
name = node_config.get('name')
if not name:
raise ClusterConfigParsingError('Must specify node name')
if has_control_chars(name):
raise ClusterConfigParsingError('Invalid node name')
nexe = node_config.get('exec')
if not nexe:
raise ClusterConfigParsingError(
'Must specify exec stanza for %s' % name)
exe = parse_location(nexe.get('path'))
if not exe:
raise ClusterConfigParsingError(
'Must specify executable path for %s' % name)
if isinstance(exe, ZvmPath):
raise ClusterConfigParsingError(
'Executable path cannot be a zvm path in %s' % name)
args = nexe.get('args')
env = nexe.get('env')
if has_control_chars('%s %s %s' % (exe.url, args, env)):
raise ClusterConfigParsingError(
'Invalid nexe property for %s' % name)
replicate = node_config.get('replicate', 1)
attach = node_config.get('attach', 'default')
exe_name = nexe.get('name')
location = node_config.get('location')
return ZvmNode(0, name, exe, args, env, replicate, attach, exe_name,
location)
def copy(self, id, name=None):
newnode = deepcopy(self)
newnode.id = id
if name:
newnode.name = name
return newnode
def add_channel(self, path=None,
content_type=None, channel=None):
channel = deepcopy(channel)
if path:
channel.path = path
if content_type:
channel.content_type = content_type
self.channels.append(channel)
def add_new_channel(self, device=None, access=None, path=None,
content_type='application/octet-stream',
meta_data=None, mode=None, removable='no',
mountpoint='/'):
channel = ZvmChannel(device, access, path,
content_type=content_type,
meta_data=meta_data, mode=mode,
removable=removable, mountpoint=mountpoint)
self.channels.append(channel)
def get_channel(self, device=None, path=None):
if device:
for chan in self.channels:
if chan.device == device:
return chan
if path:
for chan in self.channels:
if chan.path == path:
return chan
return None
def copy_cgi_env(self, request=None, cgi_env=None):
if not self.env:
self.env = {}
self.env['REMOTE_USER'] = request.remote_user
self.env['QUERY_STRING'] = request.query_string
self.env['SERVER_PROTOCOL'] = \
request.environ.get('SERVER_PROTOCOL', 'HTTP/1.0')
self.env['PATH_INFO'] = request.path_info
self.env['REQUEST_METHOD'] = 'GET'
self.env['SERVER_SOFTWARE'] = 'zerocloud'
self.env['GATEWAY_INTERFACE'] = 'CGI/1.1'
self.env['SCRIPT_NAME'] = self.exe_name or self.name
self.env['SCRIPT_FILENAME'] = self.exe
if cgi_env:
self.env.update(cgi_env)
# we need to show the real host name, if possible
parts = self.env.get('HTTP_HOST',
request.environ.get('SERVER_NAME',
'localhost')).split(':', 1)
self.env['SERVER_NAME'] = parts[0]
if len(parts) > 1:
self.env['SERVER_PORT'] = parts[1]
else:
self.env['SERVER_PORT'] = '80'
def add_data_source(self, data_sources, resp, dev='sysmap', append=False):
if append:
data_sources.append(resp)
else:
data_sources.insert(0, resp)
if not getattr(self, 'last_data', None) or append:
self.last_data = resp
resp.nodes = [{'node': self, 'dev': dev}]
def store_wildcards(self, path, mask):
new_match = mask.match(path.path)
self.wildcards = map(lambda idx: new_match.group(idx),
range(1, new_match.lastindex + 1))
def dumps(self, indent=None):
return json.dumps(self, cls=NodeEncoder, indent=indent)
def get_list_of_remote_objects(self):
channels = []
if isinstance(self.exe, SwiftPath):
# we never co-locate with executable
# we assume exe is small and used by many jobs
channels.append(ZvmChannel('boot', None, path=self.exe))
for ch in self.channels:
if (isinstance(ch.path, SwiftPath)
and (ch.access & (ACCESS_READABLE | ACCESS_CDR))
# node is NOT co-located with this object
# path_info is None
and ch.path.path != getattr(self, 'path_info', None)):
channels.append(ch)
return channels
class NodeEncoder(json.JSONEncoder):
def default(self, o):
if isinstance(o, ZvmNode) or isinstance(o, ZvmChannel):
return o.__dict__
if isinstance(o, ObjPath):
return o.url
return json.JSONEncoder.default(self, o)
class ClusterConfig(object):
def __init__(self, nodes, total_count):
self.nodes = nodes
self.total_count = total_count
``` |
{
"source": "joyride9999/fluffi",
"score": 2
} |
#### File: fluffiweb/tests/test_ui.py
```python
import unittest, os, shutil
from selenium import webdriver
from app import app, models
class TestUI(unittest.TestCase):
def setUp(self):
self.driver = webdriver.Firefox()
fuzzjob = models.Fuzzjob.query.first()
if fuzzjob:
self.projId = fuzzjob.id
else:
print("Error: No fuzzjob exists for testing!")
def tearDown(self):
self.driver.close()
def test_create_project_and_remove(self):
""" Tests if the create project form works """
testProject="testProject"
self.driver.get("http://localhost:5000/projects/createProject")
project_name_field = self.driver.find_element_by_name("name")
project_name_field.send_keys(testProject)
targetCMDLine_field = self.driver.find_element_by_name("targetCMDLine")
targetCMDLine_field.send_keys("C:testCMDLine")
target_module_field = self.driver.find_element_by_name("1_targetname")
target_module_field.send_keys("example.dll")
population_file = self.driver.find_element_by_name("filename")
population_file.send_keys("C:\\TestDev\\test_files\\example1.dll")
        create_button = self.driver.find_element_by_id("fuzzButton")
        create_button.click()
self.driver.get("http://localhost:5000/projects")
self.assertIn(testProject, self.driver.page_source)
def test_update_and_delete(self):
""" Tests if the update and delete works """
testName="testName"
testValue="testValue"
self.driver.get("http://localhost:5000/projects/view/{}".format(self.projId))
update_name_field = self.driver.find_element_by_id("option_module")
update_name_field.send_keys(testName)
update_value_field = self.driver.find_element_by_id("option_module_value")
update_value_field.send_keys(testValue)
add_settings_button = self.driver.find_element_by_id("addSettings")
add_settings_button.click()
self.assertIn(testName, self.driver.page_source)
self.assertIn(testValue, self.driver.page_source)
delete_setting_btn = self.driver.find_element_by_id("deleteSetting{}{}".format(testName, self.projId))
delete_setting_btn.click()
self.assertNotIn(testName, self.driver.page_source)
self.assertNotIn(testValue, self.driver.page_source)
update_name_field = self.driver.find_element_by_id("1_targetname")
update_name_field.send_keys(testName)
add_target_modules_btn = self.driver.find_element_by_id("addTargetModules")
add_target_modules_btn.click()
self.assertIn(testName, self.driver.page_source)
delete_setting_btn = self.driver.find_element_by_id("deleteModule{}{}".format(testName, self.projId))
delete_setting_btn.click()
self.assertNotIn(testName, self.driver.page_source)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "joy-rosie/ibis",
"score": 2
} |
#### File: pyspark/tests/test_window.py
```python
import pandas.util.testing as tm
import pytest
from pytest import param
# importorskip must run before the pyspark imports, or the imports themselves fail
pytest.importorskip('pyspark')
import pyspark.sql.functions as F
from pyspark.sql.window import Window
import ibis
pytestmark = pytest.mark.pyspark
@pytest.mark.parametrize(
('ibis_window', 'spark_range'),
[
param(
ibis.trailing_window(
preceding=ibis.interval(hours=1),
order_by='time',
group_by='key',
),
(-3600, 0),
),
param(
ibis.trailing_window(
preceding=ibis.interval(hours=2),
order_by='time',
group_by='key',
),
(-7200, 0),
),
param(
ibis.range_window(
preceding=0,
following=ibis.interval(hours=1),
order_by='time',
group_by='key',
),
(0, 3600),
),
],
)
def test_time_indexed_window(client, ibis_window, spark_range):
table = client.table('time_indexed_table')
result = table.mutate(
mean=table['value'].mean().over(ibis_window)
).compile()
result_pd = result.toPandas()
spark_table = table.compile()
spark_window = (
Window.partitionBy('key')
.orderBy(F.col('time').cast('long'))
.rangeBetween(*spark_range)
)
expected = spark_table.withColumn(
'mean', F.mean(spark_table['value']).over(spark_window),
).toPandas()
tm.assert_frame_equal(result_pd, expected)
# TODO: multi windows don't update scope correctly
@pytest.mark.xfail(
reason='Issue #2412 Same window op with different window size on table '
'lead to incorrect results for pyspark backend',
strict=True,
)
def test_multiple_windows(client):
table = client.table('time_indexed_table')
window1 = ibis.trailing_window(
preceding=ibis.interval(hours=1), order_by='time', group_by='key'
)
window2 = ibis.trailing_window(
preceding=ibis.interval(hours=2), order_by='time', group_by='key'
)
result = table.mutate(
mean_1h=table['value'].mean().over(window1),
mean_2h=table['value'].mean().over(window2),
).compile()
result_pd = result.toPandas()
df = table.compile().toPandas()
expected_win_1 = (
df.set_index('time')
.groupby('key')
.value.rolling('1h', closed='both')
.mean()
.rename('mean_1h')
).reset_index(drop=True)
expected_win_2 = (
df.set_index('time')
.groupby('key')
.value.rolling('2h', closed='both')
.mean()
.rename('mean_2h')
).reset_index(drop=True)
tm.assert_series_equal(result_pd['mean_1h'], expected_win_1)
tm.assert_series_equal(result_pd['mean_2h'], expected_win_2)
```
#### File: tests/all/conftest.py
```python
import operator
import os
import numpy as np
import pandas as pd
import pytest
import ibis
import ibis.common.exceptions as com
import ibis.util as util
from ibis.tests.backends import Backend
def _random_identifier(suffix):
return '__ibis_test_{}_{}'.format(suffix, util.guid())
def subclasses(cls):
"""Get all child classes of `cls` not including `cls`, transitively."""
assert isinstance(cls, type), "cls is not a class, type: {}".format(
type(cls)
)
children = set(cls.__subclasses__())
return children.union(*map(subclasses, children))
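# Minimal sketch with hypothetical classes: given
#   class A: pass
#   class B(A): pass
#   class C(B): pass
# subclasses(A) returns {B, C} -- children are collected transitively,
# excluding A itself.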
ALL_BACKENDS = sorted(subclasses(Backend), key=operator.attrgetter("__name__"))
def pytest_runtest_call(item):
"""Dynamically add various custom markers."""
nodeid = item.nodeid
for marker in list(item.iter_markers(name="only_on_backends")):
(backend_types,) = map(tuple, marker.args)
backend = item.funcargs["backend"]
assert isinstance(backend, Backend), "backend has type {!r}".format(
type(backend).__name__
)
if not isinstance(backend, backend_types):
pytest.skip(nodeid)
for marker in list(item.iter_markers(name="skip_backends")):
(backend_types,) = map(tuple, marker.args)
backend = item.funcargs["backend"]
assert isinstance(backend, Backend), "backend has type {!r}".format(
type(backend).__name__
)
if isinstance(backend, backend_types):
pytest.skip(nodeid)
for marker in list(item.iter_markers(name="skip_missing_feature")):
backend = item.funcargs["backend"]
(features,) = marker.args
missing_features = [
feature for feature in features if not getattr(backend, feature)
]
if missing_features:
# pytest.mark.skip() only constructs a marker; pytest.skip() actually skips
pytest.skip(
'Backend {} is missing features {} needed to run {}'.format(
type(backend).__name__, ', '.join(missing_features), nodeid
)
)
for marker in list(item.iter_markers(name="xfail_backends")):
(backend_types,) = map(tuple, marker.args)
backend = item.funcargs["backend"]
assert isinstance(backend, Backend), "backend has type {!r}".format(
type(backend).__name__
)
item.add_marker(
pytest.mark.xfail(
condition=isinstance(backend, backend_types),
reason='Backend {} does not pass this test'.format(
type(backend).__name__
),
**marker.kwargs,
)
)
for marker in list(item.iter_markers(name="xpass_backends")):
(backend_types,) = map(tuple, marker.args)
backend = item.funcargs["backend"]
assert isinstance(backend, Backend), "backend has type {!r}".format(
type(backend).__name__
)
item.add_marker(
pytest.mark.xfail(
condition=not isinstance(backend, backend_types),
reason='{} does not pass this test'.format(
type(backend).__name__
),
**marker.kwargs,
)
)
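# Usage sketch (backend class names here are hypothetical):
#   @pytest.mark.only_on_backends([PySpark])   # run only on the PySpark backend
#   @pytest.mark.skip_backends([Impala])       # skip on Impala
#   @pytest.mark.xfail_backends([Impala])      # expected failure on Impala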
@pytest.hookimpl(hookwrapper=True)
def pytest_pyfunc_call(pyfuncitem):
"""Dynamically add an xfail marker for specific backends."""
outcome = yield
try:
outcome.get_result()
except (
com.OperationNotDefinedError,
com.UnsupportedOperationError,
com.UnsupportedBackendType,
NotImplementedError,
) as e:
markers = list(pyfuncitem.iter_markers(name="xfail_unsupported"))
assert (
len(markers) == 1
), "More than one xfail_unsupported marker found on test {}".format(
pyfuncitem
)
(marker,) = markers
backend = pyfuncitem.funcargs["backend"]
assert isinstance(backend, Backend), "backend has type {!r}".format(
type(backend).__name__
)
pytest.xfail(reason='{}: {}'.format(type(backend).__name__, e))
pytestmark = pytest.mark.backend
pytest_backends = os.environ.get('PYTEST_BACKENDS', '').split(' ')
params_backend = [
pytest.param(backend, marks=getattr(pytest.mark, backend.__name__.lower()))
for backend in ALL_BACKENDS
if backend.__name__.lower() in pytest_backends or not pytest_backends
]
if len(pytest_backends) != len(params_backend):
unknown_backends = set(pytest_backends) - set(
b.__name__.lower() for b in ALL_BACKENDS
)
raise ValueError(
'PYTEST_BACKENDS environment variable contains unknown '
f'backends {unknown_backends}'
)
@pytest.fixture(params=params_backend, scope='session')
def backend(request, data_directory):
return request.param(data_directory)
@pytest.fixture(scope='session')
def con(backend):
return backend.connection
@pytest.fixture(scope='session')
def alltypes(backend):
return backend.functional_alltypes
@pytest.fixture(scope='session')
def sorted_alltypes(alltypes):
return alltypes.sort_by('id')
@pytest.fixture(scope='session')
def batting(backend):
return backend.batting
@pytest.fixture(scope='session')
def awards_players(backend):
return backend.awards_players
@pytest.fixture(scope='session')
def geo(backend):
if backend.geo is None:
pytest.skip(
'Geo Spatial type not supported for {} backend.'.format(
backend.name
)
)
return backend.geo
@pytest.fixture
def analytic_alltypes(alltypes):
return alltypes
@pytest.fixture(scope='session')
def df(alltypes):
return alltypes.execute()
@pytest.fixture(scope='session')
def sorted_df(df):
return df.sort_values('id').reset_index(drop=True)
@pytest.fixture(scope='session')
def batting_df(batting):
return batting.execute(limit=None)
@pytest.fixture(scope='session')
def awards_players_df(awards_players):
return awards_players.execute(limit=None)
@pytest.fixture(scope='session')
def geo_df(geo):
# Currently geo is implemented just for OmniSciDB
if geo is not None:
return geo.execute(limit=None)
return None
_spark_testing_client = None
_pyspark_testing_client = None
def get_spark_testing_client(data_directory):
global _spark_testing_client
if _spark_testing_client is None:
_spark_testing_client = get_common_spark_testing_client(
data_directory, lambda session: ibis.spark.connect(session)
)
return _spark_testing_client
def get_pyspark_testing_client(data_directory):
global _pyspark_testing_client
if _pyspark_testing_client is None:
_pyspark_testing_client = get_common_spark_testing_client(
data_directory, lambda session: ibis.pyspark.connect(session)
)
return _pyspark_testing_client
def get_common_spark_testing_client(data_directory, connect):
pytest.importorskip('pyspark')
import pyspark.sql.types as pt
from pyspark.sql import SparkSession
spark = SparkSession.builder.getOrCreate()
_spark_testing_client = connect(spark)
s = _spark_testing_client._session
df_functional_alltypes = s.read.csv(
path=str(data_directory / 'functional_alltypes.csv'),
schema=pt.StructType(
[
pt.StructField('index', pt.IntegerType(), True),
pt.StructField('Unnamed: 0', pt.IntegerType(), True),
pt.StructField('id', pt.IntegerType(), True),
# cast below, Spark can't read 0/1 as bool
pt.StructField('bool_col', pt.ByteType(), True),
pt.StructField('tinyint_col', pt.ByteType(), True),
pt.StructField('smallint_col', pt.ShortType(), True),
pt.StructField('int_col', pt.IntegerType(), True),
pt.StructField('bigint_col', pt.LongType(), True),
pt.StructField('float_col', pt.FloatType(), True),
pt.StructField('double_col', pt.DoubleType(), True),
pt.StructField('date_string_col', pt.StringType(), True),
pt.StructField('string_col', pt.StringType(), True),
pt.StructField('timestamp_col', pt.TimestampType(), True),
pt.StructField('year', pt.IntegerType(), True),
pt.StructField('month', pt.IntegerType(), True),
]
),
mode='FAILFAST',
header=True,
)
df_functional_alltypes = df_functional_alltypes.withColumn(
"bool_col", df_functional_alltypes["bool_col"].cast("boolean")
)
df_functional_alltypes.createOrReplaceTempView('functional_alltypes')
df_batting = s.read.csv(
path=str(data_directory / 'batting.csv'),
schema=pt.StructType(
[
pt.StructField('playerID', pt.StringType(), True),
pt.StructField('yearID', pt.IntegerType(), True),
pt.StructField('stint', pt.IntegerType(), True),
pt.StructField('teamID', pt.StringType(), True),
pt.StructField('lgID', pt.StringType(), True),
pt.StructField('G', pt.IntegerType(), True),
pt.StructField('AB', pt.DoubleType(), True),
pt.StructField('R', pt.DoubleType(), True),
pt.StructField('H', pt.DoubleType(), True),
pt.StructField('X2B', pt.DoubleType(), True),
pt.StructField('X3B', pt.DoubleType(), True),
pt.StructField('HR', pt.DoubleType(), True),
pt.StructField('RBI', pt.DoubleType(), True),
pt.StructField('SB', pt.DoubleType(), True),
pt.StructField('CS', pt.DoubleType(), True),
pt.StructField('BB', pt.DoubleType(), True),
pt.StructField('SO', pt.DoubleType(), True),
pt.StructField('IBB', pt.DoubleType(), True),
pt.StructField('HBP', pt.DoubleType(), True),
pt.StructField('SH', pt.DoubleType(), True),
pt.StructField('SF', pt.DoubleType(), True),
pt.StructField('GIDP', pt.DoubleType(), True),
]
),
header=True,
)
df_batting.createOrReplaceTempView('batting')
df_awards_players = s.read.csv(
path=str(data_directory / 'awards_players.csv'),
schema=pt.StructType(
[
pt.StructField('playerID', pt.StringType(), True),
pt.StructField('awardID', pt.StringType(), True),
pt.StructField('yearID', pt.IntegerType(), True),
pt.StructField('lgID', pt.StringType(), True),
pt.StructField('tie', pt.StringType(), True),
pt.StructField('notes', pt.StringType(), True),
]
),
header=True,
)
df_awards_players.createOrReplaceTempView('awards_players')
df_simple = s.createDataFrame([(1, 'a')], ['foo', 'bar'])
df_simple.createOrReplaceTempView('simple')
df_struct = s.createDataFrame([((1, 2, 'a'),)], ['struct_col'])
df_struct.createOrReplaceTempView('struct')
df_nested_types = s.createDataFrame(
[([1, 2], [[3, 4], [5, 6]], {'a': [[2, 4], [3, 5]]})],
[
'list_of_ints',
'list_of_list_of_ints',
'map_string_list_of_list_of_ints',
],
)
df_nested_types.createOrReplaceTempView('nested_types')
df_complicated = s.createDataFrame(
[({(1, 3): [[2, 4], [3, 5]]},)], ['map_tuple_list_of_list_of_ints']
)
df_complicated.createOrReplaceTempView('complicated')
df_udf = s.createDataFrame(
[('a', 1, 4.0, 'a'), ('b', 2, 5.0, 'a'), ('c', 3, 6.0, 'b')],
['a', 'b', 'c', 'key'],
)
df_udf.createOrReplaceTempView('udf')
df_udf_nan = s.createDataFrame(
pd.DataFrame(
{
'a': np.arange(10, dtype=float),
'b': [3.0, np.NaN] * 5,
'key': list('ddeefffggh'),
}
)
)
df_udf_nan.createOrReplaceTempView('udf_nan')
df_udf_null = s.createDataFrame(
[
(float(i), None if i % 2 else 3.0, 'ddeefffggh'[i])
for i in range(10)
],
['a', 'b', 'key'],
)
df_udf_null.createOrReplaceTempView('udf_null')
df_udf_random = s.createDataFrame(
pd.DataFrame(
{
'a': np.arange(4, dtype=float).tolist()
+ np.random.rand(3).tolist(),
'b': np.arange(4, dtype=float).tolist()
+ np.random.rand(3).tolist(),
'key': list('ddeefff'),
}
)
)
df_udf_random.createOrReplaceTempView('udf_random')
return _spark_testing_client
@pytest.fixture
def temp_table(con: ibis.client.Client) -> str:
"""
Return a temporary table name.
Parameters
----------
con : ibis.client.Client
Yields
------
name : string
Random table name for temporary usage.
"""
name = _random_identifier('table')
try:
yield name
finally:
if hasattr(con, 'drop_table'):
con.drop_table(name, force=True)
@pytest.fixture
def temp_view(con) -> str:
"""Return a temporary view name.
Parameters
----------
con : ibis.omniscidb.OmniSciDBClient
Yields
------
name : string
Random view name for temporary usage.
"""
name = _random_identifier('view')
try:
yield name
finally:
if hasattr(con, 'drop_view'):
con.drop_view(name, force=True)
@pytest.fixture(scope='session')
def current_data_db(con, backend) -> str:
"""Return current database name."""
if not hasattr(con, 'current_database'):
pytest.skip(
f'{backend.name} backend doesn\'t have current_database method.'
)
return con.current_database
@pytest.fixture
def alternate_current_database(con, backend, current_data_db: str) -> str:
"""Create a temporary database and yield its name.
Drops the created database upon completion.
Parameters
----------
con : ibis.client.Client
current_data_db : str
Yields
------
str
"""
name = _random_identifier('database')
if not hasattr(con, 'create_database'):
pytest.skip(
f'{backend.name} backend doesn\'t have create_database method.'
)
con.create_database(name)
try:
yield name
finally:
con.set_database(current_data_db)
con.drop_database(name, force=True)
``` |
{
"source": "joy-rosie/recipeasy",
"score": 3
} |
#### File: recipeasy/data/util.py
```python
from typing import Optional, Tuple, Dict, List
from dataclasses import dataclass
import os
import json
import csv
import copy
from recipeasy.food import FoodElement
@dataclass(frozen=True)
class FoodElementWithData(FoodElement):
cofid_food_code: Optional[str] = None
description: Optional[str] = None
all_names: Optional[Tuple[str, ...]] = None
def get_foods(
path: Optional[str] = None,
**kwargs
) -> Dict[str, FoodElementWithData]:
if path is None:
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'food.json')
_, file_extension = os.path.splitext(path)
if file_extension == '.json':
raw_food_data = get_raw_foods_from_json(path=path, **kwargs)
elif file_extension == '.csv':
raw_food_data = get_raw_foods_from_csv(path=path, **kwargs)
else:
raise NotImplementedError(f'File extension "{file_extension}" not yet implemented.')
for index, item in enumerate(raw_food_data):
raw_food_data[index]['all_names'] = tuple(item['all_names'])
food_data = {item['all_names']: FoodElementWithData(**item) for item in raw_food_data}
food_data = {item: value for key, value in food_data.items() for item in key}
return food_data
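# Lookup sketch (hypothetical data): if one entry has
# all_names == ('apple', 'gala'), then get_foods()['apple'] and
# get_foods()['gala'] return the same FoodElementWithData instance.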
def get_raw_foods_from_json(
path: Optional[str] = None,
**kwargs,
) -> List[Dict]:
if path is None:
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'food.json')
with open(path, 'r') as json_file:
raw_food_data = json.load(json_file)
return raw_food_data
def raw_foods_to_json(
raw_food_data: List[Dict],
path: Optional[str] = None,
**kwargs,
) -> None:
if path is None:
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'food.json')
with open(path, 'w') as json_file:
json.dump(raw_food_data, json_file)
def get_raw_foods_from_csv(
path: Optional[str] = None,
delimiter: Optional[str] = None,
**kwargs,
) -> List[Dict]:
if path is None:
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'food.csv')
if delimiter is None:
delimiter = '|'
raw_food_data = []
with open(path, 'r', newline='') as f:
reader = csv.DictReader(f, delimiter=delimiter)
for row in reader:
row['all_names'] = row['all_names'].split(', ')
raw_food_data.append(row)
return raw_food_data
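# Expected row shape (hypothetical data), '|'-delimited with comma-joined
# aliases, matching the ', '.join(...) used in raw_foods_to_csv below:
#   all_names|cofid_food_code|description
#   apple, gala|13-144|Raw eating apple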
def raw_foods_to_csv(
raw_food_data: List[Dict],
path: Optional[str] = None,
delimiter: Optional[str] = None,
**kwargs,
) -> None:
if path is None:
path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'food.csv')
if delimiter is None:
delimiter = '|'
raw_food_data_dump = copy.deepcopy(raw_food_data)
for index, item in enumerate(raw_food_data_dump):
raw_food_data_dump[index]['all_names'] = ', '.join(item['all_names'])
csv_header = list(raw_food_data_dump[0].keys())
with open(path, 'w') as f:
dw = csv.DictWriter(f, delimiter=delimiter, fieldnames=csv_header)
dw.writeheader()
for row in raw_food_data_dump:
dw.writerow(row)
```
#### File: tests/test_data/test_util.py
```python
import recipeasy.data.util as data_util
def test_get_foods():
food_data = data_util.get_foods()
assert isinstance(food_data, dict)
```
#### File: recipeasy/tests/test_food.py
```python
import pytest
from recipeasy.food import FoodState, FoodElement, Food
@pytest.mark.parametrize('inputs, expected', [
(dict(), FoodState()),
(dict(name='deshelled'), FoodState(name='deshelled')),
(dict(name='cooked'), FoodState(name='cooked')),
(dict(name='chopped'), FoodState(name='chopped')),
])
def test_food_state(inputs, expected):
food_state = FoodState(**inputs)
assert food_state == expected
@pytest.mark.parametrize('inputs, expected', [
(dict(name='apple'), FoodElement(name='apple')),
(
dict(name='apple', state=FoodState('chopped')),
FoodElement(name='apple', state=FoodState('chopped'))
),
(
dict(name='apple', state=FoodState('chopped'), previous=FoodElement(name='apple')),
FoodElement(name='apple', state=FoodState('chopped'), previous=FoodElement(name='apple')),
),
])
def test_food_element(inputs, expected):
food_element = FoodElement(**inputs)
assert food_element == expected
@pytest.mark.parametrize('food_element, inputs, expected', [
(
FoodElement(name='apple'),
dict(new_state=FoodState('chopped')),
FoodElement(name='apple', state=FoodState('chopped'), previous=FoodElement(name='apple')),
),
])
def test_food_element_change_state(food_element, inputs, expected):
food_element_changed_state = food_element.change_state(**inputs)
assert food_element_changed_state == expected
@pytest.mark.parametrize('inputs, expected', [
(
dict(elements=frozenset({FoodElement(name='apple')})),
Food(elements=frozenset({FoodElement(name='apple')})),
),
])
def test_food(inputs, expected):
food = Food(**inputs)
assert food == expected
@pytest.mark.parametrize('food, inputs, expected', [
(
Food(elements=frozenset({FoodElement(name='apple')})),
dict(new_state=FoodState('chopped')),
Food(
elements=frozenset({
FoodElement(name='apple', state=FoodState('chopped'), previous=FoodElement(name='apple')),
}),
previous=frozenset({Food(elements=frozenset({FoodElement(name='apple')}))}),
),
),
])
def test_food_change_state(food, inputs, expected):
food_changed_state = food.change_state(**inputs)
assert food_changed_state == expected
@pytest.mark.parametrize('food, inputs, expected', [
(
Food(elements=frozenset({FoodElement(name='apple')})),
dict(other=Food(elements=frozenset({FoodElement(name='banana')})),),
Food(
elements=frozenset({
FoodElement(name='apple'),
FoodElement(name='banana'),
}),
previous=frozenset({
Food(elements=frozenset({FoodElement(name='apple')})),
Food(elements=frozenset({FoodElement(name='banana')})),
}),
),
),
])
def test_food_mix(food, inputs, expected):
food_mixed = food.mix(**inputs)
assert food_mixed == expected
@pytest.mark.parametrize('food, inputs, expected', [
(
Food(elements=frozenset({FoodElement(name='apple'), FoodElement(name='banana')})),
dict(food_element=FoodElement(name='banana')),
Food(
elements=frozenset({FoodElement(name='apple')}),
previous=frozenset({Food(
elements=frozenset({FoodElement(name='apple'), FoodElement(name='banana')}),
)}),
),
),
])
def test_food_remove(food, inputs, expected):
food_removed = food.remove(**inputs)
assert food_removed == expected
``` |
{
"source": "joysboy/ncclient",
"score": 3
} |
#### File: ncclient/examples/huawei.py
```python
import sys
from ncclient import manager
filter_vlan_snippet = """
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
</vlan>"""
def create_vlan(mgr, vlanid, vlanname):
snippet = """
<vlan xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<vlans>
<vlan>
<vlanId>%s</vlanId>
<vlanName/>
<vlanDesc>%s</vlanDesc>
</vlan>
</vlans>
</vlan>"""
confstr = snippet % (vlanid, vlanname)
mgr.edit_config(target='running', config=confstr)
def test_huawei_api(host, user, password):
device = {"name": "huawei"}
with manager.connect(host, port=830, username=user, password=password, device_params=device) as m:
create_vlan(m, '20', 'customer')
result = m.get_config(source="running", filter=("subtree", filter_vlan_snippet))
print(result)
if __name__ == '__main__':
test_huawei_api(sys.argv[1], sys.argv[2], sys.argv[3])
``` |
{
"source": "Joystickplays/GoMod",
"score": 2
} |
#### File: GoMod/cogs/views.py
```python
import discord
class Caseactionsview(discord.ui.View):
def __init__(self, ctx):
super().__init__()
self.value = None
self.ctx = ctx
@discord.ui.button(label='Ban', style=discord.ButtonStyle.red)
async def ban(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "b"
self.stop()
@discord.ui.button(label='Kick', style=discord.ButtonStyle.red)
async def kick(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "k"
self.stop()
@discord.ui.button(label='Delete', style=discord.ButtonStyle.green)
async def delete(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "d"
self.stop()
@discord.ui.button(label='Ignore', style=discord.ButtonStyle.gray)
async def ignore(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "i"
self.stop()
class Helpview(discord.ui.View):
def __init__(self, ctx):
super().__init__()
self.value = None
self.ctx = ctx
self.timeout = 60
@discord.ui.button(label='Moderator', style=discord.ButtonStyle.gray)
async def mod(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "m"
self.stop()
@discord.ui.button(label='AiMod [BETA]', style=discord.ButtonStyle.gray)
async def ai(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "a"
self.stop()
@discord.ui.button(label='Server backups', style=discord.ButtonStyle.gray, disabled=True)
async def server(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "s"
self.stop()
@discord.ui.button(label='Logging', style=discord.ButtonStyle.gray, disabled=True)
async def log(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "l"
self.stop()
@discord.ui.button(label='ModRep', style=discord.ButtonStyle.gray)
async def modrep(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "mr"
self.stop()
@discord.ui.button(label='CC', style=discord.ButtonStyle.gray)
async def cc(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "cc"
self.stop()
@discord.ui.button(label='Others', style=discord.ButtonStyle.gray)
async def other(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "o"
self.stop()
@discord.ui.button(label='Exit', style=discord.ButtonStyle.gray)
async def ex(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.ctx.author != interaction.user:
await interaction.response.send_message("You can't do this, sorry.", ephemeral=True)
return
self.value = "x"
self.stop()
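# Module-level helper: invoked as getvotes(self, member) from the views below,
# so `self` is passed explicitly rather than bound as a method.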
async def getvotes(self, member):
upvotes = await self.bot.db.fetch("SELECT COUNT(*) FROM repvotes WHERE who = $1 AND type = 'up'", member.id)
downvotes = await self.bot.db.fetch("SELECT COUNT(*) FROM repvotes WHERE who = $1 AND type = 'down'", member.id)
votes = upvotes[0]["count"] - downvotes[0]["count"]
return discord.Embed(title="Reputation", description=f"{member.mention} has {votes} votes.", color=0x00b2ff)
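# Example: 5 'up' rows and 2 'down' rows for a member yield votes == 3.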
class UpDownvote(discord.ui.View):
def __init__(self, bot, mem):
super().__init__()
self.value = None
self.bot = bot
self.mem = mem
@discord.ui.button(label='Upvote', style=discord.ButtonStyle.green)
async def upvote(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.mem.id == interaction.user.id:
await interaction.response.send_message("You cannot upvote or downvote yourself.", ephemeral=True)
return
lookup = await self.bot.db.fetchrow("SELECT * FROM repvotes WHERE who = $1 AND voted = $2", self.mem.id, interaction.user.id)
if lookup:
if lookup["type"] == "down":
await self.bot.db.execute("DELETE FROM repvotes WHERE who = $1 AND voted = $2 AND type = 'down'", self.mem.id, interaction.user.id)
await self.bot.db.execute("INSERT INTO repvotes (who, voted, type) VALUES ($1, $2, 'up')", self.mem.id, interaction.user.id)
embed = await getvotes(self, self.mem)
await interaction.message.edit(content=f"Update: {interaction.user.mention} cancelled the downvote and upvote this member.", embed=embed)
return
elif lookup["type"] == "up":
await self.bot.db.execute("DELETE FROM repvotes WHERE who = $1 AND voted = $2 AND type = 'up'", self.mem.id, interaction.user.id)
embed = await getvotes(self, self.mem)
await interaction.message.edit(f"Update: {interaction.user.mention} cancelled the upvote for this user.", embed=embed)
return
await self.bot.db.execute("INSERT INTO repvotes (who, voted, type) VALUES ($1, $2, 'up')", self.mem.id, interaction.user.id)
embed = await getvotes(self, self.mem)
await interaction.message.edit(content=f"Update: {interaction.user.mention} upvoted this member.", embed=embed)
@discord.ui.button(label='Downvote', style=discord.ButtonStyle.red)
async def downvote(self, button: discord.ui.Button, interaction: discord.Interaction):
if self.mem.id == interaction.user.id:
await interaction.response.send_message("You cannot upvote or downvote yourself.", ephemeral=True)
return
lookup = await self.bot.db.fetchrow("SELECT * FROM repvotes WHERE who = $1 AND voted = $2", self.mem.id, interaction.user.id)
if lookup:
if lookup["type"] == "down":
await self.bot.db.execute("DELETE FROM repvotes WHERE who = $1 AND voted = $2 AND type = 'down'", self.mem.id, interaction.user.id)
embed = await getvotes(self, self.mem)
await interaction.message.edit(f"Update: {interaction.user.mention} cancelled the downvote for this user.", embed=embed)
return
elif lookup["type"] == "up":
await self.bot.db.execute("DELETE FROM repvotes WHERE who = $1 AND voted = $2 AND type = 'up'", self.mem.id, interaction.user.id)
await self.bot.db.execute("INSERT INTO repvotes (who, voted, type) VALUES ($1, $2, 'down')", self.mem.id, interaction.user.id)
embed = await getvotes(self, self.mem)
await interaction.message.edit(content=f"Update: {interaction.user.mention} cancelled the upvote and downvoted this member.", embed=embed)
return
await self.bot.db.execute("INSERT INTO repvotes (who, voted, type) VALUES ($1, $2, 'down')", self.mem.id, interaction.user.id)
embed = await getvotes(self, self.mem)
await interaction.message.edit(content=f"Update: {interaction.user.mention} downvoted this member.", embed=embed)
# class SingleConfirm(discord.ui.View):
# def __init__(self, bot, ):
# super().__init__()
# self.value = None
# self.bot = bot
``` |
{
"source": "JoystreamClassic/paymentchannel-cpp",
"score": 2
} |
#### File: paymentchannel-cpp/conan_package/dev_include_source.py
```python
import os
import shutil
from distutils.dir_util import copy_tree
from base import PaymentChannelBase
# Use this recipe when checking out a tagged version locally and you wish to use it in your projects
# on the stable channel. Source will be copied to the cache from the local folder.
# Do not upload the recipe if you don't want to include source files with it.
class PaymentChannelRelease(PaymentChannelBase):
exports = "base.py"
exports_sources = "../sources*"
build_policy="always"
def source(self):
os.mkdir("repo")
shutil.move("sources", "repo/")
``` |
{
"source": "joytao-zhu/odooExtModel",
"score": 2
} |
#### File: odoo_crm/wizard/sale_opportunity.py
```python
import logging
from odoo import api, fields, models
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
SALESTATED = [
('find', '发现需求'),
('confirm', '确认需求'),
('solve', '解决方案'),
('talk', '商务谈判'),
('bid', '招投标'),
('win', '赢单'),
('losing', '输单'),
('cancel', '取消'),
]
class SaleOpportunityState(models.TransientModel):
_name = 'crm.sale.opportunity.state'
_description = "机会状态变更"
state = fields.Selection(string="变更状态", selection=SALESTATED, default='find', required=True)
note = fields.Text(string="变更原因", required=True)
opportunity_id = fields.Many2one(comodel_name='crm.sale.opportunity', string="机会", required=True)
@api.model
def default_get(self, fields):
res = super(SaleOpportunityState, self).default_get(fields)
res['opportunity_id'] = self.env.context.get('active_id')
return res
def commit_state(self):
"""
确认变更状态
:return:
"""
self.ensure_one()
self.opportunity_id.write({'state': self.state})
note = "变更状态原因:{}".format(self.note)
self.opportunity_id.message_post(body=note, message_type='notification')
return {'type': 'ir.actions.act_window_close'}
```
#### File: odoo_hcm/wizard/user_location.py
```python
import json
import logging
import requests
from requests import ReadTimeout
from odoo import api, fields, models
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
class GetUserLocation(models.TransientModel):
_name = 'get.user.location.tran'
_description = "获取位置信息"
name = fields.Char(string='详细地址')
area = fields.Char(string='地区')
def get_location(self):
"""
获取位置信息
:return:
"""
self.ensure_one()
# fetch the active Tencent Maps configuration
qqmap = self.env['hcm.qq.map'].search([('active', '=', True)], limit=1)
if not qqmap:
raise UserError("请先配置一条可用的腾讯地图信息!")
url = "https://apis.map.qq.com/ws/place/v1/search"
data = {
'keyword': self.name,
'boundary': "region({},0)".format(self.area),
'key': qqmap.key,
'page_size': 10,
'output': 'json',
}
try:
result = requests.get(url=url, params=data, timeout=5)
result = json.loads(result.text)
if result.get('status') == 0:
for d_res in result['data']:
data = {
'address': d_res.get('address'),
'category': d_res.get('category'),
}
location = d_res.get('location')
data.update({
'latitude': location.get('lat'),
'longitude': location.get('lng'),
})
ad_info = d_res.get('ad_info')
data.update({
'adcode': ad_info.get('adcode'),
'province': ad_info.get('province'),
'city': ad_info.get('city'),
'district': ad_info.get('district'),
})
locations = self.env['hcm.location.manage'].search([('address', '=', data.get('address'))])
if locations:
locations.write(data)
else:
self.env['hcm.location.manage'].create(data)
else:
raise UserError("提示:{}".format(result.get('message')))
except ReadTimeout:
raise UserError("连接腾讯位置服务'WebService'超时!")
return {'type': 'ir.actions.act_window_close'}
```
#### File: odoo_performance_manage/models/performance_assessment.py
```python
import logging
from odoo import api, fields, models
from odoo.exceptions import UserError
_logger = logging.getLogger(__name__)
class PerformanceAssessment(models.Model):
_name = 'performance.assessment'
_description = "绩效考评"
_rec_name = 'employee_id'
_order = 'id'
_inherit = ['mail.thread', 'mail.activity.mixin']
PerState = [
('setting', '目标制定'),
('executing', '执行中'),
('evaluation', '自评'),
('close', '结束'),
]
AssessmentType = [
('month', '月度'),
('quarter', '季度'),
('semiannual', '半年度'),
('year', '年度'),
('probation', '试用期'),
]
company_id = fields.Many2one('res.company', string=u'公司', default=lambda self: self.env.user.company_id.id)
active = fields.Boolean(string=u'active', default=True)
name = fields.Char(string='名称', track_visibility='onchange')
evaluation_id = fields.Many2one(comodel_name='evaluation.groups.manage', string=u'考评组')
performance_name = fields.Char(string='考评分组名称', help="根据类型拼接名称,用于分组")
state = fields.Selection(string=u'考评状态', selection=PerState, default='setting', track_visibility='onchange')
employee_id = fields.Many2one(comodel_name='hr.employee', string=u'考评员工', index=True, required=True)
department_id = fields.Many2one(comodel_name='hr.department', string=u'所属部门', index=True)
assessment_type = fields.Selection(string=u'考核类型', selection=AssessmentType, default='month', required=True)
start_date = fields.Date(string=u'开始日期', required=True, track_visibility='onchange')
end_date = fields.Date(string=u'截至日期', required=True, track_visibility='onchange')
line_ids = fields.One2many('performance.assessment.line', 'performance_id', string=u'绩效考评项目')
notes = fields.Text(string=u'备注', track_visibility='onchange')
@api.onchange('state')
@api.constrains('state')
def _update_line_state(self):
"""
当当前单据状态发生变化时,将状态信息写入到子表
:return:
"""
for res in self:
for line in res.line_ids:
line.state = res.state
@api.multi
def return_setting(self):
"""
回到初始状态
:return:
"""
for res in self:
res.state = 'setting'
@api.multi
def summit_performance(self):
"""
提交目标
:return:
"""
for res in self:
# check that the dimension weights sum to exactly 100
dimension_weights = 0
for line in res.line_ids:
dimension_weights += line.dimension_weights
if dimension_weights != 100:
raise UserError("您的考评项目权重小于或大于100,请纠正!")
res.state = 'executing'
@api.multi
def summit_rating(self):
"""
提交评分
:return:
"""
for res in self:
for line in res.line_ids:
for library in line.library_ids:
if library.employee_rating <= 0:
raise UserError("您还有未评分项未完成或评分值不正确,请纠正!")
res.state = 'close'
@api.constrains('assessment_type')
def _constrains_assessment_type(self):
"""
根据考评类型生成分组名称
:return:
"""
for res in self:
if res.assessment_type == 'month':
res.performance_name = "%s月绩效考核" % str(res.start_date)[:7]
elif res.assessment_type == 'year':
res.performance_name = "%s年度绩效考核" % str(res.start_date)[:4]
@api.constrains('employee_id', 'performance_name')
def _constrains_name(self):
"""
生成name字段
:return:
"""
for res in self:
res.name = "%s的%s" % (res.employee_id.name, res.performance_name)
res.department_id = res.employee_id.department_id.id if res.employee_id.department_id else False
@api.multi
def unlink(self):
"""
删除方法
:return:
"""
for res in self:
if res.state != 'setting':
raise UserError("已在进行中的流程不允许删除!")
return super(PerformanceAssessment, self).unlink()
class PerformanceAssessmentLine(models.Model):
_name = 'performance.assessment.line'
_description = "绩效考评项目"
_rec_name = 'dimension_id'
PerState = [
('setting', '目标制定'),
('executing', '执行中'),
('evaluation', '自评'),
('close', '结束'),
]
performance_id = fields.Many2one(comodel_name='performance.assessment', string=u'绩效考评')
state = fields.Selection(string=u'考评状态', selection=PerState, default='setting')
sequence = fields.Integer(string=u'序号')
dimension_id = fields.Many2one(comodel_name='performance.dimension.manage', string=u'考评维度', required=True)
dimension_weights = fields.Integer(string=u'权重')
library_ids = fields.One2many('performance.assessment.line.library', 'assessment_line_id', string=u'考评指标')
assessment_result = fields.Integer(string=u'考核结果', compute='_compute_result', store=True)
performance_grade_id = fields.Many2one('performance.grade.manage', string=u'绩效等级', compute='_compute_result', store=True)
@api.onchange('dimension_id')
def _onchange_dimension_id(self):
"""
:return:
"""
if self.dimension_id:
self.library_ids = False
self.dimension_weights = self.dimension_id.dimension_weights
@api.onchange('state')
@api.constrains('state')
def _update_line_state(self):
"""
当当前单据状态发生变化时,将状态信息写入到子表
:return:
"""
for res in self:
for library in res.library_ids:
library.state = res.state
@api.depends('library_ids.employee_rating')
def _compute_result(self):
"""
计算结果
:return:
"""
for res in self:
if res.state == 'evaluation':
result = 0
for library in res.library_ids:
result += library.employee_rating
res.assessment_result = result
grades = self.env['performance.grade.manage'].sudo().search([('active', '=', True)])
for grade in grades:
if grade.interval_from <= result < grade.interval_end:
res.performance_grade_id = grade.id
break
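# Hypothetical example: ratings of 40 + 45 give assessment_result == 85; a
# grade row with interval_from=80 and interval_end=90 then matches and is
# assigned to performance_grade_id.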
class PerformanceAssessmentLineLibrary(models.Model):
_name = 'performance.assessment.line.library'
_description = "考评指标"
_rec_name = 'indicator_id'
PerState = [
('setting', '目标制定'),
('executing', '执行中'),
('evaluation', '自评'),
('close', '结束'),
]
assessment_line_id = fields.Many2one(comodel_name='performance.assessment.line', string=u'绩效考评项目')
state = fields.Selection(string=u'考评状态', selection=PerState, default='setting')
dimension_id = fields.Many2one(comodel_name='performance.dimension.manage', string=u'考评维度')
sequence = fields.Integer(string=u'序号')
indicator_id = fields.Many2one(comodel_name='performance.indicator.library', string=u'考评指标', domain=[('name', '=', 'False')])
extra_end = fields.Integer(string=u'加扣分上限')
threshold_value = fields.Integer(string='门槛值')
target_value = fields.Char(string='目标值')
challenge_value = fields.Integer(string='挑战值')
assessment_criteria = fields.Text(string='考核标准')
weights = fields.Integer(string=u'权重')
notes = fields.Text(string=u'备注')
employee_rating = fields.Integer(string=u'员工评分')
employee_notes = fields.Text(string=u'评分说明')
@api.onchange('dimension_id')
def _onchange_dimension_id(self):
"""
根据维度值动态返回过滤规则
:return:
"""
if self.dimension_id:
self.indicator_ids = False
self.dimension_weights = self.dimension_id.dimension_weights
return {'domain': {'indicator_id': [('indicator_type', '=', self.dimension_id.dimension_type)]}}
else:
return {'domain': {'indicator_id': [('name', '=', 'False')]}}
@api.onchange('indicator_id')
def _onchange_indicator_id(self):
"""
:return:
"""
for res in self:
if res.indicator_id:
res.threshold_value = res.indicator_id.threshold_value
res.target_value = res.indicator_id.target_value
res.challenge_value = res.indicator_id.challenge_value
res.assessment_criteria = res.indicator_id.assessment_criteria
res.weights = res.indicator_id.weights
res.notes = res.indicator_id.notes
if res.indicator_id.indicator_type == 'bonus':
res.extra_end = res.indicator_id.extra_end
elif res.indicator_id.indicator_type == 'deduction':
res.extra_end = res.indicator_id.deduction_end
else:
res.extra_end = 0
``` |
{
"source": "joythegreat/IkaLog",
"score": 2
} |
#### File: ikalog/outputs/debug_video_writer.py
```python
import os
import sys
import threading
import time
import numpy as np
import cv2
from ikalog.utils import *
_ = Localization.gettext_translation('flight_recorder', fallback=True).gettext
# Needed in GUI mode
try:
import wx
except:
pass
# IkaLog Output Plugin: Write debug logs.
class DebugVideoWriter(object):
def generate_mp4_filename(self):
timestr = time.strftime("%Y%m%d_%H%M%S", time.localtime())
destfile = os.path.join(self.dir, 'ikalog_debug_%s.avi' % timestr)
return destfile
def start_recording(self, filename=None):
self.lock.acquire()
if filename is None:
filename = self.generate_mp4_filename()
fps = 2
capture_size = (1280, 720)
if IkaUtils.isWindows():
fourcc = cv2.VideoWriter_fourcc(
'M', 'J', 'P', 'G')
else:
fourcc = cv2.VideoWriter_fourcc(
'm', 'p', '4', 'v') # note the lower case
print('opening record file %s ....' % filename)
self.movie_writer = cv2.VideoWriter()
success = self.movie_writer.open(
filename, fourcc, fps, capture_size, True)
self._recording = True
print('ok, started recording')
self.lock.release()
def stop_recording(self):
self.lock.acquire()
if not self._recording:
self.lock.release()
return None
print('stopped recording')
self.movie_writer = None
self._recording = False
self.lock.release()
def on_debug_read_next_frame(self, context):
self.lock.acquire()
if self._recording:
self.movie_writer.write(context['engine']['frame'])
else:
self.movie_writer = None
self.lock.release()
def on_config_apply(self, context):
self.dir = self.edit_dir.GetValue()
def refresh_ui(self):
if self.dir is None:
self.edit_dir.SetValue('')
else:
self.edit_dir.SetValue(self.dir)
def on_button_record_start_click(self, event):
self.start_recording()
def on_button_record_stop_click(self, event):
self.stop_recording()
def on_option_tab_create(self, notebook):
self.panel = wx.Panel(notebook, wx.ID_ANY)
self.page = notebook.InsertPage(0, self.panel, _('Flight recorder'))
self.layout = wx.BoxSizer(wx.VERTICAL)
self.panel.SetSizer(self.layout)
self.edit_dir = wx.TextCtrl(self.panel, wx.ID_ANY, 'hoge')
self.button_record_start = wx.Button(self.panel, wx.ID_ANY, _('Start recording'))
self.button_record_stop = wx.Button(self.panel, wx.ID_ANY, _('Stop recording'))
self.layout.Add(wx.StaticText(
self.panel, wx.ID_ANY, _('Flight recorder for IkaLog video recognition problems.')))
self.layout.Add(wx.StaticText(
self.panel, wx.ID_ANY, _('Output Folder')))
self.layout.Add(self.edit_dir, flag=wx.EXPAND)
self.layout.Add(self.button_record_start)
self.layout.Add(self.button_record_stop)
self.panel.SetSizer(self.layout)
self.refresh_ui()
self.button_record_start.Bind(wx.EVT_BUTTON, self.on_button_record_start_click)
self.button_record_stop.Bind(wx.EVT_BUTTON, self.on_button_record_stop_click)
def on_key_press(self, context, key):
if key == ord('v'):
if self._recording:
self.stop_recording()
else:
self.start_recording()
def __init__(self, dir='debug_videos/'):
self._recording = False
self.dir = dir
self.lock = threading.Lock()
```
#### File: ikalog/outputs/fluentd.py
```python
from ikalog.utils import *
# Needed in GUI mode
try:
import wx
except:
pass
# IkaOutput_Fluentd: IkaLog Output Plugin for Fluentd ecosystem
#
class Fluentd(object):
def apply_ui(self):
self.enabled = self.checkEnable.GetValue()
self.host = self.editHost.GetValue()
self.port = self.editPort.GetValue()
self.tag = self.editTag.GetValue()
self.username = self.editUsername.GetValue()
def refresh_ui(self):
self._internal_update = True
self.checkEnable.SetValue(self.enabled)
if not self.host is None:
self.editHost.SetValue(self.host)
else:
self.editHost.SetValue('')
if not self.port is None:
self.editPort.SetValue(self.port)
else:
self.editPort.SetValue('')
if not self.tag is None:
self.editTag.SetValue(self.tag)
else:
self.editTag.SetValue('')
if not self.username is None:
self.editUsername.SetValue(self.username)
else:
self.editUsername.SetValue('')
def on_config_reset(self, context=None):
self.enabled = False
self.host = ''
self.port = ''
self.tag = ''
self.username = ''
def on_config_load_from_context(self, context):
self.on_config_reset(context)
try:
conf = context['config']['fluentd']
except:
conf = {}
if 'Enable' in conf:
self.enabled = conf['Enable']
if 'Host' in conf:
self.host = conf['Host']
if 'Port' in conf:
self.port = conf['Port']
if 'Tag' in conf:
self.tag = conf['Tag']
if 'Username' in conf:
self.username = conf['Username']
self.refresh_ui()
return True
def on_config_save_to_context(self, context):
context['config']['fluentd'] = {
'Enable': self.enabled,
'Host': self.host,
'Port': self.port,
'Username': self.username,
}
def on_config_apply(self, context):
self.apply_ui()
def on_option_tab_create(self, notebook):
self.panel = wx.Panel(notebook, wx.ID_ANY, size=(640, 360))
self.page = notebook.InsertPage(0, self.panel, 'Fluentd')
self.layout = wx.BoxSizer(wx.VERTICAL)
self.checkEnable = wx.CheckBox(
self.panel, wx.ID_ANY, u'Fluentd へ戦績を送信する')
self.editHost = wx.TextCtrl(self.panel, wx.ID_ANY, u'hoge')
self.editPort = wx.TextCtrl(self.panel, wx.ID_ANY, u'hoge')
self.editTag = wx.TextCtrl(self.panel, wx.ID_ANY, u'hoge')
self.editUsername = wx.TextCtrl(self.panel, wx.ID_ANY, u'hoge')
try:
layout = wx.GridSizer(2, 4)
except:
layout = wx.GridSizer(2)
layout.Add(wx.StaticText(self.panel, wx.ID_ANY, u'ホスト'))
layout.Add(self.editHost)
layout.Add(wx.StaticText(self.panel, wx.ID_ANY, u'ポート'))
layout.Add(self.editPort)
layout.Add(wx.StaticText(self.panel, wx.ID_ANY, u'タグ'))
layout.Add(self.editTag)
layout.Add(wx.StaticText(self.panel, wx.ID_ANY, u'ユーザ名'))
layout.Add(self.editUsername)
self.layout.Add(self.checkEnable)
self.layout.Add(layout)
self.panel.SetSizer(self.layout)
##
# Log a record to Fluentd.
# @param self The Object Pointer.
# @param recordType Record Type (tag)
# @param record Record
#
def submit_record(self, recordType, record):
try:
from fluent import sender
from fluent import event
if self.host is None:
sender.setup(self.tag)
else:
sender.setup(self.tag, host=self.host, port=self.port)
event.Event(recordType, record)
except:
print("Fluentd: Failed to submit a record")
##
# Generate a record for on_game_individual_result.
# @param self The Object Pointer.
# @param context IkaLog context
#
def get_record_game_individual_result(self, context):
map = IkaUtils.map2text(context['game']['map'])
rule = IkaUtils.rule2text(context['game']['rule'])
won = IkaUtils.getWinLoseText(
context['game']['won'], win_text="win", lose_text="lose", unknown_text="unknown")
return {
'username': self.username,
'map': map,
'rule': rule,
'result': won
}
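# Hypothetical record produced above:
#   {'username': 'ika', 'map': 'Walleye Warehouse',
#    'rule': 'Splat Zones', 'result': 'win'}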
##
# on_game_individual_result Hook
# @param self The Object Pointer
# @param context IkaLog context
#
def on_game_individual_result(self, context):
IkaUtils.dprint('%s (enabled = %s)' % (self, self.enabled))
if not self.enabled:
return
record = self.get_record_game_individual_result(context)
self.submit_record('gameresult', record)
##
# Check availability of modules this plugin depends on.
# @param self The Object Pointer.
#
def check_import(self):
try:
from fluent import sender
from fluent import event
except:
print("モジュール fluent-logger がロードできませんでした。 Fluentd 連携ができません。")
print("インストールするには以下のコマンドを利用してください。\n pip install fluent-logger\n")
##
# Constructor
# @param self The Object Pointer.
# @param tag tag
# @param username Username of the player.
# @param host Fluentd host if Fluentd is on a different node
# @param port Fluentd port
# @param username Name the bot use on Slack
#
def __init__(self, tag='ikalog', username='ika', host=None, port=24224):
self.enabled = False
self.tag = tag
self.username = username
self.host = host
self.port = port
self.check_import()
if __name__ == "__main__":
obj = Fluentd()
```
#### File: scenes/game/go_sign.py
```python
import sys
import cv2
from ikalog.utils import *
from ikalog.scenes.scene import Scene
class GameGoSign(Scene):
def reset(self):
super(GameGoSign, self).reset()
self._last_game_start_msec = - 100 * 1000
self._last_event_msec = - 100 * 1000
def match_no_cache(self, context):
if not self.is_another_scene_matched(context, 'GameTimerIcon'):
return False
frame = context['engine']['frame']
matched = self.mask_go_sign.match(frame)
if not matched:
return False
if not self.matched_in(context, 60 * 1000, attr='_last_game_start_msec'):
return False
if not self.matched_in(context, 60 * 1000, attr='_last_event_msec'):
self._call_plugins('on_game_go_sign')
self._last_event_msec = context['engine']['msec']
self._last_game_start_msec = -100 * 1000
return matched
def _analyze(self, context):
pass
def on_game_start(self, context):
self._last_game_start_msec = context['engine']['msec']
def _init_scene(self, debug=False):
self.mask_go_sign = IkaMatcher(
472, 140, 332, 139,
img_file='game_go_sign.png',
threshold=0.90,
orig_threshold=0.5,
label='Go!',
bg_method=matcher.MM_WHITE(sat=(32, 255), visibility=(0, 210)),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
if __name__ == "__main__":
GameGoSign.main_func()
```
#### File: scenes/game/inklings_tracker.py
```python
import sys
import cv2
import numpy as np
from ikalog.scenes.scene import Scene
from ikalog.utils import *
class InklingsTracker(Scene):
def lives(self, context):
if not context['engine']['inGame']:
return None, None
img = context['engine']['frame'][self.meter_top:self.meter_top +
self.meter_height, self.meter_left:self.meter_left + self.meter_width]
img_hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV)
img2 = cv2.resize(img, (self.meter_width, 100))
# for i in range(2):
# img2[20:40,:, i] = cv2.resize(img_hsv[:,:,0], (self.meter_width, 20))
# img2[40:60,:, i] = cv2.resize(img_hsv[:,:,1], (self.meter_width, 20))
# img2[60:80,:, i] = cv2.resize(img_hsv[:,:,2], (self.meter_width, 20))
#
# cv2.imshow('yagura', img2)
# cv2.imshow('yagura_hsv', cv2.resize(img_hsv, (self.meter_width, 100)))
# detect the position of the white "VS" text (low saturation, high value)
white_mask_s = cv2.inRange(img_hsv[:, :, 1], 0, 8)
white_mask_v = cv2.inRange(img_hsv[:, :, 2], 248, 256)
white_mask = np.minimum(white_mask_s, white_mask_v)
x_list = np.arange(self.meter_width)
vs_x = np.extract(white_mask > 128, x_list)
vs_xPos = np.average(vs_x)  # center X coordinate of the "VS" text
# print(vs_xPos)
# detect everything that is not bright white (from the grayscale image)
img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
img_gray2 = cv2.resize(img_gray, (self.meter_width, 20))
img_gray3 = cv2.inRange(img_gray2, 48, 256)
team1 = []
team2 = []
# left team
x = vs_xPos - 20
x2 = x
direction = -3
for i in range(4):
while img_gray3[0, x] < 128:
x2 = x
x = x + direction
while img_gray3[0, x] > 128:
x = x + direction
x1 = x
# the player icon lies between x1 and x2
squid_xPos = int((x1 + x2) / 2)
#print(x1, squid_xPos, x2)
team1.append(squid_xPos)
# right team
x = vs_xPos + 20
x1 = x
direction = 3
for i in range(4):
while img_gray3[0, x] < 128:
x1 = x
x = x + direction
while img_gray3[0, x] > 128:
x = x + direction
x2 = x
# the player icon lies between x1 and x2
squid_xPos = int((x1 + x2) / 2)
#print(x1, squid_xPos, x2)
team2.append(squid_xPos)
team1 = np.sort(team1)
team2 = np.sort(team2)
# build a mask that is True where the eye area is white
img_eye = context['engine']['frame'][
44:50, self.meter_left:self.meter_left + self.meter_width]
img_eye_hsv = cv2.cvtColor(img_eye, cv2.COLOR_BGR2HSV)
eye_white_mask_s = cv2.inRange(img_eye_hsv[:, :, 1], 0, 48)
eye_white_mask_v = cv2.inRange(img_eye_hsv[:, :, 2], 200, 256)
eye_white_mask = np.minimum(eye_white_mask_s, eye_white_mask_v)
a = []
team1_color = None
team2_color = None
for i in team1:
eye_score = np.sum(eye_white_mask[:, i - 4: i + 4]) / 255
alive = eye_score > 1
a.append(alive)
if alive:
team1_color = img[0, i] # BGR
team1_color_hsv = img_hsv[0, i]
cv2.rectangle(context['engine']['frame'], (self.meter_left +
i - 4, 44), (self.meter_left + i + 4, 50), (255, 255, 255), 1)
b = []
for i in team2:
eye_score = np.sum(eye_white_mask[:, i - 4: i + 4]) / 255
alive = eye_score > 1
b.append(alive)
if alive:
team2_color = img[0, i] # BGR
team2_color_hsv = img_hsv[0, i]
cv2.rectangle(context['engine']['frame'], (self.meter_left +
i - 4, 44), (self.meter_left + i + 4, 50), (255, 255, 255), 1)
# print("色: 味方 %d 敵 %d" % (team1_color, team2_color))
# print("味方 %s 敵 %s" % (a,b))
# cv2.imshow('yagura_gray', img_gray2)
# cv2.imshow('yagura_gray2', img_gray3)
# cv2.imshow('eyes', eye_white_mask)
hasTeamColor = ('team_color_bgr' in context['game'])
if (not team1_color is None) and (not team2_color is None) and not hasTeamColor:
context['game']['team_color_bgr'] = [
team1_color,
team2_color
]
context['game']['team_color_hsv'] = [
team1_color_hsv,
team2_color_hsv
]
callPlugins = context['engine']['service']['callPlugins']
callPlugins('on_game_team_color')
return (a, b)
def matchPaintScore(self, context):
x_list = [938, 988, 1032, 1079]
paint_score = 0
for x in x_list:
# Extract a digit.
img = context['engine']['frame'][33:33 + 41, x:x + 37, :]
# Check if the color distribution is in the expected range.
img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
hist = cv2.calcHist([img_gray], [0], None, [5], [0, 256])
try:
black_ratio = hist[0] / np.sum(hist)
black_white_ratio = (hist[0] + hist[4]) / np.sum(hist)
except ZeroDivisionError:
# an empty histogram leaves the ratios undefined; treat as unrecognizable
return None
if (black_ratio < 0.5) or (0.8 < black_ratio) or \
(black_white_ratio < 0.8):
# Seems not to be a white character on black background.
return None
# Recognize a digit.
digit = self.number_recoginizer.match_digits(
img,
num_digits=(1, 1),
char_width=(11, 40),
char_height=(28, 33),
)
if digit is None:
return None
paint_score = (paint_score * 10) + digit
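# Digits accumulate left to right: recognizing 1, 2, 3, 4 in turn yields
# paint_score == 1234.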
# Set latest paint_score to the context.
context['game']['paint_score'] = \
max(context['game'].get('paint_score', 0), paint_score)
```
#### File: scenes/game/ranked_battle_events.py
```python
import sys
import cv2
from ikalog.utils import *
from ikalog.scenes.stateful_scene import StatefulScene
class GameRankedBattleEvents(StatefulScene):
def reset(self):
super(GameRankedBattleEvents, self).reset()
self._last_event_msec = - 100 * 1000
self._last_mask_matched = None
self._last_mask_triggered_msec = - 100 * 1000
def find_best_match(self, frame, matchers_list):
most_possible = (0, None)
for matcher in matchers_list:
matched, fg_score, bg_score = matcher.match_score(frame)
if matched and (most_possible[0] < fg_score):
most_possible = (fg_score, matcher)
return most_possible[1]
def on_game_reset(self, context):
self._masks_active = {}
def on_game_start(self, context):
rule_id = IkaUtils.rule2id(context['game']['rule'])
if rule_id == 'area':
self._masks_active = self._masks_splatzone.copy()
self._masks_active.update(self._masks_ranked)
elif rule_id == 'hoko':
self._masks_active = self._masks_rainmaker.copy()
self._masks_active.update(self._masks_ranked)
elif rule_id == 'yagura':
self._masks_active = self._masks_towercontrol.copy()
self._masks_active.update(self._masks_ranked)
else:
self._masks_active = {}
def _state_triggered(self, context):
frame = context['engine']['frame']
if frame is None:
return False
most_possible = self.find_best_match(
frame, list(self._masks_active.keys()))
if most_possible is None:
self._switch_state(self._state_default)
return False
if most_possible != self._last_mask_matched:
IkaUtils.dprint('%s: matched %s' % (self, most_possible))
self._last_mask_matched = most_possible
# self._switch_state(self._state_pending)
return True
def _state_pending(self, context):
# if self.is_another_scene_matched(context, 'GameTimerIcon'):
# return False
frame = context['engine']['frame']
if frame is None:
return False
most_possible = self.find_best_match(
frame, list(self._masks_active.keys()))
if most_possible is None:
self._switch_state(self._state_default)
return False
if most_possible != self._last_mask_matched:
self._last_mask_matched = most_possible
return True
# else: # if most_possbile == self._last_mask_matched:
# go through
# not self.matched_in(context, 3000, attr='_last_mask_triggered_msec'):
if 1:
event = self._masks_active[most_possible]
IkaUtils.dprint('%s: trigger an event %s' % (self, event))
self._call_plugins(event)
self._last_mask_triggered = most_possible
self._last_mask_triggered_msec = context['engine']['msec']
self._switch_state(self._state_triggered)
def _state_default(self, context):
if 0:
if self.is_another_scene_matched(context, 'GameTimerIcon'):
return False
frame = context['engine']['frame']
if frame is None:
return False
most_possible = self.find_best_match(
frame, list(self._masks_active.keys()))
if most_possible is None:
return False
# IkaUtils.dprint('%s: matched %s' % (self, most_possible))
self._last_mask_matched = most_possible
self._switch_state(self._state_pending)
return True
def _analyze(self, context):
pass
def _load_splatzone_masks(self, debug=False):
mask_we_got = IkaMatcher(
452, 177, 361, 39,
img_file='splatzone_we_got.png',
threshold=0.9,
orig_threshold=0.1,
label='splatzone/we_got',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_we_lost = IkaMatcher(
432, 176, 404, 40,
img_file='splatzone_we_lost.png',
threshold=0.9,
orig_threshold=0.1,
label='splatzone/we_lost',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_they_got = IkaMatcher(
452, 177, 361, 39,
img_file='splatzone_they_got.png',
threshold=0.9,
orig_threshold=0.1,
label='splatzone/they_got',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_they_lost = IkaMatcher(
432, 176, 404, 40,
img_file='splatzone_they_lost.png',
threshold=0.9,
orig_threshold=0.1,
label='splatzone/they_lost',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
self._masks_splatzone = {
mask_we_got: 'on_game_splatzone_we_got',
mask_we_lost: 'on_game_splatzone_we_lost',
mask_they_got: 'on_game_splatzone_they_got',
mask_they_lost: 'on_game_splatzone_they_lost',
}
def _load_rainmaker_masks(self, debug=False):
mask_we_got = IkaMatcher(
452, 177, 361, 39,
img_file='rainmaker_we_got.png',
threshold=0.9,
orig_threshold=0.1,
label='rainmaker/we_got',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_we_lost = IkaMatcher(
432, 176, 404, 40,
img_file='rainmaker_we_lost.png',
threshold=0.9,
orig_threshold=0.1,
label='rainmaker/we_lost',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_they_got = IkaMatcher(
452, 177, 361, 39,
img_file='rainmaker_they_got.png',
threshold=0.9,
orig_threshold=0.1,
label='rainmaker/they_got',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_they_lost = IkaMatcher(
432, 176, 404, 40,
img_file='rainmaker_they_lost.png',
threshold=0.9,
orig_threshold=0.1,
label='rainmaker/they_lost',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
self._masks_rainmaker = {
mask_we_got: 'on_game_rainmaker_we_got',
mask_we_lost: 'on_game_rainmaker_we_lost',
mask_they_got: 'on_game_rainmaker_they_got',
mask_they_lost: 'on_game_rainmaker_they_lost',
}
def _load_towercontrol_masks(self, debug=False):
mask_we_took = IkaMatcher(
452, 177, 361, 39,
img_file='towercontrol_we_took.png',
threshold=0.9,
orig_threshold=0.1,
label='towercontrol/we_took',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_we_lost = IkaMatcher(
432, 176, 404, 40,
img_file='towercontrol_we_lost.png',
threshold=0.9,
orig_threshold=0.1,
label='towercontrol/we_lost',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_they_took = IkaMatcher(
452, 177, 361, 39,
img_file='towercontrol_they_took.png',
threshold=0.9,
orig_threshold=0.1,
label='towercontrol/they_took',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
mask_they_lost = IkaMatcher(
432, 176, 404, 40,
img_file='towercontrol_they_lost.png',
threshold=0.9,
orig_threshold=0.1,
label='towercontrol/they_lost',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
self._masks_towercontrol = {
mask_we_took: 'on_game_towercontrol_we_took',
mask_we_lost: 'on_game_towercontrol_we_lost',
mask_they_took: 'on_game_towercontrol_they_took',
mask_they_lost: 'on_game_towercontrol_they_lost',
}
def _init_scene(self, debug=False):
self._masks_active = {}
self._load_rainmaker_masks(debug=debug)
self._load_splatzone_masks(debug=debug)
self._load_towercontrol_masks(debug=debug)
self.mask_we_lead = IkaMatcher(
473, 173, 322, 40,
img_file='ranked_we_lead.png',
threshold=0.9,
orig_threshold=0.1,
label='splatzone/we_lead',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
self.mask_they_lead = IkaMatcher(
473, 173, 322, 40,
img_file='ranked_they_lead.png',
threshold=0.9,
orig_threshold=0.1,
label='splatzone/they_lead',
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
debug=debug,
)
self._masks_ranked = {
self.mask_we_lead: 'on_game_ranked_we_lead',
self.mask_they_lead: 'on_game_ranked_they_lead',
}
if __name__ == "__main__":
GameRankedBattleEvents.main_func()
```
#### File: ikalog/scenes/result_detail.py
```python
import copy
import datetime
import os
import pickle
import re
import sys
import threading
import traceback
from datetime import datetime
import cv2
import numpy as np
from ikalog.api import APIClient
from ikalog.scenes.stateful_scene import StatefulScene
from ikalog.utils import *
from ikalog.inputs.filters import OffsetFilter
class ResultDetail(StatefulScene):
#
    # AKAZE-based offset / size adjustment
#
def result_detail_normalizer(self, img):
        # Black out regions that should not contribute keypoints
img = copy.deepcopy(img)
cv2.rectangle(img, (0, 000), (680, 720), (0, 0, 0), -1)
        # Generate the feature image
white_filter = matcher.MM_WHITE()
dark_filter = matcher.MM_DARK(visibility=(0, 16))
img_w = white_filter.evaluate(img)
img_dark = 255 - dark_filter.evaluate(img)
img_features = img_dark + img_w
img_features[:, 1000:1280] = \
img_dark[:, 1000:1280] - img_w[:, 1000:1280]
# cv2.imshow('features', img_features)
# cv2.waitKey(10000)
return img_features
def get_keypoints(self, img):
detector = cv2.AKAZE_create()
keypoints, descriptors = detector.detectAndCompute(
img,
None,
)
return keypoints, descriptors
def filter_matches(self, kp1, kp2, matches, ratio=0.75):
mkp1, mkp2 = [], []
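        # Lowe's ratio test: keep a match only if its best distance is
        # clearly smaller than the distance of the second-best candidate.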
for m in matches:
if len(m) == 2 and m[0].distance < m[1].distance * ratio:
m = m[0]
mkp1.append(kp1[m.queryIdx])
mkp2.append(kp2[m.trainIdx])
p1 = np.float32([kp.pt for kp in mkp1])
p2 = np.float32([kp.pt for kp in mkp2])
kp_pairs = zip(mkp1, mkp2)
return p1, p2, kp_pairs
def tuples_to_keypoints(self, tuples):
new_l = []
for point in tuples:
pt, size, angle, response, octave, class_id = point
new_l.append(cv2.KeyPoint(
pt[0], pt[1], size, angle, response, octave, class_id))
return new_l
def keypoints_to_tuples(self, points):
new_l = []
for point in points:
new_l.append((point.pt, point.size, point.angle, point.response, point.octave,
point.class_id))
return new_l
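    # Note: cv2.KeyPoint objects are not picklable, so the two helpers above
    # round-trip keypoints through plain tuples for the pickle-based model file.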
def load_model_from_file(self, filename):
f = open(filename, 'rb')
l = pickle.load(f)
f.close()
self.ref_image_geometry = l[0]
self.ref_keypoints = self.tuples_to_keypoints(l[1])
self.ref_descriptors = l[2]
def save_model_to_file(self, filename):
f = open(filename, 'wb')
pickle.dump([
self.ref_image_geometry,
self.keypoints_to_tuples(self.ref_keypoints),
self.ref_descriptors,
], f)
f.close()
def rebuild_model(self, dest_filename, src_filename=None, img=None, normalizer_func=None):
if img is None:
img = cv2.imread(src_filename, 0)
assert img is not None
if normalizer_func is not None:
img = normalizer_func(img)
assert img is not None
self.ref_keypoints, self.ref_descriptors = \
self.get_keypoints(img)
self.ref_image_geometry = img.shape[:2]
self.save_model_to_file(dest_filename)
IkaUtils.dprint('%s: Created model data %s' % (self, dest_filename))
def load_akaze_model(self):
model_filename = os.path.join(
IkaUtils.baseDirectory(), 'data', 'result_detail_features.akaze.model')
try:
self.load_model_from_file(model_filename)
            if self.ref_keypoints is None:
                raise ValueError('no keypoints in the loaded AKAZE model')
except:
IkaUtils.dprint(
'%s: Failed to load akaze model. trying to rebuild...' % self)
self.rebuild_model(
model_filename,
img=cv2.imread('data/result_detail_features.png'),
normalizer_func=self.result_detail_normalizer
)
self.load_model_from_file(model_filename)
def auto_warp(self, context):
        # Auto-detect the screen offset and return the warped image (uses AKAZE)
frame = context['engine'].get('frame', None)
if frame is None:
return None
keypoints, descs = self.get_keypoints(
self.result_detail_normalizer(frame))
matcher = cv2.BFMatcher(cv2.NORM_HAMMING)
raw_matches = matcher.knnMatch(
descs,
trainDescriptors=self.ref_descriptors,
k=2
)
p2, p1, kp_pairs = self.filter_matches(
keypoints,
self.ref_keypoints,
raw_matches,
)
if len(p1) >= 4:
H, status = cv2.findHomography(p1, p2, cv2.RANSAC, 5.0)
print('%d / %d inliers/matched' % (np.sum(status), len(status)))
        else:
            H, status = None, None
            print('%d matches found, not enough for homography estimation' % len(p1))
            raise ValueError('not enough matches for homography estimation')
w = 1280
h = 720
corners = np.float32([[0, 0], [w, 0], [w, h], [0, h]])
pts2 = np.float32([[0, 0], [w, 0], [w, h], [0, h]])
pts1 = np.float32(cv2.perspectiveTransform(
corners.reshape(1, -1, 2), H).reshape(-1, 2) + (0, 0))
M = cv2.getPerspectiveTransform(pts1, pts2)
# out = cv2.drawKeypoints(img2, keypoints1, None)
new_frame = cv2.warpPerspective(frame, M, (w, h))
        # Does the warped image match the mask?
matched = IkaUtils.matchWithMask(
new_frame, self.winlose_gray, 0.997, 0.22)
if matched:
return new_frame
IkaUtils.dprint('%s: auto_warp() function broke the image.' % self)
return None
def auto_offset(self, context):
        # Auto-detect the screen offset and return the adjusted image
filter = OffsetFilter(self)
filter.enable()
        # Attributes required by the filter...
self.out_width = 1280
self.out_height = 720
best_match = (context['engine']['frame'], 0.0, 0, 0)
offset_list = [0, -5, -4, -3, -2, -1, 1, 2, 3, 4, 5]
for ox in offset_list:
for oy in offset_list:
filter.offset = (ox, oy)
img = filter.execute(context['engine']['frame'])
IkaUtils.matchWithMask(
context['engine']['frame'], self.winlose_gray, 0.997, 0.22)
score = self.mask_win.match_score(img)
if not score[0]:
continue
if best_match[1] < score[1]:
best_match = (img, score[1], ox, oy)
if best_match[2] != 0 or best_match[3] != 0:
IkaUtils.dprint('%s: Offset detected. (%d, %d)' %
(self, best_match[2], best_match[3]))
return best_match[0]
def async_recoginiton_worker(self, context):
        IkaUtils.dprint('%s: weapons recognition started.' % self)
weapons_list = []
for player in context['game']['players']:
weapons_list.append(player.get('img_weapon', None))
# local
try:
if self._client_local is not None:
weapon_response_list = self._client_local.recoginize_weapons(
weapons_list)
for entry_id in range(len(weapon_response_list)):
context['game']['players'][entry_id]['weapon'] = \
weapon_response_list[entry_id]
except:
            IkaUtils.dprint('Exception occurred in weapon recognition.')
IkaUtils.dprint(traceback.format_exc())
# remote
try:
if self._client_remote is not None:
weapon_response_list = self._client_remote.recoginize_weapons(
weapons_list)
for entry_id in range(len(weapon_response_list)):
context['game']['players'][entry_id]['weapon'] = \
weapon_response_list[entry_id]
except:
            IkaUtils.dprint('Exception occurred in weapon recognition.')
IkaUtils.dprint(traceback.format_exc())
        IkaUtils.dprint('%s: weapons recognition done.' % self)
self._call_plugins_later('on_result_detail')
self._call_plugins_later('on_game_individual_result')
def is_entry_me(self, img_entry):
        # Judge from the histogram whether this entry is the player ("me")
if len(img_entry.shape) > 2 and img_entry.shape[2] != 1:
img_me = cv2.cvtColor(img_entry[:, 0:43], cv2.COLOR_BGR2GRAY)
else:
img_me = img_entry[:, 0:43]
img_me = cv2.threshold(img_me, 230, 255, cv2.THRESH_BINARY)[1]
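        # The player's own entry carries a bright marker in the left 43px
        # strip; a normalized score > 1 means more than ~10% of it is white.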
me_score = np.sum(img_me)
me_score_normalized = 0
try:
me_score_normalized = me_score / (43 * 45 * 255 / 10)
except ZeroDivisionError as e:
me_score_normalized = 0
#print("score=%3.3f" % me_score_normalized)
return (me_score_normalized > 1)
def guess_fest_title_ja(self, img_fest_title):
img_fest_title_hsv = cv2.cvtColor(img_fest_title, cv2.COLOR_BGR2HSV)
yellow = cv2.inRange(img_fest_title_hsv[:, :, 0], 32 - 2, 32 + 2)
yellow2 = cv2.inRange(img_fest_title_hsv[:, :, 2], 240, 255)
img_fest_title_mask = np.minimum(yellow, yellow2)
is_fes = np.sum(img_fest_title_mask) > img_fest_title_mask.shape[
0] * img_fest_title_mask.shape[1] * 16
        # Mark columns judged to contain text as 1 and sum vertically
        img_fest_title_hist = np.sum(
            img_fest_title_mask / 255, axis=0)  # detected dot count per column
a = np.array(range(len(img_fest_title_hist)), dtype=np.int32)
b = np.extract(img_fest_title_hist > 0, a)
x1 = np.amin(b)
x2 = np.amax(b)
if (x2 - x1) < 4:
return None, None, None
        # Crop to the minimal bounding box
img_fest_title_new = img_fest_title[:, x1:x2]
        # The boy/girl part is empirically 56 dots wide
gender_x1 = x2 - 36
gender_x2 = x2
img_fest_gender = img_fest_title_mask[:, gender_x1:gender_x2]
        # Fest title prefixes: futsuu-no / makoto-no / super / charisma / eien
img_fest_level = img_fest_title_mask[:, 0:52]
        gender = None
        try:
if self.fest_gender_recoginizer:
gender = self.fest_gender_recoginizer.match(
cv2.cvtColor(img_fest_gender, cv2.COLOR_GRAY2BGR))
except:
IkaUtils.dprint(traceback.format_exc())
gender = None
        level = None
        try:
if self.fest_level_recoginizer:
level = self.fest_level_recoginizer.match(
cv2.cvtColor(img_fest_level, cv2.COLOR_GRAY2BGR))
except:
IkaUtils.dprint(traceback.format_exc())
level = None
team = None
return gender, level, team
def guess_fest_title_en_NA(self, img_fest_title):
IkaUtils.dprint(
            '%s: Fest recognition in this language is not implemented'
% self
)
return None, None, None
def guess_fest_title_en_UK(self, img_fest_title):
IkaUtils.dprint(
            '%s: Fest recognition in this language is not implemented'
% self
)
return None, None, None
def guess_fest_title(self, img_fest_title):
guess_fest_title_funcs = {
'ja': self.guess_fest_title_ja,
'en_NA': self.guess_fest_title_en_NA,
'en_UK': self.guess_fest_title_en_UK,
}
func = None
for lang in Localization.get_game_languages():
func = guess_fest_title_funcs.get(lang, None)
if func is not None:
break
if func is None:
IkaUtils.dprint(
                '%s: Fest recognition in this language is not implemented'
% self
)
            return None, None, None
return func(img_fest_title)
def analyze_team_colors(self, context, img):
        # Estimate the team colors from the screenshot
assert 'won' in context['game']
assert img is not None
if context['game']['won']:
my_team_color_bgr = img[115:116, 1228:1229]
counter_team_color_bgr = img[452:453, 1228:1229]
else:
counter_team_color_bgr = img[115:116, 1228:1229]
my_team_color_bgr = img[452:453, 1228:1229]
my_team_color = {
'rgb': cv2.cvtColor(my_team_color_bgr, cv2.COLOR_BGR2RGB).tolist()[0][0],
'hsv': cv2.cvtColor(my_team_color_bgr, cv2.COLOR_BGR2HSV).tolist()[0][0],
}
counter_team_color = {
'rgb': cv2.cvtColor(counter_team_color_bgr, cv2.COLOR_BGR2RGB).tolist()[0][0],
'hsv': cv2.cvtColor(counter_team_color_bgr, cv2.COLOR_BGR2HSV).tolist()[0][0],
}
return (my_team_color, counter_team_color)
def analyze_entry(self, img_entry):
        # Left-edge start position of each player entry
entry_left = 610
        # Width of each player entry
entry_width = 610
        # Height of each player entry
entry_height = 46
        # Field start positions and widths within each entry
entry_xoffset_weapon = 760 - entry_left
entry_xoffset_weapon_me = 719 - entry_left
entry_width_weapon = 47
entry_xoffset_name = 809 - entry_left
entry_xoffset_name_me = 770 - entry_left
entry_width_name = 180
entry_xoffset_nawabari_score = 995 - entry_left
entry_width_nawabari_score = 115
entry_xoffset_score_p = entry_xoffset_nawabari_score + entry_width_nawabari_score
entry_width_score_p = 20
entry_xoffset_kd = 1185 - entry_left
entry_width_kd = 31
entry_height_kd = 21
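        # (all coordinates above assume the 1280x720 engine frame)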
me = self.is_entry_me(img_entry)
if me:
weapon_left = entry_xoffset_weapon_me
name_left = entry_xoffset_name_me
rank_left = 2
else:
weapon_left = entry_xoffset_weapon
name_left = entry_xoffset_name
rank_left = 43
img_rank = img_entry[20:45, rank_left:rank_left + 43]
img_weapon = img_entry[:, weapon_left:weapon_left + entry_width_weapon]
img_name = img_entry[:, name_left:name_left + entry_width_name]
img_score = img_entry[
:, entry_xoffset_nawabari_score:entry_xoffset_nawabari_score + entry_width_nawabari_score]
img_score_p = img_entry[
:, entry_xoffset_score_p:entry_xoffset_score_p + entry_width_score_p]
ret, img_score_p_thresh = cv2.threshold(cv2.cvtColor(
img_score_p, cv2.COLOR_BGR2GRAY), 230, 255, cv2.THRESH_BINARY)
img_kills = img_entry[0:entry_height_kd,
entry_xoffset_kd:entry_xoffset_kd + entry_width_kd]
img_deaths = img_entry[entry_height_kd:entry_height_kd *
2, entry_xoffset_kd:entry_xoffset_kd + entry_width_kd]
        img_fes_title = img_name[0:entry_height // 2, :]
img_fes_title_hsv = cv2.cvtColor(img_fes_title, cv2.COLOR_BGR2HSV)
yellow = cv2.inRange(img_fes_title_hsv[:, :, 0], 32 - 2, 32 + 2)
yellow2 = cv2.inRange(img_fes_title_hsv[:, :, 2], 240, 255)
img_fes_title_mask = np.minimum(yellow, yellow2)
is_fes = np.sum(img_fes_title_mask) > img_fes_title_mask.shape[
0] * img_fes_title_mask.shape[1] * 16
if is_fes:
fes_gender, fes_level, fes_team = self.guess_fest_title(
img_fes_title
)
        # Not during a fest: if the 'p' mark is visible (avg = 55.0) it is Turf War; otherwise a Ranked battle
isRankedBattle = (not is_fes) and (
np.average(img_score_p_thresh[:, :]) < 16)
isNawabariBattle = (not is_fes) and (not isRankedBattle)
entry = {
"me": me,
"img_rank": img_rank,
"img_weapon": img_weapon,
"img_name": img_name,
"img_score": img_score,
"img_kills": img_kills,
"img_deaths": img_deaths,
}
if is_fes:
entry['img_fes_title'] = img_fes_title
if fes_gender and ('ja' in fes_gender):
entry['gender'] = fes_gender['ja']
if fes_level and ('ja' in fes_level):
entry['prefix'] = fes_level['ja']
if fes_gender and ('en' in fes_gender):
entry['gender_en'] = fes_gender['en']
            if fes_level and ('en' in fes_level):
                entry['prefix_en'] = fes_level['en']
if self.udemae_recoginizer and isRankedBattle:
try:
entry['udemae_pre'] = self.udemae_recoginizer.match(
entry['img_score']).upper()
except:
                IkaUtils.dprint('Exception occurred in Udemae recognition.')
IkaUtils.dprint(traceback.format_exc())
if self.number_recoginizer:
try:
entry['rank'] = self.number_recoginizer.match_digits(
entry['img_rank'])
entry['kills'] = self.number_recoginizer.match_digits(
entry['img_kills'])
entry['deaths'] = self.number_recoginizer.match_digits(
entry['img_deaths'])
if isNawabariBattle:
entry['score'] = self.number_recoginizer.match_digits(
entry['img_score'])
except:
                IkaUtils.dprint('Exception occurred in K/D recognition.')
IkaUtils.dprint(traceback.format_exc())
return entry
def analyze(self, context):
        # Left-edge start position of each player entry
        entry_left = 610
        # Width of each player entry
        entry_width = 610
        # Height of each player entry
        entry_height = 45
entry_top = [101, 166, 231, 296, 431, 496, 561, 626]
        # Adjust the screen position with auto_warp() or auto_offset()
# img = self.auto_warp(context)
img = None
if img is not None:
matched = IkaUtils.matchWithMask(
img,
                self.winlose_gray, 0.997, 0.22
)
if not matched:
img = None
if img is None:
# IkaUtils.dprint('%s: Falling back to auto_offset()' % self)
img = self.auto_offset(context)
        # List of Inklings (players)
context['game']['players'] = []
weapon_list = []
entry_id = 0
for entry_id in range(len(entry_top)): # 0..7
top = entry_top[entry_id]
img_entry = img[top:top + entry_height,
entry_left:entry_left + entry_width]
e = self.analyze_entry(img_entry)
if e.get('rank', None) is None:
continue
# team, rank_in_team
e['team'] = 1 if entry_id < 4 else 2
            e['rank_in_team'] = entry_id + 1 if e['team'] == 1 else entry_id - 3
# won
if e['me']:
context['game']['won'] = (entry_id < 4)
context['game']['players'].append(e)
if 0:
e_ = e.copy()
for f in list(e.keys()):
if f.startswith('img_'):
del e_[f]
print(e_)
if 0:
worker = threading.Thread(
target=self.async_recoginiton_worker, args=(context,))
worker.start()
else:
self.async_recoginiton_worker(context)
        # Team colors
team_colors = self.analyze_team_colors(context, img)
context['game']['my_team_color'] = team_colors[0]
context['game']['counter_team_color'] = team_colors[1]
        # Fest (Splatfest) related
context['game']['is_fes'] = ('prefix' in context['game']['players'][0])
        # Miscellaneous
# context['game']['timestamp'] = datetime.now()
self._call_plugins_later('on_result_detail_still')
return True
def reset(self):
super(ResultDetail, self).reset()
self._last_event_msec = - 100 * 1000
self._match_start_msec = - 100 * 1000
self._last_frame = None
self._diff_pixels = []
def _state_default(self, context):
if self.matched_in(context, 30 * 1000):
return False
if self.is_another_scene_matched(context, 'GameTimerIcon'):
return False
frame = context['engine']['frame']
if frame is None:
return False
matched = IkaUtils.matchWithMask(
context['engine']['frame'], self.winlose_gray, 0.997, 0.22)
if matched:
self._match_start_msec = context['engine']['msec']
self._switch_state(self._state_tracking)
return matched
def _state_tracking(self, context):
frame = context['engine']['frame']
if frame is None:
return False
        # Match 1: rough check against the known mask
matched = IkaUtils.matchWithMask(
context['engine']['frame'], self.winlose_gray, 0.997, 0.22)
        # Match 2: once match 1 passes, wait until the white text stabilizes
        #   Condition 1: white-text diff against the previous frame reaches 0 pixels
        #   Condition 2: white-text diff over the last n frames stays below 10 pixels
        #   (a fallback for capture devices with heavy noise)
diff_pixels = None
img_current_h_i16 = None
matched_diff0 = False
matched_diff10 = False
if matched:
img_current_bgr = frame[626:626 + 45, 640:1280]
img_current_hsv = cv2.cvtColor(img_current_bgr, cv2.COLOR_BGR2HSV)
img_current_h_i16 = np.array(img_current_hsv[:, :, 1], np.int16)
if matched and (self._last_frame is not None):
img_diff = abs(img_current_h_i16 - self._last_frame)
img_diff_u8 = np.array(img_diff, np.uint8)
img_white = self._white_filter.evaluate(img_current_bgr)
img_diff_u8[img_white < 128] = 0
img_diff_u8[img_diff_u8 < 16] = 0
img_diff_u8[img_diff_u8 > 1] = 255
# cv2.imshow('DIFF', img_diff_u8)
# cv2.imshow('white', img_white)
diff_pixels = int(np.sum(img_diff_u8) / 255)
if img_current_h_i16 is not None:
self._last_frame = img_current_h_i16
if diff_pixels is not None:
matched_diff0 = (diff_pixels == 0)
self._diff_pixels.append(diff_pixels)
if len(self._diff_pixels) > 4:
self._diff_pixels.pop(0)
matched_diff10 = np.max(self._diff_pixels) < 10
# print('img_diff_pixels', diff_pixels, self._diff_pixels, matched_diff0, matched_diff10)
        # escaped: unmatched for 1000ms or more, so we have surely left the scene
        # matched2: the white text is stable (condition 1 or condition 2 holds)
        # triggered: an event has already been fired within the recent window
escaped = not self.matched_in(context, 1000)
matched2 = matched_diff0 or matched_diff10
triggered = self.matched_in(
context, 30 * 1000, attr='_last_event_msec')
if matched2 and (not triggered):
self.analyze(context)
# self.dump(context)
# self._call_plugins('on_result_detail')
# self._call_plugins('on_game_individual_result')
self._last_event_msec = context['engine']['msec']
triggered = True
if matched:
return True
if escaped:
if (not triggered) and (len(self._diff_pixels) > 0):
IkaUtils.dprint(''.join((
                    '%s: Detected the result screen but could not read a stable still image. Possible causes:\n' % self,
                    '  - heavy noise from the HDMI capture device\n',
                    '  - the video file being processed has many block-noise artifacts\n',
                    '  - frames are not supplied in the correct format\n',
                    '  min(diff_pixels): %s' % min(self._diff_pixels),
)))
self._match_start_msec = - 100 * 1000
self._last_frame = None
self._diff_pixels = []
self._switch_state(self._state_default)
return False
def dump(self, context):
matched = True
analyzed = True
won = IkaUtils.getWinLoseText(
context['game']['won'], win_text="win", lose_text="lose", unknown_text="unknown")
fes = context['game']['is_fes']
print("matched %s analyzed %s result %s fest %s" %
(matched, analyzed, won, fes))
print('--------')
for e in context['game']['players']:
udemae = e['udemae_pre'] if ('udemae_pre' in e) else None
rank = e['rank'] if ('rank' in e) else None
kills = e['kills'] if ('kills' in e) else None
deaths = e['deaths'] if ('deaths' in e) else None
weapon = e['weapon'] if ('weapon' in e) else None
score = e['score'] if ('score' in e) else None
me = '*' if e['me'] else ''
if 'prefix' in e:
prefix = e['prefix']
prefix_ = re.sub('の', '', prefix)
gender = e['gender']
else:
prefix_ = ''
gender = ''
print("team %s rank_in_team %s rank %s udemae %s %s/%s weapon %s score %s %s%s %s" % (
e.get('team', None),
e.get('rank_in_team', None),
e.get('rank', None),
e.get('udemae_pre', None),
e.get('kills', None),
e.get('deaths', None),
e.get('weapon', None),
e.get('score', None),
prefix_, gender,
me,))
print('--------')
def _analyze(self, context):
frame = context['engine']['frame']
return True
def _init_scene(self, debug=False):
self.mask_win = IkaMatcher(
651, 47, 99, 33,
img_file='result_detail.png',
threshold=0.950,
orig_threshold=0.05,
bg_method=matcher.MM_NOT_WHITE(),
fg_method=matcher.MM_WHITE(),
label='result_detail:WIN',
debug=debug,
)
base_dir = IkaUtils.baseDirectory()
languages = Localization.get_game_languages()
for lang in languages:
mask_file = os.path.join(base_dir, 'masks', lang, 'result_detail.png')
if os.path.exists(mask_file):
break
if not os.path.exists(mask_file):
mask_file = os.path.join(base_dir, 'masks', 'result_detail.png')
winlose = cv2.imread(mask_file)
self.winlose_gray = cv2.cvtColor(winlose, cv2.COLOR_BGR2GRAY)
self._white_filter = matcher.MM_WHITE()
self.udemae_recoginizer = UdemaeRecoginizer()
self.number_recoginizer = NumberRecoginizer()
# for SplatFest (ja)
self.fest_gender_recoginizer = character_recoginizer.FesGenderRecoginizer()
self.fest_level_recoginizer = character_recoginizer.FesLevelRecoginizer()
self.load_akaze_model()
self._client_local = APIClient(local_mode=True)
# APIClient(local_mode=False, base_uri='http://localhost:8000')
self._client_remote = None
if __name__ == "__main__":
ResultDetail.main_func()
```
#### File: ikalog/ui/capture.py
```python
import gettext
from ikalog import inputs
from ikalog.utils import *
import wx
t = gettext.translation('IkaUI', 'locale', fallback=True)
_ = t.gettext
class VideoCapture(object):
    # Capture device name used by AmaRec TV
DEV_AMAREC = "AmaRec Video Capture"
source = 'amarec'
source_device = None
deinterlace = False
File = ''
def read(self):
if self.capture is None:
return None, None
r = self.capture.read()
return r
def start_recorded_file(self, file):
IkaUtils.dprint(
            '%s: initializing pre-recorded video file %s' % (self, file))
self.realtime = False
self.from_file = True
self.capture.init_capture(file)
self.fps = self.capture.cap.get(5)
def enumerate_devices(self):
if IkaUtils.isWindows():
from ikalog.inputs.win.videoinput_wrapper import VideoInputWrapper
return VideoInputWrapper().get_device_list()
else:
return [
'IkaLog does not support camera enumeration on this platform.',
'IkaLog does not support camera enumeration on this platform.',
'IkaLog does not support camera enumeration on this platform.',
]
def initialize_input(self):
print('----------------')
print(self.source, self.source_device)
if self.source == 'dshow_capture':
self.capture = inputs.DirectShow()
self.capture.start_camera(self.source_device)
elif self.source == 'opencv_capture':
if IkaUtils.isWindows():
self.capture = inputs.CVCapture()
self.capture.start_camera(self.source_device)
else: # FIXME
self.capture = inputs.AVFoundationCapture()
elif self.source == 'screen':
self.capture = inputs.ScreenCapture()
self.capture.calibrate()
elif self.source == 'amarec':
self.capture = inputs.CVCapture()
self.capture.start_camera(self.DEV_AMAREC)
elif self.source == 'file':
self.capture = inputs.CVFile()
self.start_recorded_file(self.File)
# ToDo reset context['engine']['msec']
success = True
print(self.capture)
return success
def apply_ui(self):
self.source = ''
for control in [self.radioAmarecTV, self.radio_dshow_capture, self.radio_opencv_capture, self.radioScreen, self.radioFile]:
if control.GetValue():
self.source = {
self.radioAmarecTV: 'amarec',
self.radio_dshow_capture: 'dshow_capture',
self.radio_opencv_capture: 'opencv_capture',
self.radioFile: 'file',
self.radioScreen: 'screen',
}[control]
self.source_device = \
self.listCameras.GetItems()[self.listCameras.GetSelection()]
print('source_device = ', self.source_device)
self.File = self.editFile.GetValue()
self.deinterlace = self.checkDeinterlace.GetValue()
        # This function is only called when running the GUI. Show a message
        # if the capture source could not be opened.
if not self.initialize_input():
r = wx.MessageDialog(None,
                                 _('Failed to initialize the capture source. Review your configuration'),
                                 _('Error'),
wx.OK | wx.ICON_ERROR).ShowModal()
IkaUtils.dprint(
"%s: failed to activate input source >>>>" % (self))
else:
IkaUtils.dprint("%s: activated new input source" % self)
def refresh_ui(self):
if self.source == 'amarec':
self.radioAmarecTV.SetValue(True)
if self.source == 'dshow_capture':
self.radio_dshow_capture.SetValue(True)
if self.source == 'opencv_capture':
self.radio_opencv_capture.SetValue(True)
if self.source == 'camera': # Legacy
self.radio_opencv_capture.SetValue(True)
if self.source == 'screen':
self.radioScreen.SetValue(True)
if self.source == 'file':
self.radioFile.SetValue(True)
try:
dev = self.source_device
index = self.listCameras.GetItems().index(dev)
self.listCameras.SetSelection(index)
except:
IkaUtils.dprint('Current configured device is not in list')
        if self.File is None:
            self.editFile.SetValue('')
        else:
            self.editFile.SetValue(self.File)
self.checkDeinterlace.SetValue(self.deinterlace)
def on_config_reset(self, context=None):
        # We really don't want to reset the camera here
pass
def on_config_load_from_context(self, context):
self.on_config_reset(context)
try:
conf = context['config']['cvcapture']
except:
conf = {}
self.source = ''
try:
if conf['Source'] in ['dshow_capture', 'opencv_capture', 'camera', 'file', 'amarec', 'screen']:
self.source = conf['Source']
if conf['Source'] == 'camera': # Legacy
self.source = 'opencv_capture'
except:
pass
if 'SourceDevice' in conf:
try:
self.source_device = conf['SourceDevice']
except:
# FIXME
self.source_device = 0
if 'File' in conf:
self.File = conf['File']
if 'Deinterlace' in conf:
self.deinterlace = conf['Deinterlace']
self.refresh_ui()
return self.initialize_input()
def on_config_save_to_context(self, context):
context['config']['cvcapture'] = {
'Source': self.source,
'File': self.File,
'SourceDevice': self.source_device,
'Deinterlace': self.deinterlace,
}
def on_config_apply(self, context):
self.apply_ui()
def on_reload_devices_button_click(self, event=None):
pass
def on_calibrate_screen_button_click(self, event=None):
if (self.capture is not None) and self.capture.__class__.__name__ == 'ScreenCapture':
img = self.capture.read_raw()
if img is not None:
self.capture.auto_calibrate(img)
def on_screen_reset_button_click(self, event=None):
if (self.capture is not None) and self.capture.__class__.__name__ == 'ScreenCapture':
self.capture.reset()
def on_option_tab_create(self, notebook):
is_windows = IkaUtils.isWindows()
self.panel = wx.Panel(notebook, wx.ID_ANY)
self.page = notebook.InsertPage(0, self.panel, 'Input')
cameras = self.enumerate_devices()
self.layout = wx.BoxSizer(wx.VERTICAL)
self.panel.SetSizer(self.layout)
self.radioAmarecTV = wx.RadioButton(
self.panel, wx.ID_ANY, _('Capture through AmarecTV'))
self.radio_dshow_capture = wx.RadioButton(
self.panel, wx.ID_ANY,
_('HDMI Video input (DirectShow, recommended)')
)
self.radio_opencv_capture = wx.RadioButton(
self.panel, wx.ID_ANY,
_('HDMI Video input (OpenCV driver)')
)
self.radioScreen = wx.RadioButton(
self.panel, wx.ID_ANY, _('Realtime Capture from desktop'))
self.buttonCalibrateDesktop = wx.Button(
self.panel, wx.ID_ANY, _('Calibrate'))
self.buttonEntireDesktop = wx.Button(
self.panel, wx.ID_ANY, _('Reset'))
self.radioFile = wx.RadioButton(
self.panel, wx.ID_ANY, _('Read from pre-recorded video file (for testing)'))
self.editFile = wx.TextCtrl(self.panel, wx.ID_ANY, u'hoge')
self.listCameras = wx.ListBox(self.panel, wx.ID_ANY, choices=cameras)
self.listCameras.SetSelection(0)
self.buttonReloadDevices = wx.Button(
self.panel, wx.ID_ANY, _('Reload Devices'))
self.checkDeinterlace = wx.CheckBox(
self.panel, wx.ID_ANY, _('Enable Deinterlacing (experimental)'))
self.layout.Add(wx.StaticText(
self.panel, wx.ID_ANY, _('Select Input source:')))
self.layout.Add(self.radioAmarecTV)
self.layout.Add(self.radio_dshow_capture)
self.layout.Add(self.radio_opencv_capture)
self.layout.Add(self.listCameras, flag=wx.EXPAND)
self.layout.Add(self.buttonReloadDevices)
self.layout.Add(self.radioScreen)
buttons_layout = wx.BoxSizer(wx.HORIZONTAL)
buttons_layout.Add(self.buttonCalibrateDesktop)
buttons_layout.Add(self.buttonEntireDesktop)
self.layout.Add(buttons_layout)
self.layout.Add(self.radioFile)
self.layout.Add(self.editFile, flag=wx.EXPAND)
self.layout.Add(self.checkDeinterlace)
if is_windows:
self.radioAmarecTV.SetValue(True)
else:
self.radio_dshow_capture.Disable()
self.radioAmarecTV.Disable()
self.radioScreen.Disable()
self.buttonCalibrateDesktop.Disable()
self.radio_opencv_capture.SetValue(True)
self.buttonReloadDevices.Bind(
wx.EVT_BUTTON, self.on_reload_devices_button_click)
self.buttonCalibrateDesktop.Bind(
wx.EVT_BUTTON, self.on_calibrate_screen_button_click)
self.buttonEntireDesktop.Bind(
wx.EVT_BUTTON, self.on_screen_reset_button_click)
def __init__(self):
self.from_file = False
self.capture = None
if __name__ == "__main__":
pass
```
#### File: utils/character_recoginizer/udemae.py
```python
import cv2
import os
import numpy as np
from ikalog.utils.character_recoginizer import *
from ikalog.utils import *
class UdemaeRecoginizer(CharacterRecoginizer):
def __new__(cls, *args, **kwargs):
if not hasattr(cls, '__instance__'):
cls.__instance__ = super(
UdemaeRecoginizer, cls).__new__(cls, *args, **kwargs)
return cls.__instance__
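    # __new__ above implements a singleton: repeated construction reuses the
    # already-trained recognizer instead of rebuilding the model.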
def __init__(self):
if hasattr(self, 'trained') and self.trained:
return
super(UdemaeRecoginizer, self).__init__()
model_name = 'data/udemae.model'
if os.path.isfile(model_name):
self.load_model_from_file(model_name)
self.train()
return
IkaUtils.dprint('Building udemae recoginization model.')
data = [
{'file': 'numbers2/a1.png', 'response': 'a'},
{'file': 'numbers2/b1.png', 'response': 'b'},
{'file': 'numbers2/c1.png', 'response': 'c'},
{'file': 'numbers2/s1.png', 'response': 's'},
{'file': 'numbers2/s2.png', 'response': 's'},
{'file': 'numbers2/plus.png', 'response': '+'},
{'file': 'numbers2/minus.png', 'response': '-'},
]
for d in data:
d['img'] = cv2.imread(d['file'])
self.add_sample(d['response'], d['img'])
self.add_sample(d['response'], d['img'])
self.add_sample(d['response'], d['img'])
self.save_model_to_file(model_name)
self.train()
if __name__ == "__main__":
UdemaeRecoginizer()
```
#### File: IkaLog/test/GameStart.py
```python
import sys
import json
import argparse
sys.path.append('.')
from ikalog.inputs import CVCapture
from ikalog.utils import *
from ikalog.engine import *
from ikalog import outputs
class IkaTestGameStart:
    # Build the file name of the answer (ground-truth) file
def answer_filename(self, video_file, answer_type, context):
#basename_fullpath, ext = os.path.splitext(video_file)
basename_fullpath = video_file
answer_fullpath = basename_fullpath + '.answer.' + answer_type
return answer_fullpath
    # Create the answer (ground-truth) file
def write_answer_file(self, video_file, context):
answer_fullpath = self.answer_filename(
video_file, 'GameStart', context)
record = {
'stage': IkaUtils.map2text(self.engine.context['game'][
'map'], unknown='None'),
'rule': IkaUtils.rule2text(self.engine.context['game'][
'rule'], unknown='None'),
}
f = open(answer_fullpath, 'w')
f.write(json.dumps(record, separators=(',', ':')) + '\n')
f.close()
IkaUtils.dprint('wrote answer file %s' % answer_fullpath)
return True
def read_answer_file(self, video_file):
answer_fullpath = self.answer_filename(video_file, 'GameStart', None)
f = open(answer_fullpath, 'r')
record = json.load(f)
f.close()
return record
def test_regression(self, context, answer):
stage = IkaUtils.map2text(self.engine.context['game'][
'map'], unknown='None')
rule = IkaUtils.rule2text(self.engine.context['game'][
'rule'], unknown='None')
IkaUtils.dprint(' detected: stage %s rule %s' % (stage, rule))
IkaUtils.dprint(' answer : stage %s rule %s' %
(answer['stage'], answer['rule']))
assert(stage == answer['stage'])
assert(rule == answer['rule'])
return True
def on_frame_read(self, context):
if (context['engine']['msec'] > 60 * 1000):
            IkaUtils.dprint('%s: could not detect the map within 60 seconds of play' % self)
self.engine.stop()
def on_game_go_sign(self, context):
        IkaUtils.dprint('%s: the go sign appeared' % self)
self.engine.stop()
def on_frame_read_failed(self, context):
        IkaUtils.dprint('%s: probably reached the end of the file' % self)
self.engine.stop()
def on_game_start(self, context):
        IkaUtils.dprint('%s: game detected' % self)
self.engine.stop()
def __init__(self, file):
        # Read the file specified as input
source = CVCapture()
source.start_recorded_file(file)
source.need_resize = True
        # Show the screen; without it there is no way to follow progress
screen = outputs.Screen(0, size=(640, 360))
        # Register ourselves and the screen as plugins to receive callbacks
outputPlugins = [self, screen]
        # Run IkaEngine
self.engine = IkaEngine()
self.engine.pause(False)
self.engine.set_capture(source)
self.engine.set_plugins(outputPlugins)
try:
self.engine.run()
except:
pass
if args.write:
            # Generate the answer file
if self.write_answer_file(file, self.engine.context):
self.exit_code = 0
elif args.regression:
            # Regression test
answer = self.read_answer_file(file)
if self.test_regression(self.engine.context, answer):
self.exit_code = 0
else:
args.stdout = True
self.exit_code = 0
if args.stdout:
            # Print to stdout
map = IkaUtils.map2text(self.engine.context['game'][
'map'], unknown='None')
rule = IkaUtils.rule2text(self.engine.context['game'][
'rule'], unknown='None')
print(file, map, rule)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--auto', action='store_true')
parser.add_argument('--write', action='store_true')
parser.add_argument('--regression', action='store_true')
parser.add_argument('--stdout', action='store_true')
parser.add_argument('file')
args = parser.parse_args()
sys.exit(IkaTestGameStart(args.file).exit_code)
```
#### File: IkaLog/tools/IkaLog4.py
```python
import signal
import sys
import time
import cv2
from ikalog.engine import *
from ikalog import inputs, outputs
from IkaConfig import *
from ikalog.utils import *
from hdmi_switcher import HDMISwitcher
class IkaLog4(object):
    def signal_handler(self, num, frame):
IkaUtils.dprint('IkaLog: got signal %d' % num)
if num == 2:
self.stop_requested = True
def context2view(self, context):
for n in range(len(self.engines)):
if self.engines[n].context == context:
return n + 1
return None
def switch_to_single(self, port):
self.switcher.cmd_switch_port(port)
time.sleep(0.1)
self.switcher.cmd_mode(self.switcher.mode_single_channel)
self.consolidated_source.config_720p_single(port)
def switch_to_quad(self, port=None):
self.switcher.cmd_mode(self.switcher.mode_four_channels)
time.sleep(0.1)
if port is not None:
self.switcher.cmd_switch_port(1)
self.consolidated_source.config_720p_quad()
def initialize_switcher(self, f):
self.switcher = HDMISwitcher('/dev/tty.usbserial-FTZ2AKZU')
time.sleep(1)
self.switcher.cmd_resolution(self.switcher.res_720p)
time.sleep(1)
self.switch_to_quad()
def initialize_sources(self):
try:
f = sys.argv[1]
source = inputs.CVFile()
source.start_video_file(f)
except:
source = inputs.AVFoundationCapture()
source.start_camera(1)
self.consolidated_source = inputs.ConsolidatedInput(source)
def initialize_engines(self):
# Engine
self.engines = []
for view in self.consolidated_source.outputs:
engine = IkaEngine()
engine.pause(False)
engine.set_capture(view)
engine.set_plugins([
# outputs.Console(),
outputs.DebugLog(),
self,
])
engine.close_session_at_eof = False
self.engines.append(engine)
def main_loop(self):
while not self.stop_requested:
self.consolidated_source.next_frame()
for engine in self.engines:
engine.process_frame()
cv2.waitKey(1)
IkaUtils.dprint('bye!')
def on_lobby_matching(self, context):
print('on_lobby_matching', self.context2view(context))
self.switch_to_quad()
def on_lobby_matched(self, context):
print('on_lobby_matched', self.context2view(context))
self.switch_to_single(self.context2view(context))
def on_game_start(self, context):
print('on_game_start', self.context2view(context))
def on_game_go_sign(self, context):
print('on_game_go_sign', self.context2view(context))
self.switch_to_quad()
def on_game_finish(self, context):
print('on_game_finish', self.context2view(context))
self.switch_to_single(self.context2view(context))
def on_result_gears(self, context):
self.switch_to_quad()
def __init__(self):
self.stop_requested = False
signal.signal(signal.SIGINT, self.signal_handler)
ikalog4 = IkaLog4()
sources = ikalog4.initialize_sources()
ikalog4.initialize_engines()
ikalog4.initialize_switcher('')
ikalog4.main_loop()
```
#### File: IkaLog/tools/learn_gearpowers.py
```python
import sys
import cv2
from ikalog.constants import gear_abilities
sys.path.append('.')
train_basedir = sys.argv[1]
from ikalog.utils import GearPowerRecoginizer
def learnImageGroup(recoginizer=None, name="unknown", dir=None):
if dir is None or recoginizer is None:
return None
train_dir = "%s/%s" % (train_basedir, dir)
print("%s => %s" % (name, train_dir))
recoginizer.learn_image_group(name=name, dir=train_dir)
def loopbackTest():
results = {}
misses = []
total = 0
correct = 0
sort_zumi = {}
for gearpower in gearpowers.groups:
for sample_tuple in gearpower['images']:
sample = sample_tuple[0]
answer, distance = gearpowers.match(sample) # = img
total = total + 1
if (gearpower['name'] == answer):
correct = correct + 1
msg = "正解"
else:
msg = " "
misses.append(sample)
if not answer in sort_zumi:
sort_zumi[answer] = []
sort_zumi[answer].append((distance, sample_tuple[3]))
#print("%s: %s 結果: %s<br>" % (msg, gearpower['name'], r['name']))
s = ("%d 問中 %d 問正解 学習内容に対する正答率 %3.1f%" %
(total, correct, correct / total * 100))
    # Show the miss list
misses_hist = []
for sample in misses:
param, r = gearpowers.analyze_image(sample, debug=True)
misses_hist.append(r)
gearpowers.show_learned_icon_image(misses_hist, 'Misses', save='misses.png')
    # Write the list out to a file
f = open('gearpowers.html', 'w')
f.write('<p>%s</p>' % s)
for gearpower in sorted(sort_zumi.keys()):
f.write('<h3>%s</h3>' % gearpower)
print('<h3>%s</h3>' % gearpower)
for t in sorted(sort_zumi[gearpower]):
f.write('<font size=-4>%s</font><img src=%s alt="%s">' %
(t[0], t[1], t[0]))
print('<font size=-4>%s</font><img src=%s alt="%s">' %
(t[0], t[1], t[0]))
f.close()
return s
gearpowers = GearPowerRecoginizer()
for ability in gear_abilities:
learnImageGroup(gearpowers, ability, ability)
gearpowers.knn_train_from_group()
gearpowers.save_model_to_file()
gearpowers.knn_reset()
gearpowers.load_model_from_file()
gearpowers.knn_train()
if 1:
s = loopbackTest()
print(s)
if __name__ == "__main__":
from ikalog.scenes.result_gears import *
from ikalog.utils import *
import os
result_gears = ResultGears(None)
result_gears.gearpowers = gearpowers
sort_zumi = {}
for file in sys.argv[2:]:
context = {
'engine': {
'frame': cv2.imread(file),
},
'game': {
'map': {'name': 'ハコフグ倉庫', },
'rule': {'name': 'ガチエリア'},
},'scenes':{},
}
print('file ', file, context['engine']['frame'].shape)
result_gears._analyze(context)
srcname, ext = os.path.splitext(os.path.basename(file))
for n in range(len(context['scenes']['result_gears']['gears'])):
gear = context['scenes']['result_gears']['gears'][n]
for field in gear:
if field == 'img_name':
continue
elif field.startswith('img_'):
img_dir = os.path.join(
'test_result', 'gearpowers', gear[field.replace('img_','')])
img_file = os.path.join(img_dir, '%s.%d.%s.png' % (srcname, n, field))
print(img_file)
try:
os.makedirs(img_dir)
except:
pass
IkaUtils.writeScreenshot(img_file, gear[field])
``` |
{
"source": "joytunes/JTLocalize",
"score": 2
} |
#### File: jtlocalize/core/create_localized_strings_from_ib_files.py
```python
import argparse
from xml.dom import minidom
from xml.sax.saxutils import unescape
from localization_utils import *
# The prefix to identify a comment for an internationalized comment.
JT_INTERNATIONALIZED_COMMENT_PREFIX = 'jtl_'
DEFAULT_UI_COMPONENTS_PREFIX = 'JT'
def write_string_pairs_from_ib_file_to_file(ib_files_directory, exclude_dirs, output_file):
logging.info('Creating localization string pairs from IB files')
string_pairs = extract_string_pairs_in_dir(ib_files_directory, exclude_dirs)
output_file_desc = open_strings_file(output_file, "a")
for entry_key, entry_comment in string_pairs:
output_file_desc.write('\n')
if entry_key is not None:
write_entry_to_file(output_file_desc, entry_comment, entry_key)
output_file_desc.close()
def extract_string_pairs_in_dir(directory, exclude_dirs):
""" Extract string pairs in the given directory's xib/storyboard files.
Args:
directory (str): The path to the directory.
exclude_dirs (str): A list of directories to exclude from extraction.
Returns:
list: The extracted string pairs for all IB files in the directory.
"""
result = []
for ib_file_path in find_files(directory, [".xib", ".storyboard"], exclude_dirs):
result += extract_string_pairs_in_ib_file(ib_file_path)
return result
def get_element_attribute_or_empty(element, attribute_name):
"""
Args:
element (element): The xib's element.
attribute_name (str): The desired attribute's name.
Returns:
The attribute's value, or an empty str if none exists.
"""
return element.attributes[attribute_name].value if element.hasAttribute(attribute_name) else ""
def extract_element_internationalized_comment(element):
""" Extracts the xib element's comment, if the element has been internationalized.
Args:
element (element): The element from which to extract the comment.
Returns:
The element's internationalized comment, None if it does not exist, or hasn't been internationalized (according
to the JTLocalize definitions).
"""
element_entry_comment = get_element_attribute_or_empty(element, 'userLabel')
if element_entry_comment == "":
try:
element_entry_comment = element.getElementsByTagName('string')[0].firstChild.nodeValue
except Exception:
element_entry_comment = ""
if not element_entry_comment.lower().startswith(JT_INTERNATIONALIZED_COMMENT_PREFIX):
return None
else:
return element_entry_comment[len(JT_INTERNATIONALIZED_COMMENT_PREFIX):]
def add_string_pairs_from_attributed_ui_element(results, ui_element, comment_prefix):
""" Adds string pairs from a UI element with attributed text
Args:
results (list): The list to add the results to.
        ui_element (element): The element from the xib to extract the fragments from.
comment_prefix (str): The prefix of the comment to use for extracted string
(will be appended "Part X" suffices)
Returns:
bool: Whether or not an attributed string was found.
"""
attributed_strings = ui_element.getElementsByTagName('attributedString')
if attributed_strings.length == 0:
return False
attributed_element = attributed_strings[0]
fragment_index = 1
for fragment in attributed_element.getElementsByTagName('fragment'):
# The fragment text is either as an attribute <fragment content="TEXT">
# or a child in the format <string key='content'>TEXT</string>
try:
label_entry_key = fragment.attributes['content'].value
except KeyError:
label_entry_key = fragment.getElementsByTagName('string')[0].firstChild.nodeValue
comment = "%s Part %d" % (comment_prefix, fragment_index)
results.append((label_entry_key, comment))
fragment_index += 1
return fragment_index > 1
def add_string_pairs_from_label_element(xib_file, results, label):
""" Adds string pairs from a label element.
Args:
xib_file (str): Path to the xib file.
results (list): The list to add the results to.
label (element): The label element from the xib, to extract the string pairs from.
"""
label_entry_comment = extract_element_internationalized_comment(label)
if label_entry_comment is None:
return
if label.hasAttribute('usesAttributedText') and label.attributes['usesAttributedText'].value == 'YES':
add_string_pairs_from_attributed_ui_element(results, label, label_entry_comment)
else:
try:
label_entry_key = label.attributes['text'].value
except KeyError:
try:
label_entry_key = label.getElementsByTagName('string')[0].firstChild.nodeValue
except Exception:
label_entry_key = 'N/A'
logging.warn("%s: Missing text entry in %s", xib_file, label.toxml('UTF8'))
results.append((label_entry_key, label_entry_comment))
def add_string_pairs_from_text_field_element(xib_file, results, text_field):
""" Adds string pairs from a textfield element.
Args:
xib_file (str): Path to the xib file.
results (list): The list to add the results to.
text_field(element): The textfield element from the xib, to extract the string pairs from.
"""
text_field_entry_comment = extract_element_internationalized_comment(text_field)
if text_field_entry_comment is None:
return
if text_field.hasAttribute('usesAttributedText') and text_field.attributes['usesAttributedText'].value == 'YES':
add_string_pairs_from_attributed_ui_element(results, text_field, text_field_entry_comment)
else:
try:
text_field_entry_key = text_field.attributes['text'].value
results.append((text_field_entry_key, text_field_entry_comment + ' default text value'))
except KeyError:
pass
try:
text_field_entry_key = text_field.attributes['placeholder'].value
results.append((text_field_entry_key, text_field_entry_comment + ' placeholder text value'))
except KeyError:
pass
def add_string_pairs_from_text_view_element(xib_file, results, text_view):
""" Adds string pairs from a textview element.
Args:
xib_file (str): Path to the xib file.
results (list): The list to add the results to.
text_view(element): The textview element from the xib, to extract the string pairs from.
"""
text_view_entry_comment = extract_element_internationalized_comment(text_view)
if text_view_entry_comment is None:
return
if text_view.hasAttribute('usesAttributedText') and text_view.attributes['usesAttributedText'].value == 'YES':
add_string_pairs_from_attributed_ui_element(results, text_view, text_view_entry_comment)
else:
try:
text_view_entry_key = text_view.attributes['text'].value
results.append((text_view_entry_key, text_view_entry_comment + ' default text value'))
except KeyError:
pass
def add_string_pairs_from_button_element(xib_file, results, button):
""" Adds strings pairs from a button xib element.
Args:
xib_file (str): Path to the xib file.
results (list): The list to add the results to.
button(element): The button element from the xib, to extract the string pairs from.
"""
button_entry_comment = extract_element_internationalized_comment(button)
if button_entry_comment is None:
return
for state in button.getElementsByTagName('state'):
state_name = state.attributes['key'].value
state_entry_comment = button_entry_comment + " - " + state_name + " state of button"
if not add_string_pairs_from_attributed_ui_element(results, state, state_entry_comment):
try:
button_entry_key = state.attributes['title'].value
except KeyError:
try:
button_entry_key = state.getElementsByTagName('string')[0].firstChild.nodeValue
except Exception:
continue
results.append((button_entry_key, state_entry_comment))
def extract_string_pairs_in_ib_file(file_path):
""" Extract the strings pairs (key and comment) from a xib file.
Args:
file_path (str): The path to the xib file.
Returns:
list: List of tuples representing the string pairs.
"""
try:
results = []
xmldoc = minidom.parse(file_path)
element_name_to_add_func = {'label': add_string_pairs_from_label_element,
'button': add_string_pairs_from_button_element,
'textField': add_string_pairs_from_text_field_element,
'textView': add_string_pairs_from_text_view_element}
for element_name in element_name_to_add_func:
add_func = element_name_to_add_func[element_name]
elements = xmldoc.getElementsByTagName(element_name)
for element in elements:
add_func(file_path, results, element)
# Find strings of format JTL('Key Name', 'Key Comment') and add them to the results
jtl_brackets_find_results = re.findall(JTL_REGEX, open(file_path).read())
unescaped_jtl_brackets_find_results = [(unescape(x), unescape(y)) for (x, y) in jtl_brackets_find_results]
results += unescaped_jtl_brackets_find_results
if len(results) > 0:
results = [(None, os.path.basename(file_path))] + results
return results
    except Exception as e:
logging.warn("ERROR: Error processing %s (%s: %s)", file_path, type(e), str(e))
return []
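# Hypothetical usage sketch (file name assumed, not part of this module):
#   pairs = extract_string_pairs_in_ib_file('Base.lproj/Main.storyboard')
#   # When non-empty, pairs[0] is (None, <file basename>) followed by
#   # (key, comment) tuples for each internationalized element.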
def create_localized_strings_from_ib_files(ib_files_directory, exclude_dirs, output_file):
write_string_pairs_from_ib_file_to_file(ib_files_directory, exclude_dirs, output_file)
def parse_args():
""" Parses the arguments given in the command line
Returns:
args: The configured arguments will be attributes of the returned object.
"""
parser = argparse.ArgumentParser(description='Extract the string for localization from IB files directory.')
parser.add_argument("ib_files_directory", help="The directory containing the IB files.")
parser.add_argument("output_file", help="The output file.")
parser.add_argument("--exclude_dirs", nargs='+',
help="Directories to exclude when looking for IB files to extract strings from")
parser.add_argument("--log_path", default="", help="The log file path")
return parser.parse_args()
# The main method for simple command line run.
if __name__ == '__main__':
args = parse_args()
setup_logging(args)
create_localized_strings_from_ib_files(args.ib_files_directory, args.exclude_dirs, args.output_file)
```
#### File: jtlocalize/tests/localization_diff_test.py
```python
import unittest
from jtlocalize.core.localization_diff import *
from jtlocalize.core.localization_merge_back import *
from jtlocalize.mock_translate import *
NEW_LOCALIZABLE_FILE_PATH = os.path.join(os.path.dirname(__file__), "resources/Localizable.new.strings")
OLD_TRANSLATED_FILE_PATH = os.path.join(os.path.dirname(__file__), "resources/Localizable.translated.old.strings")
NEW_TRANSLATED_FILE_PATH = "/tmp/Localizable.translated.new.strings"
MERGED_FILE_PATH = "/tmp/Localizable.merged.strings"
class LocalizationDiffTest(unittest.TestCase):
"""
    This test is intentionally minimal; it only checks one specific use case.
"""
def setUp(self):
print "Starting test.."
def tearDown(self):
os.remove(MERGED_FILE_PATH)
os.remove(NEW_TRANSLATED_FILE_PATH)
def translate_pending_file(self):
mock_translate(NEW_TRANSLATED_FILE_PATH, wrap="test")
def assert_only_new_keys_in_pending_file(self):
old_translated_file_keys_to_objects = generate_localization_key_to_entry_dictionary_from_file(OLD_TRANSLATED_FILE_PATH)
localizable_values_to_objects = generate_localization_value_to_entry_dictionary_from_file(NEW_LOCALIZABLE_FILE_PATH)
f = open_strings_file(NEW_TRANSLATED_FILE_PATH, "r")
for comments, key, value in extract_comment_key_value_tuples_from_file(f):
localizable_key = localizable_values_to_objects[key]
self.assertFalse(localizable_key in old_translated_file_keys_to_objects)
def assert_localizable_value_translated(self):
merged_file_dict = generate_localization_key_to_entry_dictionary_from_file(MERGED_FILE_PATH)
f = open_strings_file(NEW_LOCALIZABLE_FILE_PATH, "r")
for comments, key, value in extract_comment_key_value_tuples_from_file(f):
merged_value = merged_file_dict[key].value
self.assertEqual(merged_value, "test(%s)" % value)
def test_simple_flow(self):
localization_diff(NEW_LOCALIZABLE_FILE_PATH, OLD_TRANSLATED_FILE_PATH, None, NEW_TRANSLATED_FILE_PATH)
self.assert_only_new_keys_in_pending_file()
self.translate_pending_file()
localization_merge_back(NEW_LOCALIZABLE_FILE_PATH, OLD_TRANSLATED_FILE_PATH, NEW_TRANSLATED_FILE_PATH,
MERGED_FILE_PATH)
self.assert_localizable_value_translated()
if __name__ == '__main__':
unittest.main()
```
#### File: jtlocalize/tests/localization_utils_test.py
```python
import unittest
from jtlocalize.core.localization_utils import *
TMP_FILE_PATH = "/tmp/Localizable.tmp.strings"
EXAMPLE_FILE_CONTENT = u"""
/**
* Comment 1
*/
/*** Header comment 1 ***/
/* Entry Comment 1 */
"key1" = "value1";
/* Entry Comment 2 */
/* Duplicate - Entry Comment 2 */
"key2" = "value2";
"""
class LocalizationUtilsTest(unittest.TestCase):
def setUp(self):
self.file_for_tests = open_strings_file(TMP_FILE_PATH, "w")
self.file_for_tests.write(EXAMPLE_FILE_CONTENT)
self.file_for_tests.close()
def tearDown(self):
os.remove(TMP_FILE_PATH)
def test_parse(self):
result_dict = generate_localization_key_to_entry_dictionary_from_file(TMP_FILE_PATH)
self.assertEquals(len(result_dict), 2, "Wrong number of keys")
self.assertEquals(result_dict["key1"].value, "value1")
self.assertEquals(result_dict["key1"].comments, ["Entry Comment 1"])
self.assertEquals(result_dict["key2"].value, "value2")
self.assertEquals(len(result_dict["key2"].comments), 2)
if __name__ == '__main__':
unittest.main()
```
#### File: jtlocalize/tests/merge_strings_files_test.py
```python
import unittest
import shutil
from jtlocalize.core.merge_strings_files import *
from jtlocalize.core.localization_utils import *
OLD_LOCALIZABLE_FILE_PATH = os.path.join(os.path.dirname(__file__), "resources/Localizable.old.strings")
NEW_LOCALIZABLE_FILE_PATH = os.path.join(os.path.dirname(__file__), "resources/app_localization_strings_output.strings")
MERGED_FILE_PATH = "/tmp/Localizable.merged.strings"
class MergeLocalizableTest(unittest.TestCase):
"""
Test for the merge localizable script
"""
def setUp(self):
print "Starting test.."
def tearDown(self):
os.remove(MERGED_FILE_PATH)
def test_merge(self):
old_localizable_file_keys_to_objects = generate_localization_key_to_entry_dictionary_from_file(OLD_LOCALIZABLE_FILE_PATH)
new_localizable_file_keys_to_objects = generate_localization_key_to_entry_dictionary_from_file(NEW_LOCALIZABLE_FILE_PATH)
shutil.copyfile(OLD_LOCALIZABLE_FILE_PATH, MERGED_FILE_PATH)
merge_strings_files(MERGED_FILE_PATH, NEW_LOCALIZABLE_FILE_PATH)
f = open_strings_file(MERGED_FILE_PATH, "r")
for comments, key, value in extract_comment_key_value_tuples_from_file(f):
if key in old_localizable_file_keys_to_objects:
self.assertEqual(value, old_localizable_file_keys_to_objects[key].value)
if key in new_localizable_file_keys_to_objects:
self.assertItemsEqual(comments, new_localizable_file_keys_to_objects[key].comments)
new_localizable_file_keys_to_objects.pop(key)
else:
self.assertItemsEqual(comments, old_localizable_file_keys_to_objects[key].comments)
old_localizable_file_keys_to_objects.pop(key)
else:
self.assertIn(key, new_localizable_file_keys_to_objects)
self.assertEqual(value, new_localizable_file_keys_to_objects[key].value)
self.assertListEqual(comments, new_localizable_file_keys_to_objects[key].comments)
new_localizable_file_keys_to_objects.pop(key)
self.assertEqual(len(new_localizable_file_keys_to_objects), 0)
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "JOYUAGV/waypoints_global_planner",
"score": 3
} |
#### File: waypoints_global_planner/scripts/path_planner.py
```python
from path_planning.Bezier import Bezier
from path_planning.BezierSpline import BezierSpline
from path_planning.CubicSpline import CubicSpline
from path_planning.Dubins import Dubins
from path_planning.Line import Line
from path_planning.Ellipse import Ellipse
DUBINS_POINTS = [[0,0],[3,5]]
POINTS = [[0,0],[1,-2],[10.5,-4.5],[5,6],[10,15],[25,30]]
ELLIPSE_POINTS = [[0,0],[1,2],[3,1]]
CIRCLE_POINTS = [[4,4],[6,9],[9,6]]
ORIENTATION = [[180, 90]]
LINE_POINTS = [[0,0],[7,-12]]
def prepare_desired_path(trajectory_type):
'''
Prepare desired path
'''
if trajectory_type == "dubins":
shape = Dubins(DUBINS_POINTS,ORIENTATION,5)
if trajectory_type == "circle":
shape = Ellipse(CIRCLE_POINTS)
if trajectory_type == "bezier":
shape = Bezier(POINTS, False)
if trajectory_type == "ellipse":
shape = Ellipse(ELLIPSE_POINTS)
if trajectory_type == "cubic_spline":
shape = CubicSpline(POINTS)
if trajectory_type == "bezier_spline":
shape = BezierSpline(POINTS, order=3, is_periodic=False)
if trajectory_type == "line":
shape = Line(LINE_POINTS, is_periodic=False)
samples, yaw_samples = shape.sample_points()
return samples, yaw_samples
'''
if __name__ == '__main__':
path = prepare_desired_path("circle")
    print(path)
'''
``` |
{
"source": "Joyvalley/GWAS_Flow",
"score": 2
} |
#### File: GWAS_Flow/gwas_flow/main.py
```python
import sys
import pandas as pd
import numpy as np
from scipy.stats import f
import tensorflow as tf
from pandas_plink import read_plink
import h5py
from .herit import estimate_variance_components
def kinship(marker):
''' returns kinship matrix after vanRaden '''
# not implemented yet
raise NotImplementedError("No kins")
def load_and_prepare_data(x_file, y_file, k_file, m_phe, cof_file):
''' etl the data '''
if k_file != 'not_prov':
type_k = k_file.split(".")[-1]
type_x = x_file.split(".")[-1]
y_phe = pd.read_csv(y_file, engine='python').sort_values(
['accession_id']).groupby('accession_id').mean()
y_phe = pd.DataFrame({'accession_id': y_phe.index,
'phenotype_value': y_phe[m_phe]})
if type_x in ('hdf5', 'h5py'):
snp = h5py.File(x_file, 'r')
markers = np.asarray(snp['positions'])
        acc_x = np.asarray(snp['accessions'][:], dtype=int)
elif type_x == 'csv':
x_gen = pd.read_csv(x_file, index_col=0)
markers = x_gen.columns.values
acc_x = x_gen.index
x_gen = np.asarray(x_gen, dtype=np.float64) / 2
elif type_x.lower() == 'plink':
my_prefix = x_file.split(".")[0]
(bim, fam, bed) = read_plink(my_prefix)
        acc_x = np.array(fam[['fid']], dtype=str).flatten()
markers = np.array(bim[['snp']]).flatten()
else:
sys.exit("Only hdf5, h5py, plink and csv files are supported")
if k_file != 'not_prov':
if type_k in ('hdf5', 'h5py'):
k = h5py.File(k_file, 'r')
            acc_k = np.asarray(k['accessions'][:], dtype=int)
elif type_k == 'csv':
k = pd.read_csv(k_file, index_col=0)
acc_k = k.index
k = np.array(k, dtype=np.float64)
acc_y = np.asarray(y_phe[['accession_id']]).flatten()
acc_isec = [isec for isec in acc_x if isec in acc_y]
    if len(acc_isec) == 0:
print("WARNING: accessions in X do not overlap with accessions in Y")
print("Accessions X:")
print(acc_x)
print("Accessions Y:")
print(acc_y)
idx_acc = list(map(lambda itt: itt in acc_isec, acc_x))
idy_acc = list(map(lambda itt: itt in acc_isec, acc_y))
if k_file != 'not_prov':
idk_acc = list(map(lambda itt: itt in acc_isec, acc_k))
if len(idk_acc) != len(acc_isec):
print("WARNING: not all accessions are in the kinship matrix")
print("Accessions X/Y:")
print(acc_isec)
print("Accessions K:")
print(acc_k)
if cof_file != 0:
cof = pd.read_csv(cof_file, index_col=0)
idc = cof.index
cof = np.array(cof['cof'])
acc_isec = [isec for isec in idc if isec in acc_y]
#idc_acc = list(map(lambda x: x in acc_isec, idc))
if not all(idx_acc):
print('''
accessions ids in the covariate file must be
identical to the ones in the phenotype file
''')
sys.exit()
else:
cof = 0
    y_phe_ = np.asarray(y_phe.drop(columns='accession_id'),
                        dtype=np.float64)[idy_acc, :]
if type_x in ('hdf5', 'h5py'):
x_gen = np.asarray(snp['snps'][:, np.where(idx_acc)[0]], np.float64).T
x_gen = x_gen[np.argsort(acc_x[idx_acc]), :]
if k_file != 'not_prov':
k_1 = np.asarray(k['kinship'][:])[idk_acc, :]
kin_vr = k_1[:, idk_acc]
kin_vr = kin_vr[np.argsort(acc_x[idx_acc]), :]
kin_vr = kin_vr[:, np.argsort(acc_x[idx_acc])]
else:
kin_vr = kinship(x_gen)
elif type_x.lower() == 'plink':
x_gen = np.asarray(bed.compute() / 2, dtype=np.float64)[:, idx_acc].T
if k_file != 'not_prov':
k_1 = k
if 'kinship' in k:
k_1 = k['kinship'][:]
k_1 = np.asarray(k_1)[idk_acc, :]
kin_vr = k_1[:, idk_acc]
kin_vr = kin_vr[np.argsort(acc_x[idx_acc]), :]
kin_vr = kin_vr[:, np.argsort(acc_x[idx_acc])]
else:
kin_vr = kinship(x_gen)
else:
x_gen = x_gen[idx_acc, :]
if k_file != 'not_prov':
k_1 = k[idk_acc, :]
kin_vr = k_1[:, idk_acc]
else:
kin_vr = kinship(x_gen)
print("data has been imported")
return x_gen, kin_vr, y_phe_, markers, cof
def mac_filter(mac_min, x_gen, markers):
''' filter for minor allele frequencies'''
ac1 = np.sum(x_gen, axis=0)
ac0 = x_gen.shape[0] - ac1
macs = np.minimum(ac1, ac0)
markers_used = markers[macs >= mac_min]
x_gen = x_gen[:, macs >= mac_min]
return markers_used, x_gen, macs
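# For illustration (hypothetical call): with mac_min=5, any marker whose minor
# allele count min(ac1, ac0) falls below 5 is dropped from both `markers` and
# `x_gen`; the returned `macs` array is left unfiltered for reporting.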
# calculate betas and se of betas
def stderr_func(itt, marker, y_t2d, int_t):
    '''returns standard errors for individual markers '''
n_phe = len(int_t)
x = tf.stack(
(int_t, tf.squeeze(
tf.matmul(
marker.T, tf.reshape(
itt, (n_phe, -1))))), axis=1)
coeff = tf.matmul(
tf.matmul(
tf.linalg.inv(
tf.matmul(
tf.transpose(x, perm=None),
x)),
tf.transpose(x, perm=None)),
y_t2d)
sum_sq_e = tf.reduce_sum(tf.math.square(tf.math.subtract(y_t2d, tf.math.add(
tf.math.multiply(x[:, 1], coeff[0, 0]), tf.math.multiply(x[:, 1], coeff[1, 0])))))
stand_err = tf.math.sqrt(sum_sq_e / (n_phe - (1 + 2)))
stdr_glob = tf.sqrt(
tf.linalg.diag_part(
tf.math.multiply(
stand_err,
tf.linalg.inv(
tf.matmul(
tf.transpose(x, perm=None),
x)))))[1]
return tf.stack((coeff[1, 0], stdr_glob))
# calculate residual sum squares
def rss(itt, marker, y_t2d, int_t):
''' calculates the residual sum of squares '''
x_t = tf.reduce_sum(tf.math.multiply(marker.T, itt), axis=1)
lm_res = tf.linalg.lstsq(
tf.transpose(
tf.stack(
(int_t, x_t), axis=0), perm=None), y_t2d, l2_regularizer=0.0)
lm_x = tf.concat((tf.squeeze(lm_res), x_t), axis=0)
return tf.reduce_sum(tf.math.square(tf.math.subtract(tf.squeeze(y_t2d), tf.math.add(
tf.math.multiply(lm_x[1], lm_x[2:]), tf.multiply(lm_x[0], int_t)))))
# calculate residual sum squares with co-variates
def rss_cof(itt, marker, y_t2d, int_t, cof_t):
''' calculates the residual sum of squares when cof is included '''
x_t = tf.reduce_sum(tf.math.multiply(marker.T, itt), axis=1)
lm_res = tf.linalg.lstsq(
tf.transpose(
tf.stack(
(int_t, x_t, cof_t), axis=0), perm=None), y_t2d, l2_regularizer=0.0)
return tf.math.reduce_sum(tf.math.square(
y_t2d - (lm_res[1] * x_t + lm_res[0] * int_t + lm_res[2] * cof_t)))
def get_k_stand(kin_vr):
''' obtains the standardized kinship matrix'''
n_phe = kin_vr.shape[0]
return (n_phe - 1) / np.sum((np.identity(n_phe) -
np.ones((n_phe, n_phe)) / n_phe)
* kin_vr) * kin_vr
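# For reference: since (I - J/n) is symmetric, the elementwise sum above equals
# trace((I - J/n) K), so the scaling factor is (n - 1) / trace((I - J/n) K) and
# the standardized kinship has an average diagonal near one.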
def get_herit(y_phe, k_stand):
''' calculates the heritabilty'''
return estimate_variance_components(y_phe, "normal", k_stand, verbose=False)
def transform_kinship(v_g, k_stand, v_e):
''' transform the kinship matrix with cholesky transformation '''
n_phe = k_stand.shape[0]
return np.transpose(
np.linalg.inv(
np.linalg.cholesky(
v_g *
k_stand +
v_e *
np.identity(n_phe)))).astype(
np.float64)
def transform_y(marker, y_phe):
''' transform phenotypes '''
return np.sum(np.multiply(np.transpose(marker), y_phe), axis=1).astype(np.float64)
def transform_int(marker):
''' transform the intercept'''
n_phe = marker.shape[0]
return np.sum(
np.multiply(
np.transpose(marker),
np.ones(n_phe)),
axis=1).astype(
np.float64)
def emmax(int_t, y_trans):
''' run emmax according to Kang et al 2010'''
n_phe = len(int_t)
return (np.linalg.lstsq(np.reshape(int_t, (n_phe, -1)),
np.reshape(y_trans, (n_phe, -1)), rcond=None)[1]).astype(np.float64)
def transform_cof(marker, cof):
''' transform the coefficients '''
return np.sum(np.multiply(np.transpose(marker), cof), axis=1).astype(np.float64)
def get_output(f_1, x_sub, stdr_glob):
''' get the F1 values'''
return tf.concat([tf.reshape(f_1, (x_sub.shape[1], -1)), stdr_glob], axis=1)
def get_stderr(marker, y_t2d, int_t, x_sub):
    ''' build tensor looping over all markers to obtain all standard errors '''
return tf.map_fn(lambda mar: stderr_func(mar, marker, y_t2d, int_t), x_sub.T)
def get_f1(rss_env, r1_full, n_phe):
    '''calculate the f1 scores for all markers'''
return tf.divide(
tf.subtract(
rss_env, r1_full), tf.divide(
r1_full, (n_phe - 2)))
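# For reference: this is the nested-model F statistic with 1 and (n_phe - 2)
# degrees of freedom, F = (RSS_env - RSS_full) / (RSS_full / (n_phe - 2)),
# matching the f.logsf(f_dist, 1, n_phe - 2) call in get_pval below.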
def get_pval(f_dist, n_phe):
    '''get log p-values from f1 scores; the caller exponentiates the result'''
return f.logsf(f_dist, 1, n_phe - 2)
def get_r1_full(marker, y_t2d, int_t, x_sub):
''' build tensor for full model '''
return tf.map_fn(lambda mar: rss(mar, marker, y_t2d, int_t), x_sub.T)
def gwas(x_gen, kin_vr, y_phe, batch_size, cof):
''' get gwas results, calls all the subfunctions '''
# with open("test_data/cof_test", 'wb') as f:
# pickle.dump(cof, f)
y_phe = y_phe.flatten()
n_marker = x_gen.shape[1]
n_phe = len(y_phe)
# REML
k_stand = get_k_stand(kin_vr)
v_g, delta, v_e = get_herit(y_phe, k_stand)
print(" Pseudo-heritability is ", v_g / (v_e + v_g + delta))
print(" Performing GWAS on ", n_phe,
" phenotypes and ", n_marker, "markers")
    # Transform kinship-matrix, phenotypes and estimate intercept
# Xo = np.ones(K.shape[0]).flatten()
marker = transform_kinship(v_g, k_stand, v_e)
y_trans = transform_y(marker, y_phe)
int_t = transform_int(marker)
# transform co-factor
    if not isinstance(cof, int):
cof_t = transform_cof(marker, cof)
# EMMAX Scan
rss_env = emmax(int_t, y_trans)
    # loop over the batches
for i in range(int(np.ceil(n_marker / batch_size))):
tf.compat.v1.reset_default_graph()
if n_marker < batch_size:
x_sub = x_gen
else:
lower_limit = batch_size * i
upper_limit = batch_size * i + batch_size
if upper_limit <= n_marker:
x_sub = x_gen[:, lower_limit:upper_limit]
print(
"Working on markers ",
lower_limit,
" to ",
upper_limit,
" of ",
n_marker)
else:
x_sub = x_gen[:, lower_limit:]
print(
"Working on markers ",
lower_limit,
" to ",
n_marker,
" of ",
n_marker)
config = tf.compat.v1.ConfigProto()
sess = tf.compat.v1.Session(config=config)
y_t2d = tf.cast(tf.reshape(y_trans, (n_phe, -1)), dtype=tf.float64)
# y_tensor = tf.convert_to_tensor(y_trans,dtype = tf.float64)
stdr_glob = get_stderr(marker, y_t2d, int_t, x_sub)
        if not isinstance(cof, int):
r1_full = tf.map_fn(
lambda mar: rss_cof(
mar, marker, y_t2d, int_t, cof_t), x_sub.T)
else:
r1_full = get_r1_full(marker, y_t2d, int_t, x_sub)
f_1 = get_f1(rss_env, r1_full, n_phe)
if i == 0:
output = sess.run(get_output(f_1, x_sub, stdr_glob))
else:
tmp = sess.run(get_output(f_1, x_sub, stdr_glob))
output = np.append(output, tmp, axis=0)
sess.close()
f_dist = output[:, 0]
pval = np.exp(get_pval(f_dist, n_phe))
output[:, 0] = pval
# with open("test_data/cof_output", 'wb') as f: pickle.dump(output, f)
return output
```
#### File: GWAS_Flow/gwas_flow/plot.py
```python
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.pyplot import figure
from pathlib import Path
# functions to plot a simple manhattanplot which includes the bonferroni
# threshold and if applicable the permutation-based threshold
def manhattan(res_name, perm):
''' returns manhattan plot '''
res = pd.read_csv(res_name).sort_values(['chr', 'pos'])
    if not (np.issubdtype(res['chr'].dtype, np.number)
            and np.issubdtype(res['pos'].dtype, np.number)):
raise ValueError(
'''The manhattan plot requires numeric
information for the chromosomes and position of the markers
''')
res.chr = res.chr.astype('category')
res['my_cumsum'] = list(range(1, len(res) + 1))
res['BPcum'] = 0
my_s = 0
    shifted_positions = list()
    nbp = list()
    for i in res.chr.unique():
        nbp.append(np.max(res[res['chr'] == i]['pos']))
        shifted_positions.append(res[res['chr'] == i]['pos'] + my_s)
        my_s = my_s + nbp[i - 1]
    res['BPcum'] = [y for x in shifted_positions for y in x]
res['minuslog10pvalue'] = -np.log10(res.pval)
res_group = res.groupby('chr')
figure(num=None, figsize=(8, 6), dpi=80, facecolor='w', edgecolor='k')
fig, my_axis = plt.subplots()
del fig
my_axis.margins(0.05)
my_axis.hlines(
np.log10(
len(res)), xmin=0, xmax=np.max(
res['BPcum']), linestyles="dashdot")
if perm > 1:
perm_res_name = Path(res_name).parent.joinpath(
'perm_' + Path(res_name).name)
perm_res = pd.read_csv(perm_res_name)
        perm_idx = int(round(perm / 20))
perm_threshold = perm_res['min_p'][perm_idx]
my_axis.hlines(-np.log10(perm_threshold), xmin=0,
xmax=np.max(res['BPcum']), linestyles="dashed")
for name, group in res_group:
my_axis.plot(
group.BPcum,
group.minuslog10pvalue,
marker='o',
linestyle='',
ms=1,
label=name)
plt.xticks([])
plt.legend()
plt.savefig("manhattan.pdf")
plt.savefig("manhattan.png")
```
#### File: GWAS_Flow/tests/test.py
```python
import warnings
warnings.filterwarnings('ignore', category=DeprecationWarning)
warnings.filterwarnings('ignore', category=FutureWarning)
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
import numpy as np
from gwas_flow import main
import pickle
import unittest
class TestGwas(unittest.TestCase):
''' class containing tests for gwas'''
def test_gwas(self):
''' tests for the gwas function '''
with open('test_data/X_test', 'rb') as g_file:
x_gen = pickle.load(g_file)
with open('test_data/K_gwas_test', 'rb') as k_file:
kin_mat = pickle.load(k_file)
with open('test_data/Y_test', 'rb') as phe_file:
y_pheno = pickle.load(phe_file)
        with open('test_data/Out_test', 'rb') as out_file:
            output = pickle.load(out_file)
with open('test_data/cof_test', 'rb') as coutt:
cof = pickle.load(coutt)
with open('test_data/cof_output', 'rb') as coutput:
cof_output = pickle.load(coutput)
batch_size = 500000
output = np.float64(output)
x_gen = np.float64(x_gen)
#kin_mat = np.float64(kin_mat)
y_pheno = np.float64(y_pheno)
result = main.gwas(
x_gen,
kin_mat,
y_pheno,
batch_size,
cof=0)
self.assertIsNone(
np.testing.assert_array_almost_equal(result, output))
cof_result = main.gwas(
x_gen,
kin_mat,
y_pheno,
batch_size,
cof)
self.assertIsNone(
np.testing.assert_array_almost_equal(cof_result, cof_output))
``` |
{
"source": "joy-void-joy/discord-py-template",
"score": 3
} |
#### File: joy-void-joy/discord-py-template/utils.py
```python
from discord.ext import commands
import discord
import urllib.request
from traceback import format_exc
class Utils(commands.Cog):
@staticmethod
async def find_or_create_role(guild, name, **kwargs):
try:
result = next(i for i in guild.roles if i.name == name)
except StopIteration:
result = await guild.create_role(name=name, **kwargs)
return result
@staticmethod
    async def send_embed(ctx: commands.Context, message: discord.Message, delete: bool = False, footer: str = "", link: str = "", title: str = "", image: str = ""):
"""Util function to send an embed for quoting or other purposes"""
tosend = discord.Embed(description=message.content, timestamp=message.created_at)
tosend.title = title
tosend.url = link
if message.author:
tosend.color = message.author.color if message.author.color != discord.Color.default() else discord.Embed.Empty
tosend.set_author(name=message.author.display_name, icon_url=message.author.avatar_url)
if footer:
tosend.set_footer(icon_url=ctx.author.avatar_url, text=footer)
        if not image:
            try:
                # Take the first embed/attachment URL, if there is one
                image = ([i.url for i in message.embeds] + [i.proxy_url for i in message.attachments])[0]
            except IndexError:
                pass
if image:
if message.content:
tosend.set_thumbnail(url=image)
            else:
tosend.set_image(url=image)
### Sending
await ctx.send(embed=tosend)
if delete:
try:
await ctx.message.delete()
except discord.errors.NotFound:
pass
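    # Hypothetical usage from within a command (names assumed, not from the
    # original file):
    #   await bot.utils.send_embed(ctx, quoted_msg, footer="Requested by someone", title="Quote")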
@staticmethod
def download_avatar(user: discord.Member, path: str):
"""Download the avatar of a user to path"""
opener = urllib.request.build_opener()
opener.addheaders = [('User-agent', 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:55.0) Gecko/20100101 Firefox/55.0')]
urllib.request.install_opener(opener)
urllib.request.urlretrieve(str(user.avatar_url), filename=path)
return path
@staticmethod
#TODO: Split this up
async def print_error(ctx, func=format_exc):
to_print = func()
list_messages = [to_print[i:i+1900] for i in range(0, len(to_print), 1900)]
for i in list_messages:
await ctx.send\
(
f"```python\n"
f"{i}\n"
f"```"
)
def setup(bot):
bot.add_cog(Utils())
bot.utils = bot.get_cog(Utils.__name__)
``` |
{
"source": "joy-void-joy/jarvis-codex-server",
"score": 3
} |
#### File: apps/ask_codex/Anonymizer.py
```python
from __future__ import annotations
from typing import Any
import ast
import string
import random
class Anonymizer(ast.NodeTransformer):
def __init__(self):
self.pool = string.ascii_letters
self.current_number_letters = 1
self.lut = {}
self.context = {}
### Utils
def _assign_name(self, node: Any, key="name"):
if len(self.lut) >= len(self.pool) ** self.current_number_letters:
self.current_number_letters += 1
while (r := ''.join(random.choices(self.pool, k=self.current_number_letters))) in self.lut.values():
pass
self.lut[getattr(node, key)] = r
setattr(node, key, r)
return r
### Visitors
def generic_visit(self, node: ast.AST, **kwargs) -> ast.AST:
old_context = dict(self.context)
self.context |= kwargs
result = super().generic_visit(node)
self.context = old_context
return result
## Transforms "return f'This is a {value} and {another}'" into "return [value, another]"
def visit_Return(self, node: ast.Return) -> Any:
if isinstance(node.value, ast.JoinedStr):
result = ast.List([i.value for i in ast.walk(node) if isinstance(i, ast.FormattedValue)])
elif isinstance(node.value, ast.Str):
result = ast.List([])
else:
result = node.value
return ast.Return(self.generic_visit(result))
## Anonymize variables/function/classes names
def visit_ClassDef(self, node: ast.ClassDef) -> Any:
self._assign_name(node)
return self.generic_visit(node)
def visit_FunctionDef(self, node: ast.FunctionDef) -> Any:
if node.name.startswith('on_'):
return self.generic_visit(node)
self._assign_name(node)
return self.generic_visit(node)
def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> Any:
return self.visit_FunctionDef(node)
def visit_arg(self, node: ast.arg) -> Any:
if node.arg == "self":
return self.generic_visit(node)
self._assign_name(node, key='arg')
return self.generic_visit(node)
def visit_Name(self, node: ast.Name) -> Any:
try:
# Name is already assigned
node.id = self.lut[node.id]
        except KeyError:
            # Name is new and has to be declared. This can only happen if we are storing the variable (as in: number = 42 -> n = 42)
            if isinstance(node.ctx, ast.Store):
                self._assign_name(node, 'id')
            else:
                # Re-raise instead of using `finally: return`, which would silently swallow the error
                raise
        return self.generic_visit(node)
def anonymize(code):
return ast.unparse(Anonymizer().visit(ast.parse(code)))
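# Illustrative behavior (names are randomized, so the output shown is hypothetical):
#   anonymize("def greet(person): return f'hello {person}'")
#   could yield something like "def aB(cD):\n    return [cD]"
# since f-string returns are rewritten into lists of their interpolated values.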
if __name__ == "__main__":
import sys
print("Enter code to be anonymized")
code = sys.stdin.read()
print(anonymize(code))
``` |
{
"source": "joy-void-joy/tacocast",
"score": 3
} |
#### File: tacocast/tacocast/dynamic_window.py
```python
import itertools
import more_itertools
# Dynamic sliding window:
# slide over an iterator, grouping consecutive items into chunks whose summed
# key stays at or under `target` where possible; `hard_target` adds a lookahead
# cap on the running total including the next item.
# TODO: Move to a library?
class _dynamic_window:
_current_sum = 0
_current_class = 0
_bump = False
target: int = None
max_target: int = None
key = lambda _, x: x
inclusive = False
def __init__(self, target, hard_target=None, key=None, inclusive=None):
self.target = target
self.hard_target = hard_target
self.key = key or self.key
self.inclusive = inclusive or self.inclusive
def __call__(self, pack):
[v, next_v] = pack
self._bump = False
self._current_sum += self.key(v)
if self._current_sum > self.target or (self.hard_target and next_v and self._current_sum + self.key(next_v) > self.hard_target):
self._current_sum = self.key(v) if not self.inclusive else 0
self._current_class += 1
self._bump = True
return self._current_class - 1 if self.inclusive and self._bump else self._current_class
def dynamic_window(iterator, target, *args, **kwargs):
groups = itertools.groupby(more_itertools.windowed(itertools.chain(iterator, [None]), 2), key=_dynamic_window(target, *args, **kwargs))
return ((pack[0] for pack in group[1]) for group in groups)
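# Minimal usage sketch (hypothetical data, not part of the original module):
# chunk words so each chunk's total character count stays at or under 10.
if __name__ == "__main__":
    words = ["alpha", "beta", "gamma", "delta", "epsilon"]
    chunks = [list(chunk) for chunk in dynamic_window(words, target=10, key=len)]
    print(chunks)  # expected: [['alpha', 'beta'], ['gamma', 'delta'], ['epsilon']]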
```
#### File: tacocast/tacocast/tqdm.py
```python
from tqdm.auto import tqdm as tqdm_aux
def tqdm(iterator, postfix_func=lambda i: {'current': i}, *args, **kwargs):
with tqdm_aux(iterator, *args, **kwargs) as tq:
for i in tq:
tq.set_postfix(postfix_func(i))
yield i
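# Minimal usage sketch (hypothetical): surface the current item as a postfix.
if __name__ == "__main__":
    import time
    for _ in tqdm(range(5), postfix_func=lambda i: {'current': i}):
        time.sleep(0.1)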
``` |
{
"source": "joywa/graph-notebook",
"score": 2
} |
#### File: gremlin/client_provider/factory.py
```python
from graph_notebook.configuration.generate_config import AuthModeEnum
from graph_notebook.gremlin.client_provider.default_client import ClientProvider
from graph_notebook.gremlin.client_provider.iam_client import IamClientProvider
from graph_notebook.authentication.iam_credentials_provider.credentials_factory import credentials_provider_factory, \
IAMAuthCredentialsProvider
def create_client_provider(mode: AuthModeEnum,
credentials_provider_mode: IAMAuthCredentialsProvider = IAMAuthCredentialsProvider.ROLE):
if mode == AuthModeEnum.DEFAULT:
return ClientProvider()
elif mode == AuthModeEnum.IAM:
credentials_provider = credentials_provider_factory(credentials_provider_mode)
return IamClientProvider(credentials_provider)
else:
raise NotImplementedError(f"invalid client mode {mode} provided")
```
#### File: gremlin/client_provider/graphsonV3d0_MapType_objectify_patch.py
```python
from gremlin_python.structure.io.graphsonV3d0 import MapType
# Original code from Tinkerpop 3.4.1
#
# class MapType(_GraphSONTypeIO):
# python_type = DictType
# graphson_type = "g:Map"
#
# @classmethod
# def dictify(cls, d, writer):
# l = []
# for key in d:
# l.append(writer.toDict(key))
# l.append(writer.toDict(d[key]))
# return GraphSONUtil.typedValue("Map", l)
#
# @classmethod
# def objectify(cls, l, reader):
# new_dict = {}
# if len(l) > 0:
# x = 0
# while x < len(l):
# new_dict[reader.toObject(l[x])] = reader.toObject(l[x + 1])
# x = x + 2
# return new_dict
class MapType_patch:
@classmethod
def objectify(cls, l, reader): # noqa E741
new_dict = {}
if len(l) > 0:
x = 0
while x < len(l):
tmp = reader.toObject(l[x])
# Avoid keys that are dicts by making them tuples
if type(tmp) == dict:
tmp = tuple([(k, v) for k, v in tmp.items()])
new_dict[tmp] = reader.toObject(l[x + 1])
x = x + 2
return new_dict
MapType.objectify = MapType_patch.objectify
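# Effect of the patch (hypothetical values): a g:Map entry whose key is itself
# a map, e.g. {"a": 1} -> 2, now deserializes to {(("a", 1),): 2} instead of
# raising TypeError: unhashable type: 'dict'.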
```
#### File: graph_notebook/loader/load.py
```python
from graph_notebook.request_param_generator.call_and_get_response import call_and_get_response
FORMAT_CSV = 'csv'
FORMAT_NTRIPLE = 'ntriples'
FORMAT_NQUADS = 'nquads'
FORMAT_RDFXML = 'rdfxml'
FORMAT_TURTLE = 'turtle'
PARALLELISM_LOW = 'LOW'
PARALLELISM_MEDIUM = 'MEDIUM'
PARALLELISM_HIGH = 'HIGH'
PARALLELISM_OVERSUBSCRIBE = 'OVERSUBSCRIBE'
VALID_FORMATS = [FORMAT_CSV, FORMAT_NTRIPLE, FORMAT_NQUADS, FORMAT_RDFXML, FORMAT_TURTLE]
PARALLELISM_OPTIONS = [PARALLELISM_LOW, PARALLELISM_MEDIUM, PARALLELISM_HIGH, PARALLELISM_OVERSUBSCRIBE]
LOADER_ACTION = 'loader'
FINAL_LOAD_STATUSES = ['LOAD_COMPLETED',
'LOAD_COMMITTED_W_WRITE_CONFLICTS',
'LOAD_CANCELLED_BY_USER',
'LOAD_CANCELLED_DUE_TO_ERRORS',
'LOAD_FAILED',
'LOAD_UNEXPECTED_ERROR',
'LOAD_DATA_DEADLOCK',
'LOAD_DATA_FAILED_DUE_TO_FEED_MODIFIED_OR_DELETED',
'LOAD_S3_READ_ERROR',
'LOAD_S3_ACCESS_DENIED_ERROR',
'LOAD_IN_QUEUE',
'LOAD_FAILED_BECAUSE_DEPENDENCY_NOT_SATISFIED',
'LOAD_FAILED_INVALID_REQUEST', ]
def do_load(host, port, load_format, use_ssl, source, region, arn, fail_on_error, parallelism,
update_single_cardinality, request_param_generator):
payload = {
'source': source,
'format': load_format,
'region': region,
'failOnError': fail_on_error,
'parallelism': parallelism,
'updateSingleCardinalityProperties': update_single_cardinality
}
if arn != '':
payload['iamRoleArn'] = arn
res = call_and_get_response('post', LOADER_ACTION, host, port, request_param_generator, use_ssl, payload)
return res.json()
def get_loader_jobs(host, port, use_ssl, request_param_generator):
res = call_and_get_response('get', LOADER_ACTION, host, port, request_param_generator, use_ssl)
return res.json()
def get_load_status(host, port, use_ssl, request_param_generator, id):
payload = {
'loadId': id
}
res = call_and_get_response('get', LOADER_ACTION, host, port, request_param_generator, use_ssl, payload)
return res.json()
def cancel_load(host, port, use_ssl, request_param_generator, load_id):
payload = {
'loadId': load_id
}
res = call_and_get_response('get', LOADER_ACTION, host, port, request_param_generator, use_ssl, payload)
return res.status_code == 200
```
#### File: graph_notebook/magics/graph_magic.py
```python
from __future__ import print_function # Python 2/3 compatibility
import argparse
import logging
import json
import time
import os
import uuid
from enum import Enum
import ipywidgets as widgets
from gremlin_python.driver.protocol import GremlinServerError
from IPython.core.display import HTML, display_html, display
from IPython.core.magic import (Magics, magics_class, cell_magic, line_magic, line_cell_magic, needs_local_scope)
from ipywidgets.widgets.widget_description import DescriptionStyle
from requests import HTTPError
import graph_notebook
from graph_notebook.configuration.generate_config import generate_default_config, DEFAULT_CONFIG_LOCATION
from graph_notebook.decorators.decorators import display_exceptions
from graph_notebook.magics.ml import neptune_ml_magic_handler, generate_neptune_ml_parser
from graph_notebook.network import SPARQLNetwork
from graph_notebook.network.gremlin.GremlinNetwork import parse_pattern_list_str, GremlinNetwork
from graph_notebook.sparql.table import get_rows_and_columns
from graph_notebook.gremlin.query import do_gremlin_query, do_gremlin_explain, do_gremlin_profile
from graph_notebook.gremlin.status import do_gremlin_status, do_gremlin_cancel
from graph_notebook.sparql.query import get_query_type, do_sparql_query, do_sparql_explain
from graph_notebook.sparql.status import do_sparql_status, do_sparql_cancel
from graph_notebook.system.database_reset import perform_database_reset, initiate_database_reset
from graph_notebook.visualization.template_retriever import retrieve_template
from graph_notebook.gremlin.client_provider.factory import create_client_provider
from graph_notebook.request_param_generator.factory import create_request_generator
from graph_notebook.loader.load import do_load, get_loader_jobs, get_load_status, cancel_load, VALID_FORMATS, \
PARALLELISM_OPTIONS, PARALLELISM_HIGH, FINAL_LOAD_STATUSES
from graph_notebook.configuration.get_config import get_config, get_config_from_dict
from graph_notebook.seed.load_query import get_data_sets, get_queries
from graph_notebook.status.get_status import get_status
from graph_notebook.widgets import Force
from graph_notebook.options import OPTIONS_DEFAULT_DIRECTED, vis_options_merge
sparql_table_template = retrieve_template("sparql_table.html")
sparql_explain_template = retrieve_template("sparql_explain.html")
sparql_construct_template = retrieve_template("sparql_construct.html")
gremlin_table_template = retrieve_template("gremlin_table.html")
pre_container_template = retrieve_template("pre_container.html")
loading_wheel_template = retrieve_template("loading_wheel.html")
error_template = retrieve_template("error.html")
loading_wheel_html = loading_wheel_template.render()
DEFAULT_LAYOUT = widgets.Layout(max_height='600px', overflow='scroll', width='100%')
logging.basicConfig()
logger = logging.getLogger("graph_magic")
DEFAULT_MAX_RESULTS = 1000
GREMLIN_CANCEL_HINT_MSG = '''You must supply a string queryId when using --cancelQuery, for example: %gremlin_status --cancelQuery --queryId my-query-id'''
SPARQL_CANCEL_HINT_MSG = '''You must supply a string queryId when using --cancelQuery, for example: %sparql_status --cancelQuery --queryId my-query-id'''
SEED_LANGUAGE_OPTIONS = ['', 'Gremlin', 'SPARQL']
LOADER_FORMAT_CHOICES = ['']
LOADER_FORMAT_CHOICES.extend(VALID_FORMATS)
class QueryMode(Enum):
DEFAULT = 'query'
EXPLAIN = 'explain'
PROFILE = 'profile'
EMPTY = ''
def store_to_ns(key: str, value, ns: dict = None):
if key == '' or ns is None:
return
ns[key] = value
def str_to_query_mode(s: str) -> QueryMode:
s = s.lower()
for mode in list(QueryMode):
if mode.value == s:
return QueryMode(s)
logger.debug(f'Invalid query mode {s} supplied, defaulting to query.')
return QueryMode.DEFAULT
# noinspection PyTypeChecker
@magics_class
class Graph(Magics):
def __init__(self, shell):
# You must call the parent constructor
super(Graph, self).__init__(shell)
try:
self.config_location = os.getenv('GRAPH_NOTEBOOK_CONFIG', DEFAULT_CONFIG_LOCATION)
self.graph_notebook_config = get_config(self.config_location)
except FileNotFoundError:
self.graph_notebook_config = generate_default_config()
print(
                'Could not find a valid configuration. Do not forget to validate your settings using %graph_notebook_config')
self.max_results = DEFAULT_MAX_RESULTS
self.graph_notebook_vis_options = OPTIONS_DEFAULT_DIRECTED
logger.setLevel(logging.ERROR)
@line_cell_magic
@display_exceptions
def graph_notebook_config(self, line='', cell=''):
if cell != '':
data = json.loads(cell)
config = get_config_from_dict(data)
self.graph_notebook_config = config
print('set notebook config to:')
print(json.dumps(self.graph_notebook_config.to_dict(), indent=2))
elif line == 'reset':
self.graph_notebook_config = get_config(self.config_location)
print('reset notebook config to:')
print(json.dumps(self.graph_notebook_config.to_dict(), indent=2))
elif line == 'silent':
"""
            silent option so that our neptune_menu extension can receive json
            instead of a python Configuration object
"""
config_dict = self.graph_notebook_config.to_dict()
return print(json.dumps(config_dict, indent=2))
else:
config_dict = self.graph_notebook_config.to_dict()
print(json.dumps(config_dict, indent=2))
return self.graph_notebook_config
@line_magic
def graph_notebook_host(self, line):
if line == '':
print('please specify a host.')
return
# TODO: we should attempt to make a status call to this host before we set the config to this value.
self.graph_notebook_config.host = line
print(f'set host to {line}')
@cell_magic
@needs_local_scope
@display_exceptions
def sparql(self, line='', cell='', local_ns: dict = None):
        parser = argparse.ArgumentParser()
        parser.add_argument('query_mode', nargs='?', default='query',
                            help='query mode (default=query) [query|explain]')
        parser.add_argument('--endpoint-prefix', '-e', default='',
                            help='prefix path to sparql endpoint. For example, if "foo/bar" were specified, the endpoint called would be /foo/bar/sparql')
        parser.add_argument('--expand-all', action='store_true')
        parser.add_argument('--store-to', type=str, default='', help='store query result to this variable')
        args = parser.parse_args(line.split())
        request_generator = create_request_generator(self.graph_notebook_config.auth_mode,
                                                     self.graph_notebook_config.iam_credentials_provider_type,
                                                     command='sparql')
mode = str_to_query_mode(args.query_mode)
endpoint_prefix = args.endpoint_prefix if args.endpoint_prefix != '' else self.graph_notebook_config.sparql.endpoint_prefix
tab = widgets.Tab()
logger.debug(f'using mode={mode}')
if mode == QueryMode.EXPLAIN:
res = do_sparql_explain(cell, self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator, path_prefix=endpoint_prefix)
store_to_ns(args.store_to, res, local_ns)
if 'error' in res:
html = error_template.render(error=json.dumps(res['error'], indent=2))
else:
html = sparql_explain_template.render(table=res)
explain_output = widgets.Output(layout=DEFAULT_LAYOUT)
with explain_output:
display(HTML(html))
tab.children = [explain_output]
tab.set_title(0, 'Explain')
display(tab)
else:
query_type = get_query_type(cell)
headers = {} if query_type not in ['SELECT', 'CONSTRUCT', 'DESCRIBE'] else {
'Accept': 'application/sparql-results+json'}
res = do_sparql_query(cell, self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator, headers, endpoint_prefix)
store_to_ns(args.store_to, res, local_ns)
titles = []
children = []
display(tab)
table_output = widgets.Output(layout=DEFAULT_LAYOUT)
# Assign an empty value so we can always display to table output.
# We will only add it as a tab if the type of query allows it.
# Because of this, the table_output will only be displayed on the DOM if the query was of type SELECT.
table_html = ""
query_type = get_query_type(cell)
if query_type in ['SELECT', 'CONSTRUCT', 'DESCRIBE']:
logger.debug('creating sparql network...')
# some issues with displaying a datatable when not wrapped in an hbox and displayed last
hbox = widgets.HBox([table_output], layout=DEFAULT_LAYOUT)
titles.append('Table')
children.append(hbox)
                expand_all = args.expand_all
sn = SPARQLNetwork(expand_all=expand_all)
sn.extract_prefix_declarations_from_query(cell)
try:
sn.add_results(res)
except ValueError as value_error:
logger.debug(value_error)
logger.debug(f'number of nodes is {len(sn.graph.nodes)}')
if len(sn.graph.nodes) > 0:
f = Force(network=sn, options=self.graph_notebook_vis_options)
titles.append('Graph')
children.append(f)
logger.debug('added sparql network to tabs')
rows_and_columns = get_rows_and_columns(res)
if rows_and_columns is not None:
table_id = f"table-{str(uuid.uuid4())[:8]}"
table_html = sparql_table_template.render(columns=rows_and_columns['columns'],
rows=rows_and_columns['rows'], guid=table_id)
# Handling CONSTRUCT and DESCRIBE on their own because we want to maintain the previous result pattern
# of showing a tsv with each line being a result binding in addition to new ones.
if query_type == 'CONSTRUCT' or query_type == 'DESCRIBE':
lines = []
for b in res['results']['bindings']:
lines.append(f'{b["subject"]["value"]}\t{b["predicate"]["value"]}\t{b["object"]["value"]}')
raw_output = widgets.Output(layout=DEFAULT_LAYOUT)
with raw_output:
html = sparql_construct_template.render(lines=lines)
display(HTML(html))
children.append(raw_output)
titles.append('Raw')
json_output = widgets.Output(layout=DEFAULT_LAYOUT)
with json_output:
print(json.dumps(res, indent=2))
children.append(json_output)
titles.append('JSON')
tab.children = children
for i in range(len(titles)):
tab.set_title(i, titles[i])
with table_output:
display(HTML(table_html))
@line_magic
@needs_local_scope
@display_exceptions
def sparql_status(self, line='', local_ns: dict = None):
parser = argparse.ArgumentParser()
parser.add_argument('-q', '--queryId', default='',
help='The ID of a running SPARQL query. Only displays the status of the specified query.')
parser.add_argument('-c', '--cancelQuery', action='store_true',
help='Tells the status command to cancel a query. This parameter does not take a value')
parser.add_argument('-s', '--silent', action='store_true',
help='If silent=true then the running query is cancelled and the HTTP response code is 200. If silent is not present or silent=false, the query is cancelled with an HTTP 500 status code.')
parser.add_argument('--store-to', type=str, default='', help='store query result to this variable')
args = parser.parse_args(line.split())
request_generator = create_request_generator(self.graph_notebook_config.auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
if not args.cancelQuery:
res = do_sparql_status(self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator, args.queryId)
else:
if args.queryId == '':
print(SPARQL_CANCEL_HINT_MSG)
return
else:
res = do_sparql_cancel(self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator, args.queryId, args.silent)
store_to_ns(args.store_to, res, local_ns)
print(json.dumps(res, indent=2))
@cell_magic
@needs_local_scope
@display_exceptions
def gremlin(self, line, cell, local_ns: dict = None):
parser = argparse.ArgumentParser()
parser.add_argument('query_mode', nargs='?', default='query',
help='query mode (default=query) [query|explain|profile]')
parser.add_argument('-p', '--path-pattern', default='', help='path pattern')
parser.add_argument('-g', '--group-by', default='T.label', help='Property used to group nodes (e.g. code, T.region) default is T.label')
parser.add_argument('--store-to', type=str, default='', help='store query result to this variable')
parser.add_argument('--ignore-groups', action='store_true', help="Ignore all grouping options")
args = parser.parse_args(line.split())
mode = str_to_query_mode(args.query_mode)
logger.debug(f'Arguments {args}')
tab = widgets.Tab()
if mode == QueryMode.EXPLAIN:
request_generator = create_request_generator(self.graph_notebook_config.auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
query_res = do_gremlin_explain(cell, self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator)
if 'explain' in query_res:
html = pre_container_template.render(content=query_res['explain'])
else:
html = pre_container_template.render(content='No explain found')
explain_output = widgets.Output(layout=DEFAULT_LAYOUT)
with explain_output:
display(HTML(html))
tab.children = [explain_output]
tab.set_title(0, 'Explain')
display(tab)
elif mode == QueryMode.PROFILE:
request_generator = create_request_generator(self.graph_notebook_config.auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
query_res = do_gremlin_profile(cell, self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator)
if 'profile' in query_res:
html = pre_container_template.render(content=query_res['profile'])
else:
html = pre_container_template.render(content='No profile found')
profile_output = widgets.Output(layout=DEFAULT_LAYOUT)
with profile_output:
display(HTML(html))
tab.children = [profile_output]
tab.set_title(0, 'Profile')
display(tab)
else:
client_provider = create_client_provider(self.graph_notebook_config.auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
query_res = do_gremlin_query(cell, self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, client_provider)
children = []
titles = []
table_output = widgets.Output(layout=DEFAULT_LAYOUT)
titles.append('Console')
children.append(table_output)
try:
logger.debug(f'groupby: {args.group_by}')
if args.ignore_groups:
gn = GremlinNetwork()
else:
gn = GremlinNetwork(group_by_property=args.group_by)
if args.path_pattern == '':
gn.add_results(query_res)
else:
pattern = parse_pattern_list_str(args.path_pattern)
gn.add_results_with_pattern(query_res, pattern)
logger.debug(f'number of nodes is {len(gn.graph.nodes)}')
if len(gn.graph.nodes) > 0:
f = Force(network=gn, options=self.graph_notebook_vis_options)
titles.append('Graph')
children.append(f)
logger.debug('added gremlin network to tabs')
except ValueError as value_error:
logger.debug(f'unable to create gremlin network from result. Skipping from result set: {value_error}')
tab.children = children
for i in range(len(titles)):
tab.set_title(i, titles[i])
display(tab)
table_id = f"table-{str(uuid.uuid4()).replace('-', '')[:8]}"
table_html = gremlin_table_template.render(guid=table_id, results=query_res)
with table_output:
display(HTML(table_html))
store_to_ns(args.store_to, query_res, local_ns)
@line_magic
@needs_local_scope
@display_exceptions
def gremlin_status(self, line='', local_ns: dict = None):
parser = argparse.ArgumentParser()
parser.add_argument('-q', '--queryId', default='',
help='The ID of a running Gremlin query. Only displays the status of the specified query.')
parser.add_argument('-c', '--cancelQuery', action='store_true',
help='Required for cancellation. Parameter has no corresponding value.')
parser.add_argument('-w', '--includeWaiting', action='store_true',
help='(Optional) Normally, only running queries are included in the response. When the includeWaiting parameter is specified, the status of all waiting queries is also returned.')
parser.add_argument('--store-to', type=str, default='', help='store query result to this variable')
args = parser.parse_args(line.split())
request_generator = create_request_generator(self.graph_notebook_config.auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
if not args.cancelQuery:
res = do_gremlin_status(self.graph_notebook_config.host,
self.graph_notebook_config.port,
self.graph_notebook_config.ssl, self.graph_notebook_config.auth_mode,
request_generator, args.queryId, args.includeWaiting)
else:
if args.queryId == '':
print(GREMLIN_CANCEL_HINT_MSG)
return
else:
res = do_gremlin_cancel(self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, self.graph_notebook_config.auth_mode,
request_generator, args.queryId)
print(json.dumps(res, indent=2))
store_to_ns(args.store_to, res, local_ns)
@line_magic
@display_exceptions
def status(self, line):
logger.info(f'calling for status on endpoint {self.graph_notebook_config.host}')
request_generator = create_request_generator(self.graph_notebook_config.auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
logger.info(
f'used credentials_provider_mode={self.graph_notebook_config.iam_credentials_provider_type.name} and auth_mode={self.graph_notebook_config.auth_mode.name} to make status request')
res = get_status(self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator)
logger.info(f'got the response {res}')
return res
@line_magic
@display_exceptions
def db_reset(self, line):
host = self.graph_notebook_config.host
port = self.graph_notebook_config.port
ssl = self.graph_notebook_config.ssl
logger.info(f'calling system endpoint {host}')
parser = argparse.ArgumentParser()
parser.add_argument('-g', '--generate-token', action='store_true', help='generate token for database reset')
parser.add_argument('-t', '--token', nargs=1, default='', help='perform database reset with given token')
parser.add_argument('-y', '--yes', action='store_true', help='skip the prompt and perform database reset')
args = parser.parse_args(line.split())
generate_token = args.generate_token
skip_prompt = args.yes
request_generator = create_request_generator(self.graph_notebook_config.auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
logger.info(
f'used credentials_provider_mode={self.graph_notebook_config.iam_credentials_provider_type.name} and auth_mode={self.graph_notebook_config.auth_mode.name} to make system request')
if generate_token is False and args.token == '':
if skip_prompt:
res = initiate_database_reset(host, port, ssl, request_generator)
token = res['payload']['token']
res = perform_database_reset(token, host, port, ssl, request_generator)
logger.info(f'got the response {res}')
return res
output = widgets.Output()
source = 'Are you sure you want to delete all the data in your cluster?'
label = widgets.Label(source)
text_hbox = widgets.HBox([label])
check_box = widgets.Checkbox(
value=False,
disabled=False,
indent=False,
description='I acknowledge that upon deletion the cluster data will no longer be available.',
layout=widgets.Layout(width='600px', margin='5px 5px 5px 5px')
)
button_delete = widgets.Button(description="Delete")
button_cancel = widgets.Button(description="Cancel")
button_hbox = widgets.HBox([button_delete, button_cancel])
display(text_hbox, check_box, button_hbox, output)
def on_button_delete_clicked(b):
result = initiate_database_reset(host, port, ssl, request_generator)
text_hbox.close()
check_box.close()
button_delete.close()
button_cancel.close()
button_hbox.close()
if not check_box.value:
with output:
print('Checkbox is not checked.')
return
token = result['payload']['token']
if token == "":
with output:
print('Failed to get token.')
print(result)
return
result = perform_database_reset(token, host, port, ssl, request_generator)
if 'status' not in result or result['status'] != '200 OK':
with output:
print('Database reset failed, please try the operation again or reboot the cluster.')
print(result)
logger.error(result)
return
retry = 10
poll_interval = 5
interval_output = widgets.Output()
job_status_output = widgets.Output()
status_hbox = widgets.HBox([interval_output])
vbox = widgets.VBox([status_hbox, job_status_output])
display(vbox)
last_poll_time = time.time()
while retry > 0:
time_elapsed = int(time.time() - last_poll_time)
time_remaining = poll_interval - time_elapsed
interval_output.clear_output()
if time_elapsed > poll_interval:
with interval_output:
print('checking status...')
job_status_output.clear_output()
with job_status_output:
display_html(HTML(loading_wheel_html))
try:
retry -= 1
interval_check_response = get_status(host, port, ssl, request_generator)
except Exception as e:
# Exception is expected when database is resetting, continue waiting
with job_status_output:
last_poll_time = time.time()
time.sleep(1)
continue
job_status_output.clear_output()
with job_status_output:
if interval_check_response["status"] == 'healthy':
interval_output.close()
print('Database has been reset.')
return
last_poll_time = time.time()
else:
with interval_output:
print(f'checking status in {time_remaining} seconds')
time.sleep(1)
with output:
print(result)
if interval_check_response["status"] != 'healthy':
print("Could not retrieve the status of the reset operation within the allotted time. "
"If the database is not healthy after 1 min, please try the operation again or "
"reboot the cluster.")
def on_button_cancel_clicked(b):
text_hbox.close()
check_box.close()
button_delete.close()
button_cancel.close()
button_hbox.close()
with output:
print('Database reset operation has been canceled.')
button_delete.on_click(on_button_delete_clicked)
button_cancel.on_click(on_button_cancel_clicked)
return
elif generate_token:
res = initiate_database_reset(host, port, ssl, request_generator)
else:
# args.token is an array of a single string, e.g., args.token=['<PASSWORD>'], use index 0 to take the string
res = perform_database_reset(args.token[0], host, port, ssl, request_generator)
logger.info(f'got the response {res}')
return res
@line_magic
@needs_local_scope
@display_exceptions
def load(self, line='', local_ns: dict = None):
parser = argparse.ArgumentParser()
parser.add_argument('-s', '--source', default='s3://')
parser.add_argument('-l', '--loader-arn', default=self.graph_notebook_config.load_from_s3_arn)
parser.add_argument('-f', '--format', choices=LOADER_FORMAT_CHOICES, default='')
parser.add_argument('-p', '--parallelism', choices=PARALLELISM_OPTIONS, default=PARALLELISM_HIGH)
parser.add_argument('-r', '--region', default=self.graph_notebook_config.aws_region)
parser.add_argument('--fail-on-failure', action='store_true', default=False)
parser.add_argument('--update-single-cardinality', action='store_true', default=True)
parser.add_argument('--store-to', type=str, default='', help='store query result to this variable')
parser.add_argument('--run', action='store_true', default=False)
args = parser.parse_args(line.split())
# since this can be a long-running task, freezing variables in the case
# that a user alters them in another command.
host = self.graph_notebook_config.host
port = self.graph_notebook_config.port
ssl = self.graph_notebook_config.ssl
credentials_provider_mode = self.graph_notebook_config.iam_credentials_provider_type
request_generator = create_request_generator(self.graph_notebook_config.auth_mode, credentials_provider_mode)
region = self.graph_notebook_config.aws_region
button = widgets.Button(description="Submit")
output = widgets.Output()
source = widgets.Text(
value=args.source,
placeholder='Type something',
description='Source:',
disabled=False,
)
arn = widgets.Text(
value=args.loader_arn,
placeholder='Type something',
description='Load ARN:',
disabled=False
)
source_format = widgets.Dropdown(
options=LOADER_FORMAT_CHOICES,
value=args.format,
description='Format: ',
disabled=False
)
region_box = widgets.Text(
value=region,
placeholder=args.region,
description='AWS Region:',
disabled=False
)
fail_on_error = widgets.Dropdown(
options=['TRUE', 'FALSE'],
value=str(args.fail_on_failure).upper(),
description='Fail on Failure: ',
disabled=False
)
parallelism = widgets.Dropdown(
options=PARALLELISM_OPTIONS,
value=args.parallelism,
description='Parallelism :',
disabled=False
)
update_single_cardinality = widgets.Dropdown(
options=['TRUE', 'FALSE'],
value=str(args.update_single_cardinality).upper(),
description='Update Single Cardinality:',
disabled=False,
)
source_hbox = widgets.HBox([source])
arn_hbox = widgets.HBox([arn])
source_format_hbox = widgets.HBox([source_format])
display(source_hbox, source_format_hbox, region_box, arn_hbox, fail_on_error, parallelism,
update_single_cardinality, button, output)
def on_button_clicked(b):
source_hbox.children = (source,)
arn_hbox.children = (arn,)
source_format_hbox.children = (source_format,)
validated = True
validation_label_style = DescriptionStyle(color='red')
if not (source.value.startswith('s3://') and len(source.value) > 7) and not source.value.startswith('/'):
validated = False
source_validation_label = widgets.HTML(
'<p style="color:red;">Source must be an s3 bucket or file path</p>')
source_validation_label.style = validation_label_style
source_hbox.children += (source_validation_label,)
if source_format.value == '':
validated = False
source_format_validation_label = widgets.HTML('<p style="color:red;">Format cannot be blank.</p>')
source_format_hbox.children += (source_format_validation_label,)
if not arn.value.startswith('arn:aws') and source.value.startswith(
"s3://"): # only do this validation if we are using an s3 bucket.
validated = False
arn_validation_label = widgets.HTML('<p style="color:red;">Load ARN must start with "arn:aws"</p>')
arn_hbox.children += (arn_validation_label,)
if not validated:
return
source_exp = os.path.expandvars(
source.value) # replace any env variables in source.value with their values, can use $foo or ${foo}. Particularly useful for ${AWS_REGION}
logger.info(f'using source_exp: {source_exp}')
try:
load_result = do_load(host, port, source_format.value, ssl, str(source_exp), region_box.value,
arn.value,
fail_on_error.value, parallelism.value, update_single_cardinality.value,
request_generator)
store_to_ns(args.store_to, load_result, local_ns)
source_hbox.close()
source_format_hbox.close()
region_box.close()
arn_hbox.close()
fail_on_error.close()
parallelism.close()
update_single_cardinality.close()
button.close()
output.close()
if 'status' not in load_result or load_result['status'] != '200 OK':
with output:
print('Something went wrong.')
print(load_result)
logger.error(load_result)
return
load_id_label = widgets.Label(f'Load ID: {load_result["payload"]["loadId"]}')
poll_interval = 5
interval_output = widgets.Output()
job_status_output = widgets.Output()
load_id_hbox = widgets.HBox([load_id_label])
status_hbox = widgets.HBox([interval_output])
vbox = widgets.VBox([load_id_hbox, status_hbox, job_status_output])
display(vbox)
last_poll_time = time.time()
while True:
time_elapsed = int(time.time() - last_poll_time)
time_remaining = poll_interval - time_elapsed
interval_output.clear_output()
if time_elapsed > poll_interval:
with interval_output:
print('checking status...')
job_status_output.clear_output()
with job_status_output:
display_html(HTML(loading_wheel_html))
try:
interval_check_response = get_load_status(host, port, ssl, request_generator,
load_result['payload']['loadId'])
except Exception as e:
logger.error(e)
with job_status_output:
print('Something went wrong updating job status. Ending.')
return
job_status_output.clear_output()
with job_status_output:
print(f'Overall Status: {interval_check_response["payload"]["overallStatus"]["status"]}')
if interval_check_response["payload"]["overallStatus"]["status"] in FINAL_LOAD_STATUSES:
interval_output.close()
print('Done.')
return
last_poll_time = time.time()
else:
with interval_output:
print(f'checking status in {time_remaining} seconds')
time.sleep(1)
except HTTPError as httpEx:
output.clear_output()
with output:
print(httpEx.response.content.decode('utf-8'))
button.on_click(on_button_clicked)
if args.run:
on_button_clicked(None)
@line_magic
@display_exceptions
@needs_local_scope
def load_ids(self, line, local_ns: dict = None):
parser = argparse.ArgumentParser()
parser.add_argument('--store-to', type=str, default='')
args = parser.parse_args(line.split())
credentials_provider_mode = self.graph_notebook_config.iam_credentials_provider_type
request_generator = create_request_generator(self.graph_notebook_config.auth_mode, credentials_provider_mode)
res = get_loader_jobs(self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator)
ids = []
if 'payload' in res and 'loadIds' in res['payload']:
ids = res['payload']['loadIds']
labels = [widgets.Label(value=label_id) for label_id in ids]
if not labels:
labels = [widgets.Label(value="No load IDs found.")]
vbox = widgets.VBox(labels)
display(vbox)
if args.store_to != '' and local_ns is not None:
local_ns[args.store_to] = res
@line_magic
@display_exceptions
@needs_local_scope
def load_status(self, line, local_ns: dict = None):
parser = argparse.ArgumentParser()
parser.add_argument('load_id', default='', help='loader id to check status for')
parser.add_argument('--store-to', type=str, default='')
args = parser.parse_args(line.split())
credentials_provider_mode = self.graph_notebook_config.iam_credentials_provider_type
request_generator = create_request_generator(self.graph_notebook_config.auth_mode, credentials_provider_mode)
res = get_load_status(self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator, args.load_id)
print(json.dumps(res, indent=2))
if args.store_to != '' and local_ns is not None:
local_ns[args.store_to] = res
@line_magic
@display_exceptions
@needs_local_scope
def cancel_load(self, line, local_ns: dict = None):
parser = argparse.ArgumentParser()
parser.add_argument('load_id', default='', help='loader id to check status for')
parser.add_argument('--store-to', type=str, default='')
args = parser.parse_args(line.split())
credentials_provider_mode = self.graph_notebook_config.iam_credentials_provider_type
request_generator = create_request_generator(self.graph_notebook_config.auth_mode, credentials_provider_mode)
res = cancel_load(self.graph_notebook_config.host, self.graph_notebook_config.port,
self.graph_notebook_config.ssl, request_generator, args.load_id)
if res:
print('Cancelled successfully.')
else:
print('Something went wrong cancelling bulk load job.')
if args.store_to != '' and local_ns is not None:
local_ns[args.store_to] = res
@line_magic
@display_exceptions
def seed(self, line):
parser = argparse.ArgumentParser()
parser.add_argument('--language', type=str, default='', choices=SEED_LANGUAGE_OPTIONS)
parser.add_argument('--dataset', type=str, default='')
parser.add_argument('--endpoint-prefix', '-e', default='',
help='prefix path to query endpoint. For example, "foo/bar". The queried path would then be /foo/bar/sparql for sparql seed commands')
parser.add_argument('--run', action='store_true')
args = parser.parse_args(line.split())
output = widgets.Output()
progress_output = widgets.Output()
language_dropdown = widgets.Dropdown(
options=SEED_LANGUAGE_OPTIONS,
description='Language:',
disabled=False
)
data_set_drop_down = widgets.Dropdown(
description='Data set:',
disabled=False
)
submit_button = widgets.Button(description="Submit")
data_set_drop_down.layout.visibility = 'hidden'
submit_button.layout.visibility = 'hidden'
def on_value_change(change):
selected_language = change['new']
data_sets = get_data_sets(selected_language)
data_sets.sort()
data_set_drop_down.options = [ds for ds in data_sets if
ds != '__pycache__'] # being extra sure that we aren't passing __pycache__ here.
data_set_drop_down.layout.visibility = 'visible'
submit_button.layout.visibility = 'visible'
return
def on_button_clicked(b=None):
submit_button.close()
language_dropdown.disabled = True
data_set_drop_down.disabled = True
language = language_dropdown.value.lower()
data_set = data_set_drop_down.value.lower()
with output:
print(f'Loading data set {data_set} with language {language}')
queries = get_queries(language, data_set)
if len(queries) < 1:
with output:
print('Did not find any queries for the given dataset')
return
load_index = 1 # start at 1 to have a non-empty progress bar
progress = widgets.IntProgress(
value=load_index,
min=0,
max=len(queries) + 1, # len + 1 so we can start at index 1
orientation='horizontal',
bar_style='info',
description='Loading:'
)
with progress_output:
display(progress)
# TODO: gremlin_prefix path is not supported yet
sparql_prefix = args.endpoint_prefix if args.endpoint_prefix != '' else self.graph_notebook_config.sparql.endpoint_prefix
for q in queries:
with output:
print(f'{progress.value}/{len(queries)}:\t{q["name"]}')
# Just like with the load command, seed is long-running
# as such, we want to obtain the values of host, port, etc. in case they
# change during execution.
host = self.graph_notebook_config.host
port = self.graph_notebook_config.port
auth_mode = self.graph_notebook_config.auth_mode
ssl = self.graph_notebook_config.ssl
if language == 'gremlin':
client_provider = create_client_provider(auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
# IMPORTANT: We treat each line as its own query!
for line in q['content'].splitlines():
try:
do_gremlin_query(line, host, port, ssl, client_provider)
except GremlinServerError as gremlinEx:
try:
error = json.loads(gremlinEx.args[0][5:]) # remove the leading error code.
content = json.dumps(error, indent=2)
except Exception:
content = {
'error': gremlinEx
}
with output:
print(content)
progress.close()
return
except Exception as e:
content = {
'error': e
}
with output:
print(content)
progress.close()
return
else:
request_generator = create_request_generator(auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
try:
do_sparql_query(q['content'], host, port, ssl, request_generator, path_prefix=sparql_prefix)
except HTTPError as httpEx:
# attempt to turn response into json
try:
error = json.loads(httpEx.response.content.decode('utf-8'))
content = json.dumps(error, indent=2)
except Exception:
content = {
'error': httpEx
}
with output:
print(content)
progress.close()
return
except Exception as ex:
content = {
'error': str(ex)
}
with output:
print(content)
progress.close()
return
progress.value += 1
progress.close()
with output:
print('Done.')
return
submit_button.on_click(on_button_clicked)
language_dropdown.observe(on_value_change, names='value')
display(language_dropdown, data_set_drop_down, submit_button, progress_output, output)
if args.language != '':
language_dropdown.value = args.language
if args.dataset != '' and args.dataset in data_set_drop_down.options:
data_set_drop_down.value = args.dataset.lower()
if args.run:
on_button_clicked()
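    # Usage sketch (assumed invocation; the dataset name is a placeholder):
    #   %seed --language sparql --dataset airports --run
    # pre-selects both dropdowns and submits without further clicks.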
@line_magic
def enable_debug(self, line):
logger.setLevel(logging.DEBUG)
@line_magic
def disable_debug(self, line):
logger.setLevel(logging.ERROR)
@line_magic
def graph_notebook_version(self, line):
print(graph_notebook.__version__)
@line_cell_magic
@display_exceptions
def graph_notebook_vis_options(self, line='', cell=''):
if line == 'reset':
self.graph_notebook_vis_options = OPTIONS_DEFAULT_DIRECTED
if cell == '':
print(json.dumps(self.graph_notebook_vis_options, indent=2))
else:
options_dict = json.loads(cell)
self.graph_notebook_vis_options = vis_options_merge(self.graph_notebook_vis_options, options_dict)
@line_cell_magic
@display_exceptions
@needs_local_scope
def neptune_ml(self, line, cell='', local_ns: dict = None):
parser = generate_neptune_ml_parser()
args = parser.parse_args(line.split())
logger.info(f'received call to neptune_ml with details: {args.__dict__}, cell={cell}, local_ns={local_ns}')
request_generator = create_request_generator(self.graph_notebook_config.auth_mode,
self.graph_notebook_config.iam_credentials_provider_type)
main_output = widgets.Output()
display(main_output)
res = neptune_ml_magic_handler(args, request_generator, self.graph_notebook_config, main_output, cell, local_ns)
message = json.dumps(res, indent=2) if type(res) is dict else res
store_to_ns(args.store_to, res, local_ns)
with main_output:
print(message)
```
#### File: network/gremlin/gremlin_network_with_pattern.py
```python
from graph_notebook.gremlin.query import do_gremlin_query
from graph_notebook.network.gremlin.GremlinNetwork import GremlinNetwork, PathPattern
from test.integration import DataDrivenGremlinTest
class TestGremlinNetwork(DataDrivenGremlinTest):
def test_add_path_with_edge_object(self):
query = "g.V().has('airport','code','AUS').outE().inV().path().by('code').by().limit(10)"
results = do_gremlin_query(query, self.host, self.port, self.ssl, self.client_provider)
gn = GremlinNetwork()
pattern = [PathPattern.V, PathPattern.OUT_E, PathPattern.IN_V]
gn.add_results_with_pattern(results, pattern)
self.assertEqual(11, len(gn.graph.nodes))
self.assertEqual(10, len(gn.graph.edges))
def test_add_path_by_dist(self):
query = """g.V().has('airport','code','AUS').
repeat(outE().inV().simplePath()).
until(has('code','WLG')).
limit(5).
path().
by('code').
by('dist')"""
results = do_gremlin_query(query, self.host, self.port, self.ssl, self.client_provider)
gn = GremlinNetwork()
pattern = [PathPattern.V, PathPattern.OUT_E, PathPattern.IN_V, PathPattern.OUT_E]
gn.add_results_with_pattern(results, pattern)
self.assertEqual(8, len(gn.graph.nodes))
self.assertEqual(11, len(gn.graph.edges))
def test_path_with_dict(self):
query = """g.V().has('airport','code','CZM').
out('route').
path().
by(valueMap('code','city','region','desc','lat','lon').
order(local).
by(keys))"""
results = do_gremlin_query(query, self.host, self.port, self.ssl, self.client_provider)
gn = GremlinNetwork()
pattern = [PathPattern.V, PathPattern.IN_V]
gn.add_results_with_pattern(results, pattern)
self.assertEqual(12, len(gn.graph.nodes))
self.assertEqual(11, len(gn.graph.edges))
def test_out_v_unhashable_dict(self):
query = """g.V().
hasLabel('country').
has('desc','Jamaica').
out().
path().
by(valueMap())"""
results = do_gremlin_query(query, self.host, self.port, self.ssl, self.client_provider)
gn = GremlinNetwork()
pattern = [PathPattern.V, PathPattern.OUT_V]
gn.add_results_with_pattern(results, pattern)
node = gn.graph.nodes.get('graph_notebook-2f363b2fa995d0567e638a240efd0a26')
self.assertEqual(["Jamaica"], node['properties']['desc'])
```
#### File: integration/notebook/test_gremlin_graph_notebook.py
```python
from test.integration.notebook.GraphNotebookIntegrationTest import GraphNotebookIntegrationTest
class TestGraphMagicGremlin(GraphNotebookIntegrationTest):
def tearDown(self) -> None:
delete_query = "g.V('graph-notebook-test').drop()"
self.ip.run_cell_magic('gremlin', 'query', delete_query)
def test_gremlin_query(self):
label = 'graph-notebook-test'
query = f"g.addV('{label}')"
store_to_var = 'gremlin_res'
self.ip.run_cell_magic('gremlin', f'query --store-to {store_to_var}', query)
self.assertFalse('graph_notebook_error' in self.ip.user_ns)
gremlin_res = self.ip.user_ns[store_to_var]
# TODO: how can we get a look at the objects which were displayed?
self.assertEqual(gremlin_res[0].label, label)
``` |
{
"source": "JoyWambui/1minute",
"score": 3
} |
#### File: 1minute/app/models.py
```python
import re
from sqlalchemy.orm import backref, lazyload
from . import db
from werkzeug.security import generate_password_hash,check_password_hash
from flask_login import UserMixin
from . import login_manager
from datetime import datetime
class User(UserMixin,db.Model):
"""Class that defines a User Model and helps us create new users."""
__tablename__ = "users"
id = db.Column(db.Integer,primary_key = True)
username = db.Column(db.String(255))
email = db.Column(db.String(255),unique=True,index=True)
user_password = db.Column(db.String(255))
pitches = db.relationship("Pitch",backref="user",lazy="dynamic")
comments = db.relationship("Comment", backref="user",lazy="dynamic")
categories = db.relationship("Category", backref="user",lazy="dynamic")
@property
def password(self):
"""Defines a write only class property password"""
raise AttributeError("You cannot read the password attribute")
@password.setter
def password(self, password):
"""Method that takes in a password and hashes it."""
self.user_password = generate_password_hash(password)
def password_verification(self,password):
"""Method that checks whether a hashed password and user password that it hashes match."""
return check_password_hash(self.user_password,password)
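    # Illustrative use of the write-only property (a sketch, not original code):
    #   user = User(username='jane')
    #   user.password = 'secret'                 # stored hashed in user_password
    #   user.password_verification('secret')     # -> True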
def get_user_id(self):
return self.id
def __repr__(self):
return f'User {self.username}'
@login_manager.user_loader
def loads_user(user_id):
"""Method that queries the datbase and gets a user with the passed id."""
return User.query.get(int(user_id))
class Pitch(db.Model):
"""Class that defines a Pitch Model and creates new pitches."""
__tablename__ = "pitches"
    id = db.Column(db.Integer,primary_key = True)
user_pitch = db.Column(db.String(800))
time_posted = db.Column(db.DateTime,default=datetime.now)
user_id = db.Column(db.Integer,db.ForeignKey("users.id"))
category_id = db.Column(db.Integer,db.ForeignKey("categories.id"))
comments = db.relationship("Comment",backref="pitch",lazy="dynamic")
def save_pitch(self):
db.session.add(self)
db.session.commit()
@classmethod
def get_user_pitches(cls,id):
"""Returns all a user's pitches."""
user_pitches = Pitch.query.filter_by(user_id=id).all()
return user_pitches
@classmethod
def get_all_pitches(cls):
"""Returns all pitches."""
pitches = Pitch.query.all()
return pitches
class Category(db.Model):
"""Creates category instances and defines category methods."""
__tablename__="categories"
id = db.Column(db.Integer,primary_key = True)
name = db.Column(db.String(255))
user_id = db.Column(db.Integer,db.ForeignKey("users.id"))
pitches = db.relationship("Pitch",backref="category",lazy="dynamic")
def save_category(self):
db.session.add(self)
db.session.commit()
@classmethod
def get_categories(cls):
"""Returns all categories."""
categories = Category.query.all()
return categories
class Comment(db.Model):
"""Class that defines a Pitch Model and creates new pitches."""
__tablename__ = "comments"
id = id = db.Column(db.Integer,primary_key = True)
author=db.Column(db.String(100))
comment = db.Column(db.String)
time_posted = db.Column(db.DateTime,default=datetime.now)
user_id = db.Column(db.Integer,db.ForeignKey("users.id"))
pitch_id = db.Column(db.Integer,db.ForeignKey("pitches.id"))
def save_comment(self):
db.session.add(self)
db.session.commit()
@classmethod
def get_pitch_comments(cls,id):
"""Returns all a pitch's comments."""
pitch_comments = Comment.query.filter_by(pitch_id=id).all()
return pitch_comments
``` |
{
"source": "JoyWambui/Developer-Awards",
"score": 2
} |
#### File: Developer-Awards/awards/views.py
```python
from django.http import Http404, HttpResponseRedirect
from django.shortcuts import redirect, render
from django.contrib.auth import login, authenticate
from django.urls import reverse, reverse_lazy
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from django.db.models import Avg
from django.views import generic
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.renderers import TemplateHTMLRenderer
from rest_framework import status,viewsets
from .serializer import ProfileSerializer,ProjectSerializer
from .forms import SignUpForm
from .permissions import IsAuthenticatedOrReadOnly
from .models import *
def signup(request):
'''View function that signs up a new user'''
if request.method == 'POST':
form = SignUpForm(request.POST)
if form.is_valid():
form.save()
username = form.cleaned_data.get('username')
            # NOTE: the password field name was redacted in the source; 'password1'
            # (the Django UserCreationForm default) is assumed here.
            raw_password = form.cleaned_data.get('password1')
            user = authenticate(username=username, password=raw_password)
login(request, user)
messages.success(request, "Congratulations! Your user account has been created.")
return redirect(reverse('homepage'))
else:
form = SignUpForm()
title = 'Create New Account'
context={
'title': title,
'form': form,
}
return render(request, 'registration/signup.html', context)
def homepage(request):
return render(request, 'homepage.html')
def search_results(request):
if 'project' in request.GET and request.GET["project"]:
search_term = request.GET.get("project")
project_list = Project.objects.filter(title__icontains=search_term)
return render(request, 'search_results.html', {"project_list":project_list})
else:
message = "You haven't searched for any term"
return render(request, 'search_results.html',{"message":message})
#PROFILE LOGIC
class ProfileListView(generic.ListView):
model=Profile
class ProfileDetailView(generic.DetailView):
model = Profile
def get_context_data(self, **kwargs):
context = super(ProfileDetailView, self).get_context_data(**kwargs)
context['projects'] = Project.objects.filter(author=self.object.user).all()
return context
class ProfileUpdateView(LoginRequiredMixin ,generic.UpdateView):
login_url='/login/'
model = Profile
fields = [
"profile_photo",
"bio",
"phone_number"
]
#PROJECT LOGIC
class ProjectCreateView(LoginRequiredMixin,generic.CreateView):
login_url='/login/'
model = Project
fields = ['image','title', 'description','link']
def get_success_url(self):
return reverse('profile', kwargs={'pk': self.object.author.id})
def form_valid(self, form):
form.instance.author = self.request.user
return super(ProjectCreateView, self).form_valid(form)
class ProjectListView(generic.ListView):
model=Project
def get_context_data(self,**kwargs):
context = super(ProjectListView,self).get_context_data(**kwargs)
return context
class ProjectDetailView(generic.DetailView):
model = Project
def get_context_data(self, **kwargs):
context = super(ProjectDetailView, self).get_context_data(**kwargs)
context['votes'] = Rate.objects.filter(rated_project=self.object.pk).all()
context['total_design'] = Rate.objects.filter(rated_project=self.object.pk).all().aggregate(Avg('design'))
context['total_usability'] = Rate.objects.filter(rated_project=self.object.pk).all().aggregate(Avg('usability'))
context['total_content'] = Rate.objects.filter(rated_project=self.object.pk).all().aggregate(Avg('content'))
context['total_score'] = Rate.objects.filter(rated_project=self.object.pk).all().aggregate(Avg('score'))
print(Rate.objects.filter(rated_project=self.object.pk).all().aggregate(Avg('score')))
return context
class ProjectUpdateView(LoginRequiredMixin,generic.UpdateView):
login_url='/login/'
model = Project
fields = [
"title",
"description",
"image",
"link"
]
def get_success_url(self):
return reverse('project', kwargs={'pk': self.object.pk})
class ProjectDeleteView(LoginRequiredMixin,generic.DeleteView):
login_url='/login/'
model = Project
def get_success_url(self):
return reverse('profile', kwargs={'pk': self.request.user.id})
#RATE LOGIC
class RateCreateView(LoginRequiredMixin,generic.CreateView):
login_url='/login/'
model = Rate
fields = ['design','usability', 'content']
def get_success_url(self):
return reverse('project', kwargs={'pk': self.object.rated_project.pk})
def form_valid(self, form):
form.instance.rated_project = Project.objects.get(id=self.kwargs.get('pk'))
form.instance.reviewer = self.request.user
form.instance.score = (form.instance.design+form.instance.usability+form.instance.content)/3
return super(RateCreateView, self).form_valid(form)
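    # Score is the plain average of the three ratings,
    # e.g. design=8, usability=7, content=9 -> (8+7+9)/3 = 8.0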
class RateUpdateView(LoginRequiredMixin,generic.UpdateView):
login_url='/login/'
model = Rate
fields = ['design','usability', 'content']
def get_success_url(self):
return reverse('project', kwargs={'pk': self.object.rated_project.pk})
def form_valid(self, form):
form.instance.score = (form.instance.design+form.instance.usability+form.instance.content)/3
return super(RateUpdateView, self).form_valid(form)
class RateDeleteView(LoginRequiredMixin,generic.DeleteView):
login_url='/login/'
model = Rate
def get_success_url(self):
return reverse('project', kwargs={'pk': self.object.rated_project.id})
#API LOGIC
class ProfileViewSet(viewsets.ModelViewSet):
"""
This viewset automatically provides `list` and `retrieve` actions.
"""
queryset = Profile.objects.all()
serializer_class = ProfileSerializer
permission_classes = [IsAuthenticatedOrReadOnly,]
def perform_create(self, serializer):
serializer.save(user=self.request.user)
class ProjectViewSet(viewsets.ModelViewSet):
"""
This viewset automatically provides `list` and `retrieve` actions.
"""
queryset = Project.objects.all()
serializer_class = ProjectSerializer
permission_classes = [IsAuthenticatedOrReadOnly,]
def perform_create(self, serializer):
serializer.save(author=self.request.user)
# class ProfileList(APIView):
# # renderer_classes = [TemplateHTMLRenderer]
# # template_name = 'profiles.html'
# def perform_create(self, serializer):
# serializer.save(user=self.request.user)
# def get(self, request, format=None):
# profiles = Profile.get_profiles()
# serializers = ProfileSerializer(profiles,many=True)
# return Response({'serializers':serializers.data,})#'profiles':profiles})
# def post(self, request, format=None):
# serializers = ProfileSerializer(data=request.data)
# if serializers.is_valid():
# serializers.save()
# return Response(serializers.data, status=status.HTTP_201_CREATED)
# return Response(serializers.errors, status=status.HTTP_400_BAD_REQUEST)
# class ProfileView(APIView):
# renderer_classes = [TemplateHTMLRenderer]
# template_name = 'profile.html'
# def get_single_profile(self,pk):
# try:
# return Profile.objects.filter(id=pk).get()
# except Profile.DoesNotExist:
# raise Http404
# def get(self, request, pk, format=None):
# user=request.user
# profile = get_object_or_404(Profile, pk=pk)
# serializers = ProfileSerializer(profile)
# print(serializers.data)
# return Response({'serializer':serializers.data,'profile':profile})
# def post(self, request, pk,format=None):
# user=request.user
# if pk: # the update request
# profile = get_object_or_404(Profile, id=pk)
# serializers = ProfileSerializer(profile, data=request.data)
# else: # the create request
# serializers = ProfileSerializer(data=request.data)
# if not serializers.is_valid():
# return Response({'serializers': serializers,'profile':profile})
# serializers.save()
# return redirect('profile', pk=profile.id)
# class ProjectList(APIView):
# # renderer_classes = [TemplateHTMLRenderer]
# # template_name = 'projects.html'
# def get(self, request, format=None):
# projects = Project.get_projects()
# serializers = ProjectSerializer(projects,many=True)
# return Response({'serializers':serializers.data,'projects':projects})
# def post(self, request, format=None):
# serializers = ProjectSerializer(data=request.data)
# if serializers.is_valid():
# serializers.save()
# return Response(serializers.data, status=status.HTTP_201_CREATED)
# return Response(serializers.errors, status=status.HTTP_400_BAD_REQUEST)
``` |
{
"source": "JoyWambui/habari-gani",
"score": 3
} |
#### File: habari-gani/app/models.py
```python
class Source:
"""Source class to define News Source Objects"""
def __init__(self,id,name,description,url,category,country):
self.id = id
self.name = name
self.description = description
self.url = url
self.category = category
self.country = country
class Article:
"""Source class to define Article Objects from a news source"""
def __init__(self,author,article_title,article_description,article_url,image_url,published):
self.author = author
self.article_title = article_title
self.article_description = article_description
self.article_url = article_url
self.image_url = image_url
self.published = published
``` |
{
"source": "JoyWambui/instaphotoz",
"score": 2
} |
#### File: instaphotoz/gallery/models.py
```python
from datetime import datetime
from django.db import models
from django.contrib.auth.models import User
from tinymce import models as tinymce_models
from cloudinary.models import CloudinaryField
from django.dispatch import receiver
from django.db.models.signals import post_save
# Create your models here.
class Profile(models.Model):
'''Model that defines a user profile and its methods'''
user = models.OneToOneField(User,related_name='profile', on_delete=models.CASCADE)
profile_photo = models.ImageField(upload_to='images/', blank=True, null=True)
bio = tinymce_models.HTMLField()
first_name = models.CharField(max_length=50,blank=True)
last_name = models.CharField(max_length=50,blank=True)
email = models.EmailField()
followers = models.ManyToManyField(User, blank=True, related_name='followers', symmetrical=False)
@receiver(post_save, sender=User)
def save_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
instance.profile.save()
# def get_user_profile()
class Image(models.Model):
'''Model that defines an image upload and its methods'''
image = models.ImageField(upload_to='images/')
image_name = models.CharField(max_length=30)
image_caption = models.TextField()
author = models.ForeignKey(User, on_delete=models.CASCADE, null=True)
likes = models.ManyToManyField(User, blank=True, related_name='likes')
def __str__(self):
return self.image_name
class Comment(models.Model):
'''Model that defines a comment '''
comment = tinymce_models.HTMLField()
comment_image = models.ForeignKey('Image', related_name='comment_image', on_delete=models.CASCADE )
comment_author = models.ForeignKey(User, related_name='comment_author', on_delete=models.CASCADE )
created = models.DateTimeField(default=datetime.now)
``` |
{
"source": "JoyWambui/Pichaz",
"score": 3
} |
#### File: Pichaz/gallery/tests.py
```python
from django.test import TestCase
from .models import Image, Location, Category
class ImageTestCase(TestCase):
'''Test Class to test the Image Model and its methods'''
def setUp(self):
'''Defines new instances'''
self.location = Location(location_name='nairobi')
self.location.save()
self.category = Category(category_name='food')
self.category.save()
self.image = Image(image_path='imagepath',image_name='spurs',description='a plate of ribs',location=self.location)
self.image.save()
self.image.categories.add(self.category)
def tearDown(self):
'''Clears the database after every test'''
Image.objects.all().delete()
Location.objects.all().delete()
Category.objects.all().delete()
def test_save_image(self):
'''Tests if an image is saved'''
self.image.save_image()
images = Image.objects.all()
self.assertTrue(len(images)==1)
def test_get_images(self):
'''Tests that all images are returned'''
self.image.save_image()
self.location1 = Location(location_name='thika')
self.location1.save()
self.category1 = Category(category_name='travel')
self.category1.save()
self.image1 = Image(image_path='imagepath1',image_name='pineapple',description='a pinapple farm',location=self.location1)
self.image1.save_image()
self.image1.categories.add(self.category1)
images = Image.get_images()
self.assertTrue(len(images)==2)
    def test_get_single_image(self):
        '''Retrieves an image instance from the database by id'''
        self.image.save_image()
        got_image = Image.get_single_image(self.image.pk)
        self.assertEquals(self.image,got_image)
# def test_update_image(self):
# '''Tests whether an image instance can be updated'''
# self.image.save_image()
# update = Image.update_image(5,image_name ='pineapples')
# print(self.image.image_name)
# #self.assertEquals(self.image.image_name,'pineapples')
    def test_delete_image(self):
        '''Tests whether an image instance is deleted'''
        self.image.save_image()
        Image.delete_image(self.image.pk)
        images = Image.get_images()
        self.assertTrue(len(images)==0)
def test_search_by_category(self):
'''Tests that a category's images are returned'''
self.image.save_image()
self.location1 = Location(location_name='thika')
self.location1.save()
self.category1 = Category(category_name='travel')
self.category1.save()
self.image1 = Image(image_path='imagepath1',image_name='pineapple',description='a pinapple farm',location=self.location1)
self.image1.save_image()
self.image1.categories.add(self.category1)
images = Image.search_by_category('travel')
self.assertTrue(len(images)>0)
def test_view_by_location(self):
'''Tests that all images with a certain location are returned'''
self.image.save_image()
images = Image.view_by_location('nairobi')
self.assertTrue(len(images)>0)
class LocationTestCase(TestCase):
'''Tests the Location Model and its methods'''
def setUp(self):
'''Creates an instance of the Location Model'''
self.location = Location(location_name='nairobi')
def tearDown(self):
'''Clears the database after every test'''
Location.objects.all().delete()
def test_save_location(self):
'''Tests if a location is saved'''
self.location.save_location()
locations = Location.objects.all()
self.assertTrue(len(locations)==1)
def test_get_locations(self):
'''Tests that all location instances are returned'''
self.location.save_location()
self.location1 = Location(location_name='thika')
self.location1.save()
locations = Location.get_locations()
self.assertTrue(len(locations)==2)
# def test_update_location(self):
# '''Tests that a location can be updated'''
# self.location.save_location()
# print('loc=',self.location.pk)
# Location.update_location(9,'thika')
# self.assertEquals(self.location.location_name,'thika')
    def test_delete_location(self):
        '''Tests that a location instance is deleted'''
        self.location.save_location()
        Location.delete_location(self.location.pk)
        locations = Location.get_locations()
        self.assertTrue(len(locations)==0)
class CategoryTestCase(TestCase):
'''Tests the Category Model and its methods'''
def setUp(self):
'''Creates an instance of the Category Model'''
self.category = Category(category_name='business')
def tearDown(self):
'''Clears the database after every test'''
Category.objects.all().delete()
def test_save_category(self):
'''Tests if a category is saved'''
self.category.save_category()
categories = Category.objects.all()
self.assertTrue(len(categories)==1)
def test_get_categories(self):
'''Tests that all category instances are returned'''
self.category.save_category()
self.category1 = Category(category_name='fitness')
self.category1.save()
categories = Category.get_categories()
self.assertTrue(len(categories)==2)
# def test_update_category(self):
# '''Tests that a category can be updated'''
# self.category.save_category()
# print('loc=',self.category.pk)
# Category.update_category(9,'thika')
# self.assertEquals(self.category.category_name,'thika')
    def test_delete_category(self):
        '''Tests that a category instance is deleted'''
        self.category.save_category()
        Category.delete_category(self.category.pk)
        categories = Category.get_categories()
        self.assertTrue(len(categories)==0)
```
#### File: Pichaz/gallery/views.py
```python
from django.http import Http404
from django.shortcuts import render
from . models import Image,Category,Location
from django.contrib import messages
def index(request):
'''View function that displays uploaded photos'''
images = Image.get_images()
return render(request, 'index.html',{'images':images})
def search_categories(request):
'''View function that displays searched photos by category'''
if 'image' in request.GET and request.GET['image']:
search_term = request.GET.get("image")
searched_images = Image.search_by_category(search_term)
message = f"{search_term}"
return render(request, 'search.html',{"message":message,"images": searched_images})
else:
messages.add_message(request, messages.ERROR, "You haven't searched for any term.")
return render(request, 'search.html')
def locations(request):
'''View function that displays all locations'''
locations= Location.get_locations()
return render(request,'locations.html',{'locations':locations})
def location_images(request,id):
'''View function that displays all images based on a location'''
try:
location = Location.objects.get(id = id)
except Location.DoesNotExist:
raise Http404("Location does not exist")
images = Image.view_by_location(location.location_name)
return render(request,"location.html", {"images":images, 'location':location})
``` |
{
"source": "JoyWambui/write-a-way",
"score": 3
} |
#### File: app/auth/views.py
```python
from flask import render_template,redirect,url_for,flash,request
from flask_login import login_user,logout_user,login_required
from . import auth
from ..models import User
from .. import db
from .forms import RegistrationForm,LoginForm
@auth.route('/registration', methods=['GET','POST'])
def register():
registration_form= RegistrationForm()
if registration_form.validate_on_submit():
new_user = User(username=registration_form.sign_up_username.data,user_email=registration_form.sign_up_email.data,password=registration_form.sign_up_password.data)
db.session.add(new_user)
db.session.commit()
return redirect(url_for('auth.login'))
title= 'New Account'
return render_template('auth/register.html',registration_form=registration_form,title=title)
@auth.route('/login', methods=['GET','POST'])
def login():
login_form= LoginForm()
if login_form.validate_on_submit():
logged_in_user = User.query.filter_by(user_email=login_form.login_email.data).first()
if logged_in_user is not None and logged_in_user.verify_password_hash(login_form.login_password.data):
login_user(logged_in_user,login_form.remember_me.data)
return redirect(request.args.get('next')or url_for('main.index'))
flash('Invalid email or password!')
title= 'Writer Login'
return render_template('auth/login.html',login_form=login_form,title=title)
@auth.route('/logout')
@login_required
def logout():
logout_user()
return redirect(url_for('main.index'))
```
#### File: write-a-way/app/__init__.py
```python
from flask import Flask
from config import config_options
from flask_sqlalchemy import SQLAlchemy
from flask_bootstrap import Bootstrap
from flask_login import LoginManager
from flask_uploads import UploadSet,configure_uploads,IMAGES
from flask_simplemde import SimpleMDE
from flask_mail import Mail
#Creating extensions instances
db = SQLAlchemy()
bootstrap = Bootstrap()
images = UploadSet('images',IMAGES)
simple = SimpleMDE()
mail = Mail()
login_manager = LoginManager()
login_manager.session_protection='strong'
login_manager.login_view='auth.login'
def create_app(config_name):
"""Creates the application instance under different configurations."""
app = Flask(__name__)
#Creating app configurations
app.config.from_object(config_options[config_name])
configure_uploads(app,images)
#Initializing Flask extensions
db.init_app(app)
bootstrap.init_app(app)
login_manager.init_app(app)
simple.init_app(app)
mail.init_app(app)
#Registering Blueprints
from .main import main as main_blueprint
app.register_blueprint(main_blueprint)
from .auth import auth as auth_blueprint
app.register_blueprint(auth_blueprint,url_prefix='/authentication')
return app
```
#### File: write-a-way/app/models.py
```python
from . import db, login_manager
from datetime import datetime
from werkzeug.security import generate_password_hash,check_password_hash
from flask_login import UserMixin
@login_manager.user_loader
def load_user(user_id):
return User.query.get(user_id)
class User(UserMixin,db.Model):
"""Class that defines the User Model and its methods."""
__tablename__='users'
id = db.Column(db.Integer,primary_key=True)
username = db.Column(db.String(25), unique=True,nullable=False)
user_email = db.Column(db.String(120), unique=True,nullable=False)
user_password = db.Column(db.String(255), unique=True,nullable=False)
user_bio = db.Column(db.Text(),nullable=True)
user_account_image= db.Column(db.String(),nullable=True)
user_posts= db.relationship('Post',backref='author',lazy=True)
#Setting password and its hash
@property
def password(self):
raise AttributeError('This attribute cannot be accessed.')
@password.setter
def password(self,password):
self.user_password = generate_password_hash(password)
def verify_password_hash(self,password):
return check_password_hash(self.user_password,password)
def __repr__(self):
return f"User('{self.username}','{self.user_email}')"
class Post(db.Model):
"""Class that defines the Blog Post Model and its methods."""
__tablename__='posts'
id = db.Column(db.Integer,primary_key=True)
post_title= db.Column(db.String(60),nullable=False)
post_content= db.Column(db.Text,nullable=False)
post_creation = db.Column(db.DateTime,nullable=False,default=datetime.utcnow)
user_id = db.Column(db.Integer, db.ForeignKey('users.id'),nullable=False)
post_comments= db.relationship('Comment', cascade='all,delete,delete-orphan',backref='comment',lazy=True)
def __repr__(self):
return f"Post('{self.post_title}','{self.post_creation}')"
def save_posts(self):
"""Saves new posts to the database."""
db.session.add(self)
db.session.commit()
@classmethod
def get_user_posts(self,id):
"""Gets all a user's posts."""
got_posts= Post.query.filter_by(user_id=id).all()
return got_posts
@classmethod
def get_all_posts(self):
"""Gets all posts."""
all_posts= Post.query.all()
return all_posts
class Comment(db.Model):
"""Class that defines the Comment Model and its methods."""
__tablename__='comments'
id = db.Column(db.Integer,primary_key=True)
comment_title= db.Column(db.String(60),nullable=False)
comment_author= db.Column(db.String(60),nullable=False)
comment_content= db.Column(db.Text,nullable=False)
comment_creation = db.Column(db.DateTime,nullable=False,default=datetime.utcnow)
post_id = db.Column(db.Integer, db.ForeignKey('posts.id', ondelete='CASCADE'),nullable=False)
def save_comment(self):
"""Saves new comments to the database."""
db.session.add(self)
try:
db.session.commit()
except:
db.session.rollback()
@classmethod
def get_post_comments(self,id):
"""Gets all a post's comments."""
got_comments= Comment.query.filter_by(post_id=id).all()
return got_comments
class Subscription(db.Model):
"""Class that defines the Subscription Model and its methods."""
__tablename__='subscriptions'
id = db.Column(db.Integer,primary_key=True)
name = db.Column(db.String(25), unique=True,nullable=False)
email = db.Column(db.String(120), unique=True,nullable=False)
class Quote:
"""Defines a quote object."""
def __init__(self,id,author,random_quote,permalink):
self.id = id
self.author = author
self.random_quote = random_quote
self.permalink = permalink
```
#### File: write-a-way/app/request.py
```python
import urllib.request,json
from .models import Quote
def get_quotes():
random_quote_url= 'http://quotes.stormconsultancy.co.uk/random.json'
with urllib.request.urlopen(random_quote_url) as url:
get_quotes_data= url.read()
response = json.loads(get_quotes_data)
if response:
quote_response= process_response(response)
return quote_response
def process_response(response):
id = response.get('id')
author = response.get('author')
random_quote= response.get('quote')
permalink = response.get('permalink')
quote_object= Quote(id,author,random_quote,permalink)
return quote_object
```
#### File: write-a-way/tests/test_blog.py
```python
import unittest
from app.models import Post,User
from app import db
class TestPostModel(unittest.TestCase):
"""Tests the Post Model and its methods."""
def setUp(self):
"""Sets up a new user and post instance."""
self.new_user= User(username='rick',user_email='<EMAIL>',password='<PASSWORD>')
self.new_post=Post(post_title='rick and morty',post_content='adventures in space',author=self.new_user)
def tearDown(self):
"""Deletes all elements from the database after every test."""
Post.query.delete()
User.query.delete()
db.session.commit()
def test_check_instance_attributes(self):
"""Check if attribute values are correctly placed."""
self.assertEquals(self.new_post.post_title,'rick and morty')
self.assertEquals(self.new_post.post_content,'adventures in space')
self.assertEquals(self.new_post.author,self.new_user)
self.assertEquals(self.new_post.user_id,self.new_user.id)
def test_save_posts(self):
"""Check if a post is saved."""
self.new_post.save_posts()
self.assertTrue(len(Post.query.all())>0)
def test_get_user_posts(self):
"""Iest if a user's posts are returned."""
self.new_post.save_posts()
got_posts=Post.get_user_posts(self.new_post.user_id)
self.assertTrue(len(got_posts)==1)
def test_get_all_posts(self):
"""Iest if all posts are returned."""
self.new_post.save_posts()
second_user= User(username='morty',user_email='<EMAIL>',password='<PASSWORD>')
self.second_post=Post(post_title='morty and rick',post_content='space adventures',author=second_user)
self.second_post.save_posts()
all_posts=Post.get_all_posts()
self.assertTrue(len(all_posts)==2)
``` |
{
"source": "JoyXujingyan0830/A-unified-model-for-zero-shot-musical-source-separation-transcription-and-synthesis",
"score": 3
} |
#### File: dataset/urmp/urmp_generate_dataset.py
```python
import os
import sys
import random
import argparse
sys.path.insert(1, os.path.join(sys.path[0], '../..'))
from utils.utilities import (mkdir, write_lst)
random.seed(1234)
instr_tags = "vn,vc,va,fl,cl,sax,tpt,tbn,bn,hn,tba,db,ob"
instrs = "Violin,Cello,Viola,Flute,Clarinet,Saxophone,Trumpet,Trombone,Bassoon,Horn,Tuba,Double_Bass,Oboe"
tag2instr = {}
seen = "Violin,Cello,Viola,Flute,Clarinet,Saxophone,Trumpet,Trombone"
unseen = "Horn,Tuba,Double_Bass,Bassoon,Oboe"
skips = ""
instr_tags = instr_tags.split(',')
instrs = instrs.split(',')
seen = seen.split(',')
unseen = unseen.split(',')
skips = skips.split(',')
for i, tag in enumerate(instr_tags):
tag2instr[tag] = instrs[i]
def get_all_audios(folder):
audios = {}
tracks_num = 0
sample_folders = os.listdir(folder)
for sample in sample_folders:
sample_path = os.path.join(folder, sample)
tracks = os.listdir(sample_path)
if len(sample.split('_')) < 2:
continue
sampleName = sample.split('_')[1]
sample_instrs = sample.split('_')[2:]
if sampleName not in audios:
audios[sampleName] = {}
for track in tracks:
if not str.endswith(track, "ref.txt"):
continue
track = str.replace(track, "_ref.txt", ".h5")
#track = str.replace(track, "_TRAIN.h5", "_TEST.h5")
track_path = os.path.join(sample_path, track)
track_name = track.split("_")[1]
instr = tag2instr[track.split("_")[2]]
if instr not in audios[sampleName]:
audios[sampleName][instr] = {}
if track_name not in audios[sampleName][instr]:
tracks_num += 1
audios[sampleName][instr][track_name] = track_path
seen_audios = []
unseen_audios = []
for songName in audios:
for instr in audios[songName]:
if instr in seen:
seen_audios.append(songName)
else:
unseen_audios.append(songName)
train_lst = {}
test_lst = {}
for songName in audios:
if songName in unseen_audios:
instrs = {}
instrs_num = 0
for instr in audios[songName]:
if instr not in instrs:
instrs[instr] = []
for track in audios[songName][instr]:
instrs[instr].append(audios[songName][instr][track])
instrs_num += len(instrs[instr])
instrs = sorted(instrs.items(), key=lambda d: -len(d[1]))
show = [{instr[0]:len(instr[1])} for instr in instrs]
print(show)
data_lst = []
for instr in instrs:
                if len(instr[1]) > instrs_num // 2:
                    print(f"Warning: instrument {instr[0]} accounts for more than half of the song's tracks")
for track in instr[1]:
data_lst.append([instr[0], track])
total = len(data_lst)
pairs = []
for i, track in enumerate(data_lst):
                j = total - 1 - i
if j == i:
j = 0
pairs.append([track[0], data_lst[j][0], track[1],data_lst[j][1]])
if i + 1 >= (total + 1)// 2:
break
test_lst[songName] = {"test" : pairs, "query" : []}
else:
for instr in audios[songName]:
if instr not in train_lst:
train_lst[instr] = []
for track in audios[songName][instr]:
train_lst[instr].append(str.replace(audios[songName][instr][track], "_TEST.h5", "h5"))
print("\nseen:\n")
compute_instr_samples(audios, songNames=None, skipNames=unseen_audios)
print("\nunseen:\n")
compute_instr_samples(audios, songNames=unseen_audios)
print("\nall:\n")
compute_instr_samples(audios)
query_lst = []
songs_lst = []
songs_num = len(test_lst)
for test in test_lst:
songs_lst.append(test)
for i, test in enumerate(test_lst):
for pair in test_lst[test]["test"]:
query = []
query += pair[:2]
for j in range(2):
path = None
while path is None:
song_id = random.randint(0, songs_num - 1)
if song_id == i:
continue
query_pairs = test_lst[songs_lst[song_id]]["test"]
for query_pair in query_pairs:
for k in range(2):
if query_pair[k] == pair[j] and not query_pair[k + 2] == pair[j + 2]:
path = query_pair[k + 2]
query.append(path)
break
if path is not None:
break
test_lst[test]["query"] += [query]
return audios, train_lst, test_lst
def compute_instr_samples(audios, songNames=None, skipNames=None):
samples = {}
num = 0
for songName in audios:
if songNames is not None and songName not in songNames:
continue
if skipNames is not None and songName in skipNames:
continue
for instr in audios[songName]:
if instr not in samples:
samples[instr] = 0
num += len(audios[songName][instr])
samples[instr] += len(audios[songName][instr])
total_num = 0
for instr in samples:
total_num += samples[instr]
print(instr, samples[instr])
print(total_num, num)
return samples
def save_train_lst(data, output_folder):
for instr in data:
instr_folder = os.path.join(output_folder, instr)
mkdir(instr_folder)
path = os.path.join(instr_folder, "train.lst")
write_lst(path, data[instr])
def save_test_lst(data, output_folder):
testset_folder = os.path.join(output_folder, "testset")
mkdir(testset_folder)
test_lst = []
query_lst = []
for songName in data:
test_lst += data[songName]["test"]
query_lst += data[songName]["query"]
test_lst = [f"{t[0]},{t[1]}\t{t[2]},{t[3]}" for t in test_lst]
query_lst = [f"{t[0]},{t[1]}\t{t[2]},{t[3]}" for t in query_lst]
print("test set", len(test_lst))
test_lst_path = os.path.join(testset_folder, "test.lst")
query_lst_path = os.path.join(testset_folder, "query.lst")
write_lst(test_lst_path, test_lst)
write_lst(query_lst_path, query_lst)
if __name__=="__main__":
parser = argparse.ArgumentParser(description='')
parser.add_argument('--feature_dir', type=str, required=True, help='Directory of generated dataset.')
parser.add_argument('--data_dir', type=str, required=True, help='Directory to store generated files.')
args = parser.parse_args()
folder = args.feature_dir
output_folder = args.data_dir
audios, train_lst, test_lst = get_all_audios(folder)
save_train_lst(train_lst, output_folder)
save_test_lst(test_lst, output_folder)
instr_samples = compute_instr_samples(audios)
```
#### File: src/utils/utilities.py
```python
import os
import sys
import time
import numpy as np
import configparser
import json
import logging
et = 1e-8
def load_json(path):
with open(path,'r') as load_f:
load_dict = json.load(load_f)
return load_dict
def save_json(path, data):
with open(path,'w') as f:
json.dump(data,f)
def print_dict(x):
for key in x:
print(key, x[key])
def factorized_fft(fft, onset_offset):
st = -1
curve_fft = np.zeros_like(fft)
mean_fft = np.zeros_like(fft)
for i in range(fft.shape[-1]):
if onset_offset[i] == 1 and st == -1:
st = i
elif not onset_offset[i] == 0:
if st == -1:
                # no onset opened yet: leave the curve at zero and keep the raw value
                curve_fft[i] = 0
                mean_fft[i] = fft[i]
else:
ave = np.mean(fft[st : i + 1])
std = np.std(fft[st : i + 1])
mean_fft[st : i + 1] = ave
curve_fft[st : i + 1] = (fft[st : i + 1] - ave) / (std + et)
if onset_offset[i] == 2:
st = -1
return curve_fft, mean_fft
def compute_time(event, pre_time):
cur_time = time.time()
print(f'{event} use', cur_time - pre_time)
return cur_time
def encode_mu_law(x, mu=256):
mu = mu - 1
fx = np.sign(x) * np.log(1 + mu * np.abs(x)) / np.log(1 + mu)
return np.floor((fx + 1) / 2 * mu + 0.5).astype(np.int64)
def decode_mu_law(y, mu=256):
mu = mu - 1
fx = (y - 0.5) / mu * 2 - 1
x = np.sign(fx) / mu * ((1 + mu) ** np.abs(fx) - 1)
return x
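# Round-trip sketch for the mu-law pair above (illustrative, not original code):
#   x = np.linspace(-1, 1, 5)
#   y = encode_mu_law(x)       # integer codes in [0, 255]
#   x_hat = decode_mu_law(y)   # approximately x, up to quantization error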
def read_config(config_path, name):
config = configparser.ConfigParser()
config.read(config_path)
return config[name]
def dict2str(dic, pre):
res = ''
for i, d in enumerate(dic):
if i == 0:
res += pre
res += d + ' :'
val = dic[d]
if type(val) is dict:
res += '\n' + dict2str(val, pre + '\t') + '\n'
else:
res += f'\t{val}\t'
return res
def save_score(path, score):
mkdir(path, is_file=True)
res = dict2str(score, '')
write_lst(path, [res])
return res
def get_process_groups(audio_num, process_num):
assert audio_num > 0 and process_num > 0
if process_num > audio_num:
process_num = audio_num
audio_num_per_process = (audio_num + process_num - 1) // process_num
reduce_id = process_num - (audio_num_per_process * process_num - audio_num)
groups = []
cur = 0
for i in range(process_num):
if i == reduce_id:
audio_num_per_process -= 1
groups += [[cur, cur + audio_num_per_process]]
cur += audio_num_per_process
return groups
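# Example (illustrative): get_process_groups(10, 3) -> [[0, 4], [4, 7], [7, 10]],
# i.e. contiguous [start, end) index ranges covering all ten audios across three workers.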
def mkdir(fd, is_file=False):
fd = fd.split('/')
fd = fd[:-1] if is_file else fd
ds = []
for d in fd:
ds.append(d)
d = "/".join(ds)
if not d == "" and not os.path.exists(d):
os.makedirs(d)
def get_filename(path):
path = os.path.realpath(path)
na_ext = path.split('/')[-1]
na = os.path.splitext(na_ext)[0]
return na
def traverse_folder(folder):
paths = []
names = []
for root, dirs, files in os.walk(folder):
for name in files:
filepath = os.path.join(root, name)
names.append(name)
paths.append(filepath)
return names, paths
def note_to_freq(piano_note):
return 2 ** ((piano_note - 39) / 12) * 440
def create_logging(log_dir, filemode):
mkdir(log_dir)
i1 = 0
while os.path.isfile(os.path.join(log_dir, '{:04d}.log'.format(i1))):
i1 += 1
log_path = os.path.join(log_dir, '{:04d}.log'.format(i1))
logging.basicConfig(
level=logging.DEBUG,
format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
datefmt='%a, %d %b %Y %H:%M:%S',
filename=log_path,
filemode=filemode)
# Print to console
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
console.setFormatter(formatter)
logging.getLogger('').addHandler(console)
return logging
def float32_to_int16(x):
x = np.clip(x, -1, 1)
assert np.max(np.abs(x)) <= 1.
return (x * 32767.).astype(np.int16)
def int16_to_float32(x):
return (x / 32767.).astype(np.float32)
def pad_truncate_sequence(x, max_len):
if len(x) < max_len:
return np.concatenate((x, np.zeros(max_len - len(x))))
else:
return x[0 : max_len]
def read_lst(lst_path):
with open(lst_path) as f:
data = f.readlines()
data = [d.rstrip() for d in data]
return data
def write_lst(lst_path, lst):
lst = [str(l) for l in lst]
with open(lst_path, 'w') as f:
f.writelines('\n'.join(lst))
def freq2note(freq):
freq = float(freq)
note = round(12 * np.log2(freq / 440)) + 48
return note
def note2freq(note):
note = float(note)
freq = (2**((note - 48) / 12)) * 440
return freq
def parse_frameroll2annotation(frame_roll, frames_per_second=100, notes_num=88):
pre = notes_num
st = -1
est = []
    # Pad with the sentinel value (notes_num == silence) so a trailing note gets closed.
    preds = np.pad(frame_roll, (0, 1), 'constant', constant_values=(0, notes_num))
    for i in range(preds.shape[0]):
        if not preds[i] == pre:
            if st > -1 and not pre == notes_num:
                est.append(
                    '%f\t%f\t%d' % (st * 1.0 / frames_per_second, i * 1.0 / frames_per_second, pre))
            st = i
            pre = preds[i]
    return est
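# Worked example (illustrative): with the defaults frames_per_second=100, notes_num=88,
#   parse_frameroll2annotation(np.array([88, 60, 60, 60, 88]))
# returns ['0.010000\t0.040000\t60'], i.e. note 60 sounding from 0.01 s to 0.04 s.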
``` |
{
"source": "jozefbonnar/pydactyl",
"score": 3
} |
#### File: client/servers/backups.py
```python
from pydactyl.api import base
class Backups(base.PterodactylAPI):
"""Pterodactyl Client Server Backups API."""
def list_backups(self, server_id: str):
"""List files belonging to specified server.
Optionally specify a directory and only return results in the
specified directory. Directory is relative to the server's root.
Args:
server_id(str): Server identifier (abbreviated UUID)
"""
endpoint = 'client/servers/{}/files/list'.format(server_id)
response = self._api_request(endpoint=endpoint)
return response
def create_backup(self, server_id: str):
"""Create a new backup of the specified server.
Args:
server_id(str): Server identifier (abbreviated UUID)
"""
endpoint = 'client/servers/{}/backups'.format(server_id)
response = self._api_request(endpoint=endpoint, mode='POST')
return response
def get_backup_detail(self, server_id: str, backup_id: str):
"""Retrieves information about the specified backup.
Args:
server_id(str): Server identifier (abbreviated UUID)
backup_id(str): Backup identifier (long UUID)
"""
endpoint = 'client/servers/{}/backups/{}'.format(server_id, backup_id)
response = self._api_request(endpoint=endpoint)
return response
def get_backup_download(self, server_id: str, backup_id: str):
"""Generates a download link for the specified backup.
Args:
server_id(str): Server identifier (abbreviated UUID)
backup_id(str): Backup identifier (long UUID)
"""
endpoint = 'client/servers/{}/backups/{}/download'.format(server_id,
backup_id)
response = self._api_request(endpoint=endpoint)
return response
def delete_backup(self, server_id: str, backup_id: str):
"""Deletes the specified backup.
Args:
server_id(str): Server identifier (abbreviated UUID)
backup_id(str): Backup identifier (long UUID)
"""
endpoint = 'client/servers/{}/backups/{}'.format(server_id, backup_id)
response = self._api_request(endpoint=endpoint, mode='DELETE')
return response
```
#### File: client/servers/network.py
```python
from pydactyl.api import base
class Network(base.PterodactylAPI):
"""Pterodactyl Client Server Network API."""
def list_allocations(self, server_id: str):
"""Retrieves network information for the specified server.
Args:
server_id(str): Server identifier (abbreviated UUID)
"""
endpoint = 'client/servers/{}/network/allocations'.format(server_id)
response = self._api_request(endpoint=endpoint)
return response
def assign_allocation(self, server_id: str):
"""Assigns an allocation to the server.
Automatically assigns a new allocation if auto-assign is enabled on
the instance.
Args:
server_id(str): Server identifier (abbreviated UUID)
"""
endpoint = 'client/servers/{}/network/allocations'.format(server_id)
response = self._api_request(endpoint=endpoint, mode='POST')
return response
def set_allocation_note(self, server_id: str, allocation_id: int,
note: str):
"""Sets the note on an allocation.
Args:
server_id(str): Server identifier (abbreviated UUID)
allocation_id(int): Allocation identifier (e.g. 2)
note(str): Contents of the note
"""
data = {'notes': note}
endpoint = 'client/servers/{}/network/allocations/{}'.format(
server_id, allocation_id)
response = self._api_request(endpoint=endpoint, mode='POST', data=data)
return response
def set_primary_allocation(self, server_id: str, allocation_id: int):
"""Sets the specified allocation as the primary allocation.
Args:
server_id(str): Server identifier (abbreviated UUID)
allocation_id(int): Allocation identifier (e.g. 2)
"""
endpoint = 'client/servers/{}/network/allocations/{}/primary'.format(
server_id, allocation_id)
response = self._api_request(endpoint=endpoint, mode='POST')
return response
def unassign_allocation(self, server_id: str, allocation_id: int):
"""Deletes the specified non-primary allocation.
Args:
server_id(str): Server identifier (abbreviated UUID)
allocation_id(int): Allocation identifier (e.g. 2)
"""
endpoint = 'client/servers/{}/network/allocations/{}'.format(
server_id, allocation_id)
response = self._api_request(endpoint=endpoint, mode='DELETE')
return response
```
#### File: client/servers/startup.py
```python
from pydactyl.api import base
class Startup(base.PterodactylAPI):
"""Pterodactyl Client Server Startup API."""
def list_variables(self, server_id: str):
"""Lists all variables on the server.
Args:
server_id(str): Server identifier (abbreviated UUID)
"""
endpoint = 'client/servers/{}/startup'.format(server_id)
response = self._api_request(endpoint=endpoint)
return response
def update_variable(self, server_id: str, name: str, value: str):
"""Updates the specified server variable.
Args:
server_id(str): Server identifier (abbreviated UUID)
name(str): Variable name to update
value(str): Value to assign to the updated variable
"""
data = {'key': name, 'value': value}
endpoint = 'client/servers/{}/startup/variable'.format(server_id)
response = self._api_request(endpoint=endpoint, mode='PUT', data=data)
return response
```
#### File: client/servers/users.py
```python
from pydactyl.api import base
class Users(base.PterodactylAPI):
"""Pterodactyl Client Server Backups API."""
def list_users(self, server_id: str):
"""List all users added to the server.
Includes user details and permissions assigned to them.
Args:
server_id(str): Server identifier (abbreviated UUID)
"""
endpoint = 'client/servers/{}/users'.format(server_id)
response = self._api_request(endpoint=endpoint)
return response
def create_user(self, server_id: str, email: str, permissions: iter,
username: str = None):
"""Adds a new user to the server.
Args:
server_id(str): Server identifier (abbreviated UUID)
email(str): Email address of the new user
permissions(iter): List of permissions to assign to the user
username(str): Username to assign, randomized if not specified
"""
data = {'email': email, 'permissions': permissions}
if username:
data['username'] = username
endpoint = 'client/servers/{}/users'.format(server_id)
response = self._api_request(endpoint=endpoint, mode='POST', data=data)
return response
def get_user(self, server_id: str, user_id: str):
"""Retrieves details about the specified user.
Args:
server_id(str): Server identifier (abbreviated UUID)
user_id(str): User identifier (long UUID)
"""
endpoint = 'client/servers/{}/users/{}'.format(server_id, user_id)
response = self._api_request(endpoint=endpoint)
return response
def update_user(self, server_id: str, user_id: str, permissions):
"""Updates the specified user.
        This probably has more options than the documentation lists.
Args:
server_id(str): Server identifier (abbreviated UUID)
user_id(str): User identifier (long UUID)
permissions(iter): List of permissions to assign to the user
"""
data = {'permissions': permissions}
endpoint = 'client/servers/{}/users/{}'.format(server_id, user_id)
response = self._api_request(endpoint=endpoint, mode='POST', data=data)
return response
def delete_user(self, server_id: str, user_id: str):
"""Deletes the specified user.
Args:
server_id(str): Server identifier (abbreviated UUID)
user_id(str): User identifier (long UUID)
"""
endpoint = 'client/servers/{}/users/{}'.format(server_id, user_id)
response = self._api_request(endpoint=endpoint, mode='DELETE')
return response
```
#### File: tests/application/nests_test.py
```python
import unittest
try:
from unittest import mock
except ImportError:
import mock
from pydactyl import PterodactylClient
class NestsTests(unittest.TestCase):
def setUp(self):
self.client = PterodactylClient(url='dummy', api_key='dummy')
@mock.patch('pydactyl.api.base.PterodactylAPI._api_request')
def test_list_nests(self, mock_api):
expected = {
'endpoint': 'application/nests',
}
self.client.nests.list_nests()
mock_api.assert_called_with(**expected, params=None)
@mock.patch('pydactyl.api.base.PterodactylAPI._api_request')
def test_list_nests_with_includes(self, mock_api):
expected = {
'endpoint': 'application/nests',
}
self.client.nests.list_nests('eggs', 'servers')
mock_api.assert_called_with(
**expected, params={'include': 'eggs,servers'})
@mock.patch('pydactyl.api.base.PterodactylAPI._api_request')
    def test_get_nest_info(self, mock_api):
expected = {
'endpoint': 'application/nests/11',
}
self.client.nests.get_nest_info(11)
mock_api.assert_called_with(**expected, params=None)
@mock.patch('pydactyl.api.base.PterodactylAPI._api_request')
    def test_get_nest_info_with_include(self, mock_api):
expected = {
'endpoint': 'application/nests/11',
}
self.client.nests.get_nest_info(11, 'config')
mock_api.assert_called_with(**expected, params={'include': 'config'})
@mock.patch('pydactyl.api.base.PterodactylAPI._api_request')
def test_get_eggs_in_nest(self, mock_api):
expected = {
'endpoint': 'application/nests/22/eggs',
}
self.client.nests.get_eggs_in_nest(22)
mock_api.assert_called_with(**expected, params=None)
@mock.patch('pydactyl.api.base.PterodactylAPI._api_request')
def test_get_eggs_in_nest_with_includes(self, mock_api):
expected = {
'endpoint': 'application/nests/22/eggs',
}
self.client.nests.get_eggs_in_nest(22, 'nest', 'config')
mock_api.assert_called_with(
**expected, params={'include': 'nest,config'})
@mock.patch('pydactyl.api.base.PterodactylAPI._api_request')
def test_get_egg_info(self, mock_api):
expected = {
'endpoint': 'application/nests/33/eggs/44',
}
self.client.nests.get_egg_info(33, 44)
mock_api.assert_called_with(**expected, params=None)
@mock.patch('pydactyl.api.base.PterodactylAPI._api_request')
def test_get_egg_info_with_includes(self, mock_api):
expected = {
'endpoint': 'application/nests/33/eggs/44',
}
self.client.nests.get_egg_info(33, 44, 'servers', 'config')
mock_api.assert_called_with(
**expected, params={'include': 'servers,config'})
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "JozefGalbicka/websupportsk-ddns",
"score": 3
} |
#### File: websupportsk-ddns/websupportsk_ddns/notifiers.py
```python
import requests
import logging
logger = logging.getLogger(__name__)
def send_notifications(notifiers, message):
for notifier in notifiers:
notifier.send_notification(message)
class Pushover:
def __init__(self, api_token, user_key):
self.api_token = api_token
self.user_key = user_key
self.url = "https://api.pushover.net/1/messages.json"
def send_notification(self, text):
r = requests.post(self.url, data={
"token": self.api_token,
"user": self.user_key,
"message": text
})
logger.debug(f"Pushover notification response: {r.text}")
if "errors" in r.text:
logger.error(f"Pushover error occured: {r.text}")
class Gotify:
def __init__(self, url, api_token):
self.api_token = api_token
self.url = f"http://{url}/message?token={api_token}"
def send_notification(self, text):
r = requests.post(self.url, data={
"message": text
})
logger.debug(f"Gotify notification response: {r.text}")
if "error" in r.text:
logger.error(f"Gotify error occured: {r.text}")
``` |
{
"source": "jozefhruska/VUT-FIT-BIT",
"score": 3
} |
#### File: Project-2/modules/Instruction.py
```python
from modules.ErrorHelper import ErrorHelper
from modules.Argument import Argument, ArgumentType
class Instruction(ErrorHelper):
def __init__(self, instruction):
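# `instruction` is expected to be a parsed XML element (e.g. from xml.etree.ElementTree)
# carrying `opcode` and `order` attributes; argument sub-elements arrive via addArgument().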
self.opcode = instruction.attrib['opcode'].upper()
self.reqArgCount = 0
self.reqArgTypes = []
self.order = instruction.attrib['order'].upper()
self.args = {}
def process(self):
# Convert order attrib to numeric value
try: self.order = int(self.order)
except Exception as e: self.fatalWithMessage(31, str(e))
# Resolve instruction's opcode
# Required argument types per opcode; the argument count is implied by list length
OPCODE_ARGS = {
'MOVE': [ArgumentType.VAR, ArgumentType.SYMB],
'DEFVAR': [ArgumentType.VAR],
'CALL': [ArgumentType.LABEL],
'PUSHS': [ArgumentType.SYMB],
'POPS': [ArgumentType.VAR],
'ADD': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'SUB': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'MUL': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'IDIV': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'LT': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'GT': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'EQ': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'AND': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'OR': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'NOT': [ArgumentType.VAR, ArgumentType.SYMB],
'INT2CHAR': [ArgumentType.VAR, ArgumentType.SYMB],
'STRI2INT': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'READ': [ArgumentType.VAR, ArgumentType.TYPE],
'WRITE': [ArgumentType.SYMB],
'CONCAT': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'STRLEN': [ArgumentType.VAR, ArgumentType.SYMB],
'GETCHAR': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'SETCHAR': [ArgumentType.VAR, ArgumentType.SYMB, ArgumentType.SYMB],
'TYPE': [ArgumentType.VAR, ArgumentType.SYMB],
'LABEL': [ArgumentType.LABEL],
'JUMP': [ArgumentType.LABEL],
'JUMPIFEQ': [ArgumentType.LABEL, ArgumentType.SYMB, ArgumentType.SYMB],
'JUMPIFNEQ': [ArgumentType.LABEL, ArgumentType.SYMB, ArgumentType.SYMB],
'EXIT': [ArgumentType.SYMB],
'DPRINT': [ArgumentType.SYMB],
# Opcodes with no arguments
'CREATEFRAME': [], 'PUSHFRAME': [], 'POPFRAME': [], 'RETURN': [], 'BREAK': []
}
if self.opcode not in OPCODE_ARGS:
self.fatalWithMessage(32, 'Unknown instruction opcode \'%s\'.' % self.opcode)
argTypes = OPCODE_ARGS[self.opcode]
self.reqArgCount = len(argTypes)
self.reqArgTypes = {'arg%d' % (i + 1): t for i, t in enumerate(argTypes)}
def addArgument(self, argument):
argumentInstance = Argument(argument, self.reqArgTypes[argument.tag])
if argumentInstance is not None: self.args.update({ argument.tag: argumentInstance })
def getOrder(self):
return self.order
```
#### File: features/steps/edit.py
```python
from behave import *
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
import urllib.parse as urlparse
import re
def getToken(browser):
parsed = urlparse.urlparse(browser.current_url)
return urlparse.parse_qs(parsed.query)['token'][0]
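# Module-level state shared between steps; step implementations rebind these via `global`.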
category = None
categoryCount = 0
imagesCount = 0
@given(u'a web browser is on a product edit page')
def step_impl(context):
context.browser.get("http://mys01.fit.vutbr.cz:8024/admin/index.php?route=catalog/product/edit&token=%s&product_id=41" % getToken(context.browser))
assert(u'Edit Product' == context.browser.find_element_by_css_selector("#content h3.panel-title").text)
@given(u'the "General" tab is selected in tab menu')
def step_impl(context):
context.browser.find_element_by_css_selector("#content .nav-tabs li:nth-child(1) a").click()
assert(u'General' == context.browser.find_element_by_css_selector("#content .nav-tabs li.active a").text)
@given(u'the user adds "42" to the "Product name" input')
def step_impl(context):
nameInput = context.browser.find_element_by_css_selector("#input-name1")
nameInput.send_keys("42")
@when(u'the user clicks on the "Save" button')
def step_impl(context):
context.browser.find_element_by_css_selector("#content .page-header button[type='submit']").click()
@then(u'product edit page is reloaded')
def step_impl(context):
assert(u'Product List' == context.browser.find_element_by_css_selector("#content h3.panel-title").text)
@then(u'the "Product name" value is "iMac42"')
def step_impl(context):
context.browser.get("http://mys01.fit.vutbr.cz:8024/admin/index.php?route=catalog/product/edit&token=%s&product_id=41" % getToken(context.browser))
assert(u'iMac42' == context.browser.find_element_by_css_selector("#input-name1").get_attribute("value"))
@given(u'the "Data" tab is selected in tab menu')
def step_impl(context):
context.browser.find_element_by_css_selector("#content .nav-tabs li:nth-child(2) a").click()
assert(u'Data' == context.browser.find_element_by_css_selector("#content .nav-tabs li.active a").text)
@when(u'the user hovers on SKU tooltip icon')
def step_impl(context):
ActionChains(context.browser).move_to_element(context.browser.find_element_by_css_selector("#tab-data > div:nth-child(2) > label > span")).perform()
@then(u'the Stock Keeping Unit is shown')
def step_impl(context):
assert(u'Stock Keeping Unit' == context.browser.find_element_by_css_selector("#tab-data > div:nth-child(2) > label > span").get_attribute('data-original-title'))
@when(u'the user hovers on UPC tooltip icon')
def step_impl(context):
ActionChains(context.browser).move_to_element(context.browser.find_element_by_css_selector("#tab-data > div:nth-child(3) > label > span")).perform()
@then(u'the Universal Product Code is shown')
def step_impl(context):
assert(u'Universal Product Code' == context.browser.find_element_by_css_selector("#tab-data > div:nth-child(3) > label > span").get_attribute('data-original-title'))
@when(u'the user hovers on EAN tooltip icon')
def step_impl(context):
ActionChains(context.browser).move_to_element(context.browser.find_element_by_css_selector("#tab-data > div:nth-child(4) > label > span")).perform()
@then(u'the European Article Number is shown')
def step_impl(context):
assert(u'European Article Number' == context.browser.find_element_by_css_selector("#tab-data > div:nth-child(4) > label > span").get_attribute('data-original-title'))
@when(u'the user hovers on JAN tooltip icon')
def step_impl(context):
ActionChains(context.browser).move_to_element(context.browser.find_element_by_css_selector("#tab-data > div:nth-child(5) > label > span")).perform()
@then(u'the Japanese Article Number is shown')
def step_impl(context):
assert(u'Japanese Article Number' == context.browser.find_element_by_css_selector("#tab-data > div:nth-child(5) > label > span").get_attribute('data-original-title'))
@when(u'the user hovers on ISBN tooltip icon')
def step_impl(context):
ActionChains(context.browser).move_to_element(context.browser.find_element_by_css_selector("#tab-data > div:nth-child(6) > label > span")).perform()
@then(u'the International Standard Book Number is shown')
def step_impl(context):
assert(u'International Standard Book Number' == context.browser.find_element_by_css_selector("#tab-data > div:nth-child(6) > label > span").get_attribute('data-original-title'))
@when(u'the user hovers on MPN tooltip icon')
def step_impl(context):
ActionChains(context.browser).move_to_element(context.browser.find_element_by_css_selector("#tab-data > div:nth-child(7) > label > span")).perform()
@then(u'the Manufacturer Part Number is shown')
def step_impl(context):
assert(u'Manufacturer Part Number' == context.browser.find_element_by_css_selector("#tab-data > div:nth-child(7) > label > span").get_attribute('data-original-title'))
@when(u'the user clicks on calendar icon for "Date Available" input')
def step_impl(context):
context.browser.find_element_by_css_selector("#tab-data > div:nth-child(17) > div > div > span > button").click()
@then(u'a calendar selector is shown below the input')
def step_impl(context):
classAttribute = context.browser.find_element_by_css_selector("body > div.bootstrap-datetimepicker-widget.dropdown-menu").get_attribute("class")
# re.search returns None (not False) when there is no match
assert(re.search("picker-open", classAttribute) is not None)
@given(u'an empty value is in "Model" input')
def step_impl(context):
modelInput = context.browser.find_element_by_css_selector("#input-model")
modelInput.clear()
assert(modelInput.get_attribute("value") == '')
@then(u'a warning is displayed below the page title')
def step_impl(context):
context.browser.find_element_by_css_selector("#content > div.container-fluid > div.alert.alert-danger")
@then(u'an explanation is displayed below the "Model" input')
def step_impl(context):
context.browser.find_element_by_css_selector("#content .nav-tabs li:nth-child(2) a").click()
assert(context.browser.find_element_by_css_selector("#tab-data > div.form-group.required.has-error > div > div").text != None)
@given(u'the "Links" tab is selected in tab menu')
def step_impl(context):
context.browser.find_element_by_css_selector("#content .nav-tabs li:nth-child(3) a").click()
assert(u'Links' == context.browser.find_element_by_css_selector("#content .nav-tabs li.active a").text)
@given(u'products is in at least one category')
def step_impl(context):
global category, categoryCount
childCategories = context.browser.find_elements_by_css_selector("#product-category div")
categoryCount = len(childCategories)
category = context.browser.find_element_by_css_selector("#product-category div:first-child")
assert(categoryCount > 0)
@when(u'the user clicks on the remove icon next to a category name')
def step_impl(context):
context.browser.find_element_by_css_selector("#product-category > div:first-child > i.fa-minus-circle").click()
@then(u'category disappears from the list')
def step_impl(context):
childCategories = context.browser.find_elements_by_css_selector("#product-category div")
assert(len(childCategories) != categoryCount)
@then(u'the product is no longer in this category')
def step_impl(context):
childCategories = context.browser.find_elements_by_css_selector("#product-category > div")
for childCategory in childCategories:
assert(childCategory.text != category.text)
@given(u'the "Image" tab is selected in tab menu')
def step_impl(context):
context.browser.find_element_by_css_selector("#content .nav-tabs li:nth-child(9) a").click()
assert(u'Image' == context.browser.find_element_by_css_selector("#content .nav-tabs li.active a").text)
@given(u'products has at least one image')
def step_impl(context):
global imagesCount
images = context.browser.find_elements_by_css_selector("#images > tbody > tr")
imagesCount = len(images)
assert(imagesCount > 0)
@when(u'the user clicks on the remove icon in the last table column')
def step_impl(context):
context.browser.find_element_by_css_selector("#images > tbody > tr:nth-child(1) > td:nth-child(3) > button").click()
@then(u'image disappears from the list')
def step_impl(context):
images = context.browser.find_elements_by_css_selector("#images > tbody > tr")
assert(len(images) != imagesCount)
``` |
{
"source": "jozefizso/boxcutter-windows",
"score": 2
} |
#### File: boxcutter-windows/bin/tweak-json.py
```python
import json
import os
import re
import shutil
import sys
import time
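# Feature toggles: these module-level flags control how the Packer template JSON
# is rewritten below (communicator choice, attached ISOs, debug scripts, ...).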
winrm = True
ssh = False
keep_input_artifact = True
vmx_data_post = False
compression_level = 0
chocolatey = False
add_debugging = True
set_packer_debug = False
add_debug_log = True
add_unzip_vbs = False
add_shell_command = False
add_ssh_uninstaller = False
tools_upload_flavor = False
default_cm = 'nocm'
attach_provisions_iso = False
attach_windows_iso = True
attach_vboxguestadditions_iso = True
attach_shared_folder = False
if add_ssh_uninstaller:
add_debugging = False
add_debug_log = False
vmx_data_post = False
def touch(filename, mtime):
with open(filename, 'a+'):
pass
os.utime(filename, (mtime, mtime))
return 0
def touch_by_file(filename, touch_filename):
touch(filename, os.path.getmtime(touch_filename))
if len(sys.argv) < 2:
sys.exit('Usage: ' + sys.argv[0] + ' filename.json')
if len(sys.argv) >= 3:
winrm = True
vmx_data_post = True
json_file_path = sys.argv[1]
orig = json_file_path + '.orig'
print('Updating ' + json_file_path)
if not os.path.isfile(orig):
mtime = os.path.getmtime(json_file_path)
shutil.copyfile(json_file_path, orig)
touch(orig, mtime)
json_file = open(orig, 'rb')
json_data = json.load(json_file)
debug_cmd = 'floppy/zzz-debug-log.cmd'
save_logs_cmd = 'script/save-logs.cmd'
unzip_vbs = 'floppy/unzip.vbs'
wget_exe = '.windows/wget.exe'
download_cmd = 'floppy/_download.cmd'
packer_config_cmd = 'floppy/_packer_config.cmd'
packer_config_local_cmd = 'floppy/_packer_config_local.cmd'
shutdown_seconds = '10'
timeout_seconds = '10000'
if winrm:
winrm_suffix = '_winrm'
else:
winrm_suffix = ''
shutdown_comment = 'Packer Shutdown'
shutdown_command = 'shutdown /s /t %s /f /d p:4:1 /c "%s"' % (shutdown_seconds, shutdown_comment)
cwd = os.getcwd()
provisions_iso = cwd + '/.windows/provisions/provisions.iso'
windows_iso = 'C:/Program Files (x86)/VMware/VMware Workstation/windows.iso'
vboxguestadditions_iso = "C:/Progra~1/Oracle/VirtualBox/VBoxGuestAdditions.iso"
for i, a in enumerate(json_data['builders']):
if re.search(r'^(vmware|virtualbox)-', a['type']):
del a['keep_failed_build']
#a['output_directory'] = 'output-%s_%s%s' % (a['type'], a['vm_name'], winrm_suffix)
#a['ssh_wait_timeout'] = timeout_seconds + 's'
#a['shutdown_timeout'] = timeout_seconds + 's'
#a['shutdown_command'] = shutdown_command
if add_ssh_uninstaller:
del a['shutdown_timeout']
#del a['shutdown_command']
#a['shutdown_command'] = 'choice /C Y /N /T %s /D Y /M "Waiting %s seconds"' % (timeout_seconds, timeout_seconds)
#a['http_directory'] = 'floppy'
floppy_files = dict.fromkeys(a['floppy_files'], True)
if add_debug_log:
if os.path.exists(debug_cmd):
floppy_files[debug_cmd] = True
if os.path.exists(download_cmd):
floppy_files[download_cmd] = True
if os.path.exists(packer_config_cmd):
floppy_files[packer_config_cmd] = True
if os.path.exists(packer_config_local_cmd):
floppy_files[packer_config_local_cmd] = True
if os.path.exists(wget_exe):
floppy_files[wget_exe] = True
if add_unzip_vbs:
if os.path.exists(unzip_vbs):
floppy_files[unzip_vbs] = True
if not ssh:
if 'floppy/cygwin.bat' in floppy_files:
del floppy_files['floppy/cygwin.bat']
if 'floppy/openssh.bat' in floppy_files:
del floppy_files['floppy/openssh.bat']
a['floppy_files'] = sorted(floppy_files)
if re.search(r'^vmware-', a['type']):
# to turn off to see if Cygwin is failing because of this
if winrm or add_ssh_uninstaller:
# buggy with winrm
a['tools_upload_flavor'] = ''
# a['disk_type_id'] = "0"
# a['skip_compaction'] = compression_level == 0
if winrm:
a['communicator'] = 'winrm'
a['winrm_username'] = 'vagrant'
a['winrm_password'] = '<PASSWORD>'
a['winrm_timeout'] = timeout_seconds + 's'
if not tools_upload_flavor:
a['tools_upload_flavor'] = ''
if not 'vmx_data' in a:
a['vmx_data'] = {}
if attach_shared_folder:
a['vmx_data']['sharedFolder.maxNum'] = '1'
a['vmx_data']['sharedFolder0.enabled'] = 'TRUE'
a['vmx_data']['sharedFolder0.expiration'] = 'never'
a['vmx_data']['sharedFolder0.guestName'] = 'C'
a['vmx_data']['sharedFolder0.hostPath'] = 'C:\\'
a['vmx_data']['sharedFolder0.present'] = 'TRUE'
a['vmx_data']['sharedFolder0.readAccess'] = 'TRUE'
a['vmx_data']['sharedFolder0.writeAccess'] = 'TRUE'
a['vmx_data']['hgfs.maprootshare'] = 'TRUE'
a['vmx_data']['sound.autodetect'] = 'TRUE'
a['vmx_data']['sound.filename'] = '-1'
#a['vmx_data']['sound.pciSlotNumber'] = '32'
a['vmx_data']['sound.present'] = 'TRUE'
a['vmx_data']['sound.startconnected'] = 'TRUE'
a['vmx_data']['sound.virtualdev'] = 'hdaudio'
# a['vmx_data']['virtualhw.version'] = '10'
if attach_provisions_iso:
if os.path.exists(provisions_iso):
a['vmx_data']['ide1:1.deviceType'] = 'cdrom-image'
a['vmx_data']['ide1:1.fileName'] = provisions_iso
a['vmx_data']['ide1:1.present'] = 'TRUE'
a['vmx_data']['ide1:1.startConnected'] = 'TRUE'
if attach_windows_iso:
if os.path.exists(windows_iso):
a['vmx_data']['scsi0:1.present'] = 'TRUE'
a['vmx_data']['scsi0:1.deviceType'] = 'cdrom-image'
a['vmx_data']['scsi0:1.fileName'] = '{{ user `vmware_windows_iso` }}'
if vmx_data_post:
if not 'vmx_data_post' in a:
a['vmx_data_post'] = {}
a['vmx_data_post']['ethernet0.virtualDev'] = 'vmxnet3'
a['vmx_data_post']['RemoteDisplay.vnc.enabled'] = 'false'
a['vmx_data_post']['RemoteDisplay.vnc.port'] = '5900'
a['vmx_data_post']['scsi0.virtualDev'] = 'lsilogic'
if re.search(r'^virtualbox-', a['type']):
if not 'vboxmanage' in a:
a['vboxmanage'] = []
if attach_provisions_iso:
if os.path.exists(provisions_iso):
a['vboxmanage'].append([
"storageattach",
"{{.Name}}",
"--storagectl",
"IDE Controller",
"--port",
"1",
"--device",
"1",
"--type",
"dvddrive",
"--medium",
provisions_iso
])
if attach_vboxguestadditions_iso:
if os.path.exists(vboxguestadditions_iso):
# a['guest_additions_url'] = vboxguestadditions_iso
a['vboxmanage'].append([
"storageattach",
"{{.Name}}",
"--storagectl",
"SATA",
"--port",
"1",
"--device",
"0",
"--type",
"dvddrive",
"--medium",
vboxguestadditions_iso
])
# builders: modify iso properties
a['iso_checksum'] = '{{ user `iso_checksum` }}'
a['iso_checksum_type'] = '{{ user `iso_checksum_type` }}'
a['iso_url'] = '{{ user `iso_url` }}/{{ user `iso_name` }}'
for i in json_data['post-processors']:
if i['type'] == 'vagrant':
i['keep_input_artifact'] = keep_input_artifact
i['compression_level'] = compression_level
#if winrm:
# i['output'] = 'winrm-' + i['output']
#if compression_level == 0:
# i['only'] = 'force-vagrant'
#else:
# not every template defines 'only'; guard against KeyError
if 'only' in i:
del i['only']
packer_debug_env = 'PACKER_DEBUG=1'
if add_shell_command:
env_vars = [
"CM={{user `cm`}}",
"CM_VERSION={{user `cm_version`}}",
]
if set_packer_debug:
env_vars.append(packer_debug_env)
debug_step = {
"environment_vars": env_vars,
"script": debug_cmd,
"type": "shell",
}
json_data['provisioners'].insert(0, debug_step)
# iterate over a copy: entries may be removed from the live list below
for a in list(json_data['provisioners']):
if a['type'] != 'windows-shell':
continue
if winrm:
# use winrm defaults
if 'remote_path' in a:
del a['remote_path']
if 'execute_command' in a:
del a['execute_command']
#a['guest_os_type'] = 'windows'
if 'inline' in a:
if winrm or add_ssh_uninstaller:
if re.search(r'^rm ', a['inline'][0]):
json_data['provisioners'].remove(a)
continue
#if winrm:
#a['binary'] = 'true'
if 'script' in a:
continue
if not 'scripts' in a:
continue
#if 'execute_command' in a:
# a['execute_command'] = re.sub(' /c ', ' /q /c ', a['execute_command'])
if set_packer_debug:
if 'environment_vars' in a:
packer_debug = False
for j in a['environment_vars']:
if j == packer_debug_env:
packer_debug = True
break
if not packer_debug:
a['environment_vars'].append(packer_debug_env)
scripts = []
if add_debugging:
if os.path.exists('script/dump-logs.cmd'):
scripts.append('script/dump-logs.cmd')
# don't need any more:
#scripts.append('script/01-install-handle.cmd')
for j in a['scripts']:
if j == 'script/clean.bat':
if add_debugging:
scripts.append('script/save-logs.cmd')
scripts.append('script/save-temp-dirs.cmd')
if chocolatey:
scripts.append('script/nuget.cmd')
#scripts.append('script/reboot.cmd')
scripts.append('script/chocolatey.cmd')
if compression_level == 0:
if j == 'script/clean.bat':
continue
if j == "script/ultradefrag.bat":
continue
if j == "script/uninstall-7zip.bat":
continue
if j == "script/sdelete.bat":
continue
#if not add_ssh_uninstaller:
scripts.append(j)
if add_debug_log:
scripts.append(debug_cmd)
if add_ssh_uninstaller:
if re.search('cygwin', json_file_path):
scripts.append('script/uninstall-cygwin.cmd')
else:
scripts.append('script/uninstall-openssh.cmd')
a['scripts'] = scripts
if 'variables' in json_data:
json_data['variables']['cm'] = default_cm
json_data['variables']['shutdown_command'] = shutdown_command
json_data['variables']['vmware_windows_iso'] = windows_iso
#json_data['variables']['iso_checksum_type'] = 'sha1'
#json_data['variables']['iso_name'] = json_data['variables']['iso_url']
#json_data['variables']['iso_url'] = 'iso'
new_data = json_data
mtime = os.path.getmtime(json_file_path)
new_data = json.dumps(new_data, sort_keys=True, indent=2, separators=(',', ': '))
json_file.close()
json_file = open(json_file_path, 'w')
json_file.write(new_data)
json_file.close()
touch(json_file_path, mtime)
``` |
{
"source": "jozef-matula/atomibox",
"score": 2
} |
#### File: jozef-matula/atomibox/atomibox.py
```python
import sys
import signal
import argparse
import time
import threading
import os
import os.path
import stat
import hashlib
#signal.signal(signal.SIGINT, signal.SIG_DFL)
def formatTimeStamp():
return time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
def logDebug(s):
sys.stderr.write(formatTimeStamp() + " " + s + "\n")
sys.stderr.flush()
def logError(s):
# TODO: do something more proper
logDebug("ERROR: " + s)
class ConfigurationLocation:
def __init__(self, s_baseDirectoryPath = None):
self.s_baseDirectoryPath = s_baseDirectoryPath
class Configuration:
def __init__(self):
self.a_locations = []
self.i_tcpPort = 8847
class FileChange:
def __init__(self):
pass
class FileChangeProvider:
def __init__(self):
pass
def getChanges(self):
return []
def mainUI(cfg):
from PyQt5 import QtCore
from PyQt5 import QtWidgets
from PyQt5 import QtGui
app = QtWidgets.QApplication(sys.argv)
w = QtWidgets.QWidget()
def onQuit():
QtCore.QCoreApplication.instance().quit()
class SystemTrayIcon(QtWidgets.QSystemTrayIcon):
def __init__(self, icon, parent=None):
QtWidgets.QSystemTrayIcon.__init__(self, icon, parent)
menu = QtWidgets.QMenu(parent)
exitAction = menu.addAction(QtGui.QIcon("resources/quit.ico"), "E&xit")
exitAction.triggered.connect(onQuit)
self.setContextMenu(menu)
trayIcon = SystemTrayIcon(QtGui.QIcon("resources/main.ico"), w)
trayIcon.show()
i_result = app.exec_()
trayIcon.hide()
del app
sys.exit(i_result)
class Atom:
def __init__(self):
# properties stored in database
self.i_id = None
self.i_parentId = None
self.s_name = None
self.f_lastModificationTimeStamp = None
self.s_contentHash = None
# runtime properties
self.s_localPath = None
def insertIntoDB(self, db):
qi = QtSql.QSqlQuery(db);
qi.prepare("INSERT INTO atoms(name, parentId, lastModification, contentSize, contentHash) VALUES(?, ?, ?, ?, ?)")
qi.bindValue(0, self.s_name)
qi.bindValue(1, self.i_parentId)
qi.bindValue(2, self.f_lastModificationTimeStamp)
qi.bindValue(3, self.i_contentSize if hasattr(self, 'i_contentSize') else -1)
qi.bindValue(4, self.s_contentHash)
if qi.exec_():
self.i_id = qi.lastInsertId()
qi.finish()
else:
logError("Failed to execute atom insert query: %s" % str(qi.lastError().text()))
def updateInDB(self, db):
qu = QtSql.QSqlQuery(db);
qu.prepare("UPDATE atoms SET name = ?, parentId = ?, lastModification = ?, contentSize = ?, contentHash = ? WHERE id = ?")
qu.bindValue(0, self.s_name)
qu.bindValue(1, self.i_parentId)
qu.bindValue(2, self.f_lastModificationTimeStamp)
qu.bindValue(3, self.i_contentSize if hasattr(self, 'i_contentSize') else -1)
qu.bindValue(4, self.s_contentHash)
qu.bindValue(5, self.i_id)
if qu.exec_():
qu.finish()
else:
logError("Failed to execute atom update query: %s" % str(qu.lastError().text()))
def removeFromDB(self, db):
def recursiveDelete(i_id):
q = QtSql.QSqlQuery(db)
q.prepare("SELECT id FROM atoms WHERE parentId = ?")
q.bindValue(0, i_id)
if q.exec_():
try:
while q.next():
r = q.record()
recursiveDelete(r.field(0).value())
finally:
q.finish()
else:
logError("Failed to execute atom cascade delete query: %s" % str(q.lastError().text()))
qd = QtSql.QSqlQuery(db);
qd.prepare("DELETE FROM atoms WHERE id = ?")
qd.bindValue(0, i_id)
if qd.exec_():
qd.finish()
else:
logError("Failed to execute atom delete query: %s" % str(qd.lastError().text()))
recursiveDelete(self.i_id)
@staticmethod
def initDBStructures(db):
q = QtSql.QSqlQuery(db)
logDebug("Creating table \"atoms\"")
if q.exec("""
CREATE TABLE atoms(
id INTEGER PRIMARY KEY AUTOINCREMENT,
parentId INTEGER,
name TEXT,
lastModification REAL,
contentSize INTEGER,
contentHash TEXT
);"""):
q.finish()
else:
logError("Failed to create table \"atoms\": %s" % str(q.lastError().text()))
logDebug("Creating index \"atomParents\"")
if(q.exec("CREATE INDEX atomParents ON atoms(parentId)")):
q.finish()
else:
logError("Failed to create index \"atomParents\": %s" % str(q.lastError().text()))
@staticmethod
def listAtomsFromDBForParent(db, i_parentId = None):
q = QtSql.QSqlQuery(db);
if i_parentId is not None:
q.prepare("SELECT * FROM atoms WHERE parentId = ?")
q.bindValue(0, i_parentId)
else:
q.prepare("SELECT * FROM atoms WHERE parentId IS NULL")
a_result = []
if q.exec_():
try:
while q.next():
r = q.record()
a_result.append(Atom._createAtomFromDBRecord(r))
finally:
q.finish()
return a_result
@staticmethod
def createAtomFromDB(db, i_id = None):
q = QtSql.QSqlQuery(db);
if i_id is None:
return DirectoryAtom() # top directory
else:
q.prepare("SELECT * FROM atoms WHERE id = ?")
q.bindValue(0, i_id)
if q.exec_():
try:
if q.next():
r = q.record()
return Atom._createAtomFromDBRecord(r)
finally:
q.finish()
return None
@staticmethod
def _createAtomFromDBRecord(r):
i_size = int(r.field("contentSize").value())
if i_size < 0:
atom = DirectoryAtom()
else:
atom = FileAtom()
atom.i_contentSize = i_size
atom.i_id = int(r.field("id").value())
atom.s_name = str(r.field("name").value())
v = r.field("parentId").value()
atom.i_parentId = int(v) if len(str(v)) > 0 else None
v = r.field("lastModification").value()
atom.f_lastModificationTimeStamp = float(v) if len(str(v)) > 0 else None
atom.s_contentHash = str(r.field("contentHash").value())
return atom
class DirectoryAtom(Atom):
def __init__(self):
Atom.__init__(self)
class FileAtom(Atom):
def __init__(self):
Atom.__init__(self)
self.i_contentSize = None
class FileChangeDiscoveryThread(threading.Thread):
class LocationData:
def __init__(self):
self.db = None
self.atom = None
def __init__(self, cfg):
threading.Thread.__init__(self)
self.cfg = cfg
self.lock = threading.Lock()
self.quitEvent = threading.Event()
self.d_locationToData = {}
logDebug("Available SQL drivers: %s" % str(QtSql.QSqlDatabase.drivers()))
for loc in cfg.a_locations:
logDebug("Opening database for %s" % loc.s_baseDirectoryPath)
db = QtSql.QSqlDatabase().addDatabase("QSQLITE", "db-conn-" + loc.s_baseDirectoryPath)
db.setDatabaseName(os.path.join(loc.s_baseDirectoryPath, ".atomibox.sqlite"));
if db.open():
#logDebug("Available database tables: %s" % str(db.tables()))
r = db.driver().record("atoms")
as_columnNames = [str(r.fieldName(i)) for i in range(0, r.count())]
#logDebug("Current columns: %s" % ", ".join(as_columnNames))
#if len(as_columnNames) and 'parent' not in as_columnNames:
# # do column upgrade if needed
if len(as_columnNames) == 0:
Atom.initDBStructures(db)
atom = DirectoryAtom()
atom.s_name = loc.s_baseDirectoryPath
atom.s_localPath = os.path.abspath(loc.s_baseDirectoryPath)
locationData = FileChangeDiscoveryThread.LocationData()
locationData.db = db
locationData.atom = atom
self.d_locationToData[loc.s_baseDirectoryPath] = locationData
else:
logError("Failed to open database: %s" % str(db.lastError().text()))
def __del__(self):
# make sure all databases are close when this object is deleted
for s, locationData in self.d_locationToData.items():
logDebug("Closing database for %s" % s)
locationData.db.close()
def run(self):
logDebug("FileChangeDiscoveryThread starts...")
i_counter = 1000
while not self.quitEvent.is_set(): # .wait(timeout)
logDebug("FileChangeDiscoveryThread loops...")
time.sleep(1)
if i_counter < 3:
i_counter += 1
continue
i_counter = 0
for loc in self.cfg.a_locations:
locationData = self.d_locationToData[loc.s_baseDirectoryPath]
self.scanDirectory(locationData.db, locationData.atom, 0)
# TODO: this code is here just for debugging
#q = QtSql.QSqlQuery(locationData.db);
#q.exec("SELECT * FROM atoms")
#while q.next():
# r = q.record()
# s = ", ".join([str(r.field(i).value()) for i in range(0, r.count())])
# logDebug("ROW %s" % s)
logDebug("FileChangeDiscoveryThread quits...")
def scanDirectory(self, db, directoryAtom, i_currentDepth):
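# Sync one directory in three phases: (1) stat the filesystem into temporary atoms,
# (2) recurse into subdirectories, inserting records for new ones, (3) diff files
# against the DB records and emit new/modified/removed events.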
if i_currentDepth == 0:
logDebug("Scanning %s" % directoryAtom.s_localPath)
# build list actual files and directories here
a_currentAtoms = []
for s_name in os.listdir(directoryAtom.s_localPath):
if s_name == "." or s_name == ".." or (i_currentDepth == 0 and s_name == ".atomibox.sqlite"):
continue
s_path = os.path.join(directoryAtom.s_localPath, s_name)
t_stat = os.stat(s_path)
# create temporary atom object
if stat.S_ISDIR(t_stat.st_mode):
atom = DirectoryAtom()
else:
atom = FileAtom()
atom.i_contentSize = t_stat.st_size
atom.s_name = s_name
atom.i_parentId = directoryAtom.i_id
atom.f_lastModificationTimeStamp = t_stat.st_mtime
atom.s_localPath = s_path
a_currentAtoms.append(atom)
d_nameToCurrentAtoms = {}
for currentAtom in a_currentAtoms:
d_nameToCurrentAtoms[currentAtom.s_name] = currentAtom
a_recordedAtoms = Atom.listAtomsFromDBForParent(db, directoryAtom.i_id)
d_nameToRecordedAtoms = {}
for recordedAtom in a_recordedAtoms:
d_nameToRecordedAtoms[recordedAtom.s_name] = recordedAtom
# now dive into subdirectories
for atom in a_currentAtoms:
if isinstance(atom, DirectoryAtom):
if atom.s_name in d_nameToRecordedAtoms:
atom = d_nameToRecordedAtoms[atom.s_name]
atom.s_localPath = os.path.join(directoryAtom.s_localPath, atom.s_name)
else:
# EVENT: new directory atom
atom.insertIntoDB(db)
logDebug("EVENT: detected new directory %s in #%s -> created #%d" % (
atom.s_name, str(directoryAtom.i_id), atom.i_id))
d_nameToRecordedAtoms[atom.s_name] = atom
assert atom.i_id is not None
assert atom.s_localPath is not None
self.scanDirectory(db, atom, i_currentDepth + 1)
for atom in a_currentAtoms:
recordedAtom = d_nameToRecordedAtoms[atom.s_name] if atom.s_name in d_nameToRecordedAtoms else None
#if recordedAtom is not None:
# logDebug("Record for %s FOUND in #%s as #%d" % (
# atom.s_name, str(directoryAtom.i_id), atom.i_id))
if isinstance(atom, FileAtom) and isinstance(recordedAtom, FileAtom):
# file record found
if recordedAtom.i_contentSize != atom.i_contentSize \
or recordedAtom.f_lastModificationTimeStamp != atom.f_lastModificationTimeStamp:
atom.s_contentHash = self.hashFileContent(atom.s_localPath)
atom.i_id = recordedAtom.i_id
atom.updateInDB(db)
logDebug("EVENT: detected content modification in file %s (%s->%s) in #%s" % (
atom.s_name, recordedAtom.s_contentHash, atom.s_contentHash, str(directoryAtom.i_id)))
if recordedAtom is None:
assert isinstance(atom, FileAtom)
# EVENT: new file atom
atom.s_contentHash = self.hashFileContent(atom.s_localPath)
atom.insertIntoDB(db)
d_nameToRecordedAtoms[atom.s_name] = atom
logDebug("EVENT: detected new file %s (%s) in #%s -> created #%d" % (
atom.s_name, atom.s_contentHash, str(directoryAtom.i_id), atom.i_id))
for recordedAtom in a_recordedAtoms:
if recordedAtom.s_name not in d_nameToCurrentAtoms:
# EVENT: deleted atom
recordedAtom.removeFromDB(db)
logDebug("EVENT: detected removal of atom %s #%d from #%s" % (
recordedAtom.s_name, recordedAtom.i_id, str(directoryAtom.i_id)))
def stop(self):
self.quitEvent.set()
logDebug("FileChangeDiscoveryThread stop requested...")
self.join()
@staticmethod
def hashFileContent(s_filePath):
h = hashlib.sha1()
with open(s_filePath, "rb") as f:
# read in 1 MiB chunks so large files are fully hashed without loading them whole
while True:
data = f.read(1048576)
if not data:
break
h.update(data)
return h.hexdigest()
class HTTPServerThread(threading.Thread):
def __init__(self, cfg):
threading.Thread.__init__(self)
self.cfg = cfg
self.httpd = None
self.quitEvent = threading.Event()
def run(self):
logDebug("HTTPServerThread starts...")
import http.server
class Handler(http.server.BaseHTTPRequestHandler):
def do_GET(self):
sys.stdout.flush()
try:
self.send_response(200)
self.send_header('Content-Type', 'text/html')
self.end_headers()
self.wfile.write(bytes('Hello', 'UTF-8'))
except Exception as e:
self.send_error(500, "Internal server error: " + str(e))
t_serverAddress = ('', self.cfg.i_tcpPort)
if not self.quitEvent.is_set():
self.httpd = http.server.HTTPServer(t_serverAddress, Handler)
logDebug("HTTPServerThread constructed HTTPServer object")
try:
self.httpd.serve_forever()
finally:
self.httpd.socket.close()
while not self.quitEvent.is_set(): # .wait(timeout)
logDebug("HTTPServerThread loops and waits for quit...")
time.sleep(1)
logDebug("HTTPServerThread finishes...")
def stop(self):
self.quitEvent.set()
if self.httpd is not None:
self.httpd.shutdown()
logDebug("HTTPServerThread stop requested...")
self.join()
def mainClient(cfg):
pass
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--service", action="store_true",
help="enables service mode (non-UI)")
parser.add_argument("-c", "--client", action="store_true",
help="enables client mode (non-UI)")
args = parser.parse_args()
cfg = Configuration()
#cfg.a_locations.append(ConfigurationLocation('/tmp'))
#cfg.a_locations.append(ConfigurationLocation('/utils'))
cfg.a_locations.append(ConfigurationLocation('/tmp2'))
if args.service:
from PyQt5 import QtCore
from PyQt5 import QtSql
app = QtCore.QCoreApplication(sys.argv)
discoveryThread = FileChangeDiscoveryThread(cfg)
discoveryThread.start()
httpdThread = HTTPServerThread(cfg)
httpdThread.start()
quitEvent = threading.Event()
def sigIntHandler(signum, frame):  # avoid shadowing the signal module
logDebug("Termination requested")
quitEvent.set()
signal.signal(signal.SIGINT, sigIntHandler)
while not quitEvent.wait(1):
pass
httpdThread.stop()
discoveryThread.stop()
elif args.client:
mainClient(cfg)
else:
mainUI(cfg)
``` |
{
"source": "jozef-mokry/nmt",
"score": 2
} |
#### File: nmt/nematus/tf_model.py
```python
import tensorflow as tf
from tf_layers import *
import numpy
class Decoder(object):
def __init__(self, config, context, x_mask):
with tf.name_scope("next_word_predictor"):
self.predictor = Predictor(config)
with tf.name_scope("initial_state_constructor"):
context_sum = tf.reduce_sum(
context * tf.expand_dims(x_mask, axis=2),
axis=0)
context_mean = context_sum / tf.expand_dims(
tf.reduce_sum(x_mask, axis=0),
axis=1)
self.init_state_layer = FeedForwardLayer(
in_size=config.state_size * 2,
out_size=config.state_size)
self.init_state = self.init_state_layer.forward(context_mean)
self.translation_maxlen = config.translation_maxlen
self.embedding_size = config.embedding_size
self.state_size = config.state_size
self.target_vocab_size = config.target_vocab_size
with tf.name_scope("y_embeddings_layer"):
self.y_emb_layer = EmbeddingLayer(
vocabulary_size=config.target_vocab_size,
embedding_size=config.embedding_size)
if config.use_layer_norm:
GRUctor = GRUStepWithNormalization
else:
GRUctor = GRUStep
self.grustep1 = GRUctor(
input_size=config.embedding_size,
state_size=config.state_size)
self.attstep = AttentionStep(
context=context,
context_state_size=2*config.state_size,
context_mask=x_mask,
state_size=config.state_size,
hidden_size=2*config.state_size)
self.grustep2 = GRUctor(
input_size=2*config.state_size,
state_size=config.state_size,
nematus_compat=config.nematus_compat)
def sample(self):
batch_size = tf.shape(self.init_state)[0]
i = tf.constant(0)
init_ys = -tf.ones(dtype=tf.int32, shape=[batch_size])
init_embs = tf.zeros(dtype=tf.float32, shape=[batch_size,self.embedding_size])
ys_array = tf.TensorArray(
dtype=tf.int32,
size=self.translation_maxlen,
clear_after_read=True, #TODO: does this help? or will it only introduce bugs in the future?
name='y_sampled_array')
init_loop_vars = [i, self.init_state, init_ys, init_embs, ys_array]
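# Decoding runs as a tf.while_loop: `cond` stops once every sentence has emitted
# <eos> (id 0) or maxlen is hit; `body` performs one GRU+attention step and samples
# the next token, writing it into the TensorArray.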
def cond(i, states, prev_ys, prev_embs, ys_array):
return tf.logical_and(
tf.less(i, self.translation_maxlen),
tf.reduce_any(tf.not_equal(prev_ys, 0)))
def body(i, states, prev_ys, prev_embs, ys_array):
new_states1 = self.grustep1.forward(states, prev_embs)
att_ctx = self.attstep.forward(new_states1)
new_states2 = self.grustep2.forward(new_states1, att_ctx)
logits = self.predictor.get_logits(prev_embs, new_states2, att_ctx, multi_step=False)
new_ys = tf.multinomial(logits, num_samples=1)
new_ys = tf.cast(new_ys, dtype=tf.int32)
new_ys = tf.squeeze(new_ys, axis=1)
new_ys = tf.where(
tf.equal(prev_ys, tf.constant(0, dtype=tf.int32)),
tf.zeros_like(new_ys),
new_ys)
ys_array = ys_array.write(index=i, value=new_ys)
new_embs = self.y_emb_layer.forward(new_ys)
return i+1, new_states2, new_ys, new_embs, ys_array
final_loop_vars = tf.while_loop(
cond=cond,
body=body,
loop_vars=init_loop_vars,
back_prop=False)
i, _, _, _, ys_array = final_loop_vars
sampled_ys = ys_array.gather(tf.range(0, i))
return sampled_ys
def beam_search(self, beam_size):
"""
Strategy:
compute the log_probs - same as with sampling
for sentences that are ended set log_prob(<eos>)=0, log_prob(not eos)=-inf
add previous cost to log_probs
run top k -> (idxs, values)
use values as new costs
divide idxs by num_classes to get state_idxs
use gather to get new states
take the remainder of idxs after num_classes to get new_predicted words
"""
# Initialize loop variables
batch_size = tf.shape(self.init_state)[0]
i = tf.constant(0)
init_ys = -tf.ones(dtype=tf.int32, shape=[batch_size])
init_embs = tf.zeros(dtype=tf.float32, shape=[batch_size,self.embedding_size])
f_min = numpy.finfo(numpy.float32).min
init_cost = [0.] + [f_min]*(beam_size-1) # to force first top k are from first hypo only
init_cost = tf.constant(init_cost, dtype=tf.float32)
init_cost = tf.tile(init_cost, multiples=[batch_size // beam_size])
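# Each source sentence is replicated beam_size times; starting all but the first
# replica at -inf cost makes the first top_k draw come from a single hypothesis.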
ys_array = tf.TensorArray(
dtype=tf.int32,
size=self.translation_maxlen,
clear_after_read=True,
name='y_sampled_array')
p_array = tf.TensorArray(
dtype=tf.int32,
size=self.translation_maxlen,
clear_after_read=True,
name='parent_idx_array')
init_loop_vars = [i, self.init_state, init_ys, init_embs, init_cost, ys_array, p_array]
# Prepare cost matrix for completed sentences -> Prob(EOS) = 1 and Prob(x) = 0
eos_log_probs = tf.constant(
[[0.] + ([f_min]*(self.target_vocab_size - 1))],
dtype=tf.float32)
eos_log_probs = tf.tile(eos_log_probs, multiples=[batch_size,1])
def cond(i, states, prev_ys, prev_embs, cost, ys_array, p_array):
return tf.logical_and(
tf.less(i, self.translation_maxlen),
tf.reduce_any(tf.not_equal(prev_ys, 0)))
def body(i, states, prev_ys, prev_embs, cost, ys_array, p_array):
#If ensemble decoding is necessary replace with for loop and do model[i].{grustep1,attstep,...}
new_states1 = self.grustep1.forward(states, prev_embs)
att_ctx = self.attstep.forward(new_states1)
new_states2 = self.grustep2.forward(new_states1, att_ctx)
logits = self.predictor.get_logits(prev_embs, new_states2, att_ctx, multi_step=False)
log_probs = tf.nn.log_softmax(logits) # shape (batch, vocab_size)
# set cost of EOS to zero for completed sentences so that they are in top k
# Need to make sure only EOS is selected because a completed sentence might
# kill ongoing sentences
log_probs = tf.where(tf.equal(prev_ys, 0), eos_log_probs, log_probs)
all_costs = log_probs + tf.expand_dims(cost, axis=1) # TODO: you might be getting NaNs here since -inf is in log_probs
all_costs = tf.reshape(all_costs, shape=[-1, self.target_vocab_size * beam_size])
values, indices = tf.nn.top_k(all_costs, k=beam_size) #the sorted option is by default True, is this needed?
new_cost = tf.reshape(values, shape=[batch_size])
offsets = tf.range(
start = 0,
delta = beam_size,
limit = batch_size,
dtype=tf.int32)
offsets = tf.expand_dims(offsets, axis=1)
survivor_idxs = (indices // self.target_vocab_size) + offsets
new_ys = indices % self.target_vocab_size
survivor_idxs = tf.reshape(survivor_idxs, shape=[batch_size])
new_ys = tf.reshape(new_ys, shape=[batch_size])
new_embs = self.y_emb_layer.forward(new_ys)
new_states = tf.gather(new_states2, indices=survivor_idxs)
new_cost = tf.where(tf.equal(new_ys, 0), tf.abs(new_cost), new_cost)
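# Finished hypotheses (new_ys == 0) get their cost flipped positive so they outrank
# ongoing (negative log-prob) ones in later top_k calls and survive in the beam;
# tf.abs at the end recovers the NLL either way.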
ys_array = ys_array.write(i, value=new_ys)
p_array = p_array.write(i, value=survivor_idxs)
return i+1, new_states, new_ys, new_embs, new_cost, ys_array, p_array
final_loop_vars = tf.while_loop(
cond=cond,
body=body,
loop_vars=init_loop_vars,
back_prop=False)
i, _, _, _, cost, ys_array, p_array = final_loop_vars
indices = tf.range(0, i)
sampled_ys = ys_array.gather(indices)
parents = p_array.gather(indices)
cost = tf.abs(cost) #to get negative-log-likelihood
return sampled_ys, parents, cost
def score(self, y):
with tf.name_scope("y_embeddings_layer"):
y_but_last = tf.slice(y, [0,0], [tf.shape(y)[0]-1, -1])
y_embs = self.y_emb_layer.forward(y_but_last)
y_embs = tf.pad(y_embs,
mode='CONSTANT',
paddings=[[1,0],[0,0],[0,0]]) # prepend zeros
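# Teacher forcing: the target sequence is shifted right by one step (zeros prepended),
# so the logits at position t are predicted from embeddings of y_{<t}.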
init_attended_context = tf.zeros([tf.shape(self.init_state)[0], self.state_size*2])
init_state_att_ctx = (self.init_state, init_attended_context)
gates_x, proposal_x = self.grustep1.precompute_from_x(y_embs)
def step_fn(prev, x):
prev_state = prev[0]
prev_att_ctx = prev[1]
gates_x2d = x[0]
proposal_x2d = x[1]
state = self.grustep1.forward(
prev_state,
gates_x=gates_x2d,
proposal_x=proposal_x2d)
att_ctx = self.attstep.forward(state)
state = self.grustep2.forward(state, att_ctx)
#TODO: write att_ctx to tensorArray instead of having it as output of scan?
return (state, att_ctx)
states, attended_states = RecurrentLayer(
initial_state=init_state_att_ctx,
step_fn=step_fn).forward((gates_x, proposal_x))
logits = self.predictor.get_logits(y_embs, states, attended_states, multi_step=True)
return logits
class Predictor(object):
def __init__(self, config):
with tf.name_scope("prev_emb_to_hidden"):
self.prev_emb_to_hidden = FeedForwardLayer(
in_size=config.embedding_size,
out_size=config.embedding_size,
non_linearity=lambda y: y)
with tf.name_scope("state_to_hidden"):
self.state_to_hidden = FeedForwardLayer(
in_size=config.state_size,
out_size=config.embedding_size,
non_linearity=lambda y: y)
with tf.name_scope("attended_context_to_hidden"):
self.att_ctx_to_hidden = FeedForwardLayer(
in_size=2*config.state_size,
out_size=config.embedding_size,
non_linearity=lambda y: y)
with tf.name_scope("hidden_to_logits"):
self.hidden_to_logits = FeedForwardLayer(
in_size=config.embedding_size,
out_size=config.target_vocab_size,
non_linearity=lambda y: y)
def get_logits(self, y_embs, states, attended_states, multi_step=True):
with tf.name_scope("prev_emb_to_hidden"):
hidden_emb = self.prev_emb_to_hidden.forward(y_embs, input_is_3d=multi_step)
with tf.name_scope("state_to_hidden"):
hidden_state = self.state_to_hidden.forward(states, input_is_3d=multi_step)
with tf.name_scope("attended_context_to_hidden"):
hidden_att_ctx = self.att_ctx_to_hidden.forward(attended_states,input_is_3d=multi_step)
hidden = hidden_emb + hidden_state + hidden_att_ctx
hidden = tf.tanh(hidden)
with tf.name_scope("hidden_to_logits"):
logits = self.hidden_to_logits.forward(hidden, input_is_3d=multi_step)
return logits
class Encoder(object):
def __init__(self, config):
with tf.name_scope("embedding"):
self.emb_layer = EmbeddingLayer(
config.source_vocab_size,
config.embedding_size)
if config.use_layer_norm:
GRUctor = GRUStepWithNormalization
else:
GRUctor = GRUStep
with tf.name_scope("forwardEncoder"):
self.gru_forward = GRUctor(
input_size=config.embedding_size,
state_size=config.state_size)
with tf.name_scope("backwardEncoder"):
self.gru_backward = GRUctor(
input_size=config.embedding_size,
state_size=config.state_size)
self.state_size = config.state_size
def get_context(self, x, x_mask):
with tf.name_scope("embedding"):
embs = self.emb_layer.forward(x)
embs_reversed = tf.reverse(embs, axis=[0], name='reverse_embeddings')
batch_size = tf.shape(x)[1]
init_state = tf.zeros(shape=[batch_size, self.state_size], dtype=tf.float32)
with tf.name_scope("forwardEncoder"):
gates_x, proposal_x = self.gru_forward.precompute_from_x(embs)
def step_fn(prev_state, x):
gates_x2d, proposal_x2d = x
return self.gru_forward.forward(
prev_state,
gates_x=gates_x2d,
proposal_x=proposal_x2d)
states = RecurrentLayer(
initial_state=init_state,
step_fn = step_fn).forward((gates_x, proposal_x))
with tf.name_scope("backwardEncoder"):
gates_x, proposal_x = self.gru_backward.precompute_from_x(embs_reversed)
def step_fn(prev_state, x):
gates_x2d, proposal_x2d, mask = x
new_state = self.gru_backward.forward(
prev_state,
gates_x=gates_x2d,
proposal_x=proposal_x2d)
new_state *= mask # batch x 1
# first couple of states of reversed encoder should be zero
# this is why we need to multiply by mask
# this way, when the reversed encoder reaches actual words
# the state will be zeros and not some accumulated garbage
return new_state
x_mask_r = tf.reverse(x_mask, axis=[0])
x_mask_r = tf.expand_dims(x_mask_r, axis=[2]) #seqLen x batch x 1
states_reversed = RecurrentLayer(
initial_state=init_state,
step_fn = step_fn).forward((gates_x, proposal_x, x_mask_r))
states_reversed = tf.reverse(states_reversed, axis=[0])
concat_states = tf.concat([states, states_reversed], axis=2)
return concat_states
class StandardModel(object):
def __init__(self, config):
#variable dimensions
seqLen = None
batch_size = None
self.x = tf.placeholder(
dtype=tf.int32,
name='x',
shape=(seqLen, batch_size))
self.x_mask = tf.placeholder(
dtype=tf.float32,
name='x_mask',
shape=(seqLen, batch_size))
self.y = tf.placeholder(
dtype=tf.int32,
name='y',
shape=(seqLen, batch_size))
self.y_mask = tf.placeholder(
dtype=tf.float32,
name='y_mask',
shape=(seqLen, batch_size))
with tf.name_scope("encoder"):
self.encoder = Encoder(config)
ctx = self.encoder.get_context(self.x, self.x_mask)
with tf.name_scope("decoder"):
self.decoder = Decoder(config, ctx, self.x_mask)
self.logits = self.decoder.score(self.y)
with tf.name_scope("loss"):
self.loss_layer = Masked_cross_entropy_loss(self.y, self.y_mask)
self.loss_per_sentence = self.loss_layer.forward(self.logits)
self.mean_loss = tf.reduce_mean(self.loss_per_sentence, keep_dims=False)
#with tf.name_scope("optimizer"):
self.optimizer = tf.train.AdamOptimizer(learning_rate=config.learning_rate)
self.t = tf.Variable(0, name='time', trainable=False, dtype=tf.int32)
grad_vars = self.optimizer.compute_gradients(self.mean_loss)
grads, varss = zip(*grad_vars)
clipped_grads, global_norm = tf.clip_by_global_norm(grads, clip_norm=config.clip_c)
# Might be interesting to see how the global norm changes over time, attach a summary?
grad_vars = zip(clipped_grads, varss)
self.apply_grads = self.optimizer.apply_gradients(grad_vars, global_step=self.t)
self.sampled_ys = None
self.beam_size, self.beam_ys, self.parents, self.cost = None, None, None, None
def get_score_inputs(self):
return self.x, self.x_mask, self.y, self.y_mask
def get_loss(self):
return self.loss_per_sentence
def get_mean_loss(self):
return self.mean_loss
def get_global_step(self):
return self.t
def get_apply_grads(self):
return self.apply_grads
def _get_samples(self):
if self.sampled_ys is None:
self.sampled_ys = self.decoder.sample()
return self.sampled_ys
def sample(self, session, x_in, x_mask_in):
sampled_ys = self._get_samples()
feeds = {self.x : x_in, self.x_mask : x_mask_in}
sampled_ys_out = session.run(sampled_ys, feed_dict=feeds)
sampled_ys_out = sampled_ys_out.T
samples = []
for sample in sampled_ys_out:
sample = numpy.trim_zeros(list(sample), trim='b')
sample.append(0)
samples.append(sample)
return samples
def _get_beam_search_outputs(self, beam_size):
if beam_size != self.beam_size:
self.beam_size = beam_size
self.beam_ys, self.parents, self.cost = self.decoder.beam_search(beam_size)
return self.beam_ys, self.parents, self.cost
def beam_search(self, session, x_in, x_mask_in, beam_size):
# x_in, x_mask_in are numpy arrays with shape (seqLen, batch)
# change init_state, context, context_in_attention_layer
x_in = numpy.repeat(x_in, repeats=beam_size, axis=1)
x_mask_in = numpy.repeat(x_mask_in, repeats=beam_size, axis=1)
feeds = {self.x : x_in, self.x_mask : x_mask_in}
beam_ys, parents, cost = self._get_beam_search_outputs(beam_size)
beam_ys_out, parents_out, cost_out = session.run(
[beam_ys, parents, cost],
feed_dict=feeds)
hypotheses = self._reconstruct(beam_ys_out, parents_out, cost_out, beam_size)
return hypotheses
def _reconstruct(self, ys, parents, cost, beam_size):
#ys.shape = parents.shape = (seqLen, beam_size x batch_size)
# output: hypothesis list with shape (batch_size, beam_size, (sequence, cost))
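# The beam stores a backpointer trellis: each hypothesis is rebuilt by walking the
# parent indices backwards from the final time step, then reversing the token list.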
def reconstruct_single(ys, parents, hypoId, hypo, pos):
if pos < 0:
hypo.reverse()
return hypo
else:
hypo.append(ys[pos, hypoId])
hypoId = parents[pos, hypoId]
return reconstruct_single(ys, parents, hypoId, hypo, pos - 1)
hypotheses = []
batch_size = ys.shape[1] // beam_size
pos = ys.shape[0] - 1
for batch in range(batch_size):
hypotheses.append([])
for beam in range(beam_size):
i = batch*beam_size + beam
hypo = reconstruct_single(ys, parents, i, [], pos)
hypo = numpy.trim_zeros(hypo, trim='b') # b for back
hypo.append(0)
hypotheses[batch].append((hypo, cost[i]))
return hypotheses
``` |
{
"source": "JozefStefanInstitute/ew-shopp-public",
"score": 2
} |
#### File: ew-shopp-public/keyword_clustering/categoriser.py
```python
import cluster_keywords as ck
import json
import csv
import re
import argparse
import pdb
from tqdm import tqdm
def main_categorise(args):
# load language model
ft_model_filename = args.path_model
print(f"Loading language model from: {ft_model_filename}")
model = ck.load_FT_model(ft_model_filename)
print("Loaded embeddings!")
# build embedder
embedder_parameters_filename = args.path_embedder_parameters
print(f"Loading embedder parameters from: {embedder_parameters_filename}")
de_embedder_parameters_json = open(embedder_parameters_filename).read()
de_embedder = ck.SIFEmbedder(model)
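# SIFEmbedder presumably implements smooth-inverse-frequency sentence embeddings;
# its fitted parameters are loaded from the JSON file rather than re-estimated here.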
de_embedder.load(de_embedder_parameters_json)
print("Built embedder!")
# get categories
categories_filename = args.path_categories
print(f"Loading categories from: {categories_filename}")
categories = ck.load_csv_column(categories_filename, args.categories_column, delimiter=args.categories_delimiter)
category_ids = ck.load_csv_column(categories_filename, args.categories_id_column, delimiter=args.categories_delimiter)
print(f'Loaded {len(categories)} categories.')
# build categorizer
categorizer = ck.Categorizer(de_embedder)
categorizer.fit(categories, category_ids=category_ids)
print("Categorizer built!")
# get keywords
keyword_filename = args.path_keywords
print(f"Loading keywords from: {keyword_filename}")
keywords = ck.load_csv_column(
keyword_filename,
args.keywords_column,
delimiter = args.keywords_delimiter)
print(f'Loaded {len(keywords)} keywords.')
# assigning closest 'n_categories' to each keyword
if args.mode == "categorise_keywords":
n_categories = args.n_categories
keyword_categories = categorizer.categorize(keywords, n_categories=n_categories)
output_filename = args.path_output
print(f"Writing categories to: {output_filename}")
with open(output_filename, "w", encoding="utf8") as outfile:
outwriter = csv.writer(outfile, delimiter=",", quotechar='"')
# write header
out_header = ["keyword"]
for cat_i in range(1, n_categories + 1):
out_header.extend([f"category{cat_i}_id", f"category{cat_i}", f"category{cat_i}_distance"])
outwriter.writerow(out_header)
# write results row by row
for keyword, categories in zip(keywords, keyword_categories):
row = [f"{keyword}"]
for category, id, distance in categories:
row.extend([f"{id}", f"{category}", f"{distance}"])
outwriter.writerow(row)
# assigning closest 'n_keywords' to each category
elif args.mode == "relevance_to_category":
n_keywords = args.n_keywords
keyword_categories = categorizer.closest_keywords(keywords, n_keywords=n_keywords)
output_filename = args.path_output
print(f"Writing categories to: {output_filename}")
with open(output_filename, "w", encoding="utf8") as outfile:
outwriter = csv.writer(outfile, delimiter="\t", quotechar='"')
# write header
out_header = ["keyword", "categories"]
outwriter.writerow(out_header)
# write results row by row
for keyword, categories in tqdm(zip(keywords, keyword_categories), desc='Writing to file'):
row = [f"{keyword}"]
if len(categories) == 0:
row += ["none"]
else:
row += [",".join([f"{category}({id})" for category, id, distance in categories])]
outwriter.writerow(row)
print("DONE!")
if __name__ == '__main__':
# parse command line arguments
argparser = argparse.ArgumentParser(description='Tool for categorising keywords using FastText models.')
argparser.add_argument('mode', type=str, choices=['categorise_keywords', 'relevance_to_category'], help='Categorization mode.')
argparser.add_argument('path_model', type=str, help='Path to the FastText model binary file.')
argparser.add_argument('path_embedder_parameters', type=str, help='Path to the embedder parameters json file.')
argparser.add_argument('path_categories', type=str, help='Path to the categories file.')
argparser.add_argument('path_keywords', type=str, help='Path to the input keywords csv file.')
argparser.add_argument('path_output', type=str, help='Path to the output csv file.')
argparser.add_argument('--n_categories', type=int, default=3, help='Number of closest categories to return. (default: 3)')
argparser.add_argument('--n_keywords', type=int, default=1000, help='Number of closest keywords to return. (default: 1000)')
argparser.add_argument('--categories_delimiter', '-cd', type=str, default=',', help='Delimiter used in the categories csv file. (default: \',\')')
argparser.add_argument('--categories_column', '-cc', type=str, default='Category', help='Name of column containing categories in the categories csv file. (default: \'Category\')')
argparser.add_argument('--categories_id_column', '-cic', type=str, default='CategoryID', help='Name of column containing category ids in the categories csv file. (default: \'CategoryID\')')
argparser.add_argument('--keywords_delimiter', '-kd', type=str, default=',', help='Delimiter used in the keywords csv file. (default: \',\')')
argparser.add_argument('--keywords_column', '-kc', type=str, default='Keyword', help='Name of column containing keywords in the keywords csv file. (default: \'Keyword\')')
args = argparser.parse_args()
main_categorise(args)
``` |
{
"source": "JozeHladnik/voila",
"score": 2
} |
#### File: tests/app/syntax_error_test.py
```python
import pytest
@pytest.fixture
def voila_args(notebook_directory, voila_args_extra):
return ['--VoilaTest.root_dir=%r' % notebook_directory] + voila_args_extra
async def test_syntax_error(capsys, http_server_client, syntax_error_notebook_url):
response = await http_server_client.fetch(syntax_error_notebook_url)
assert response.code == 200
output = response.body.decode('utf-8')
assert 'There was an error when executing cell' in output
assert 'This should not be executed' not in output
``` |
{
"source": "Jozelito/Raster2TXT",
"score": 2
} |
#### File: Raster2TXT/import_scripts/gpcc2gcmon.py
```python
import sys
from osgeo import gdal, ogr, osr
from osgeo.gdalconst import GA_ReadOnly, GA_Update
# Helper to rewrite the completed-percentage message in place
def restart_line():
sys.stdout.write('\r')
sys.stdout.flush()
# Main function
def gpcc2gcm_win(pg_connection_string, mes, agno):
# Register GDAL drivers
gdal.AllRegister()
# PostGIS driver, needed to create the table
driver = ogr.GetDriverByName('PostgreSQL')
# The SRS is assumed to be EPSG:4326
srs = osr.SpatialReference()
srs.ImportFromEPSG(4326)
# Read the last band of the dataset (last month)
fuente = "C:/Cualquier_directorio" + mes + "_" + agno + "/first_guess_monthly_" + agno + "_" + mes + ".nc"
dataset = gdal.Open( fuente, GA_ReadOnly )
subdatasets = dataset.GetSubDatasets()
subdataset_p = subdatasets[0][0]
sds = gdal.Open(subdataset_p, gdal.GA_ReadOnly)
cols = sds.RasterXSize
rows = sds.RasterYSize
# Create the PostGIS table for our data in the public schema
table_name = 'gpcc_p_' + mes + agno
pg_ds = ogr.Open(pg_connection_string, GA_Update)
pg_layer = pg_ds.CreateLayer(table_name, srs=srs, geom_type=ogr.wkbPoint,
options = [
'GEOMETRY_NAME=geom',  # Name of the geometry column
'OVERWRITE=YES',  # Drop and recreate the table
'SCHEMA=public',  # Schema name
])
print('Created table %s.' % table_name)
# Create the "mes", "agno" and "p_mes" fields in the table
fd_mes = ogr.FieldDefn('mes', ogr.OFTInteger)
pg_layer.CreateField(fd_mes)
print('Created field mes.')
fd_agno = ogr.FieldDefn('agno', ogr.OFTInteger)
pg_layer.CreateField(fd_agno)
print('Created field agno.')
fd_temp = ogr.FieldDefn('p_mes', ogr.OFTReal)
pg_layer.CreateField(fd_temp)
print('Created field p_mes.')
# Georeference information for a global 1-degree grid
pixelWidth = 1
pixelHeight = 1
xOrigin = -179.5
yOrigin = 89.5
# Iterate over rows and columns and derive x and y
data = []
band = sds.GetRasterBand(1)
band_data = band.ReadAsArray(0, 0, cols, rows)
data.append(band_data)
id_n = 0
for r in range(rows):
y = yOrigin - (r * pixelHeight)
for c in range(cols):
x = xOrigin + (c * pixelWidth)
# For each cell, add a point to the PostGIS layer
point_wkt = 'POINT(%s %s)' % (x, y)
point = ogr.CreateGeometryFromWkt(point_wkt)
featureDefn = pg_layer.GetLayerDefn()
feature = ogr.Feature(featureDefn)
# Read the cell value and store it in the "p_mes" field
value = float(data[0][r, c])
# Handle nulls: negative values mark missing data
if value < 0:
feature.UnsetField('p_mes')
else:
feature.SetField('p_mes', value)
feature.SetField('mes', mes)
feature.SetField('agno', agno)
id_n += 1
porcent = id_n * 100 // (rows * cols)
sys.stdout.write('percentage completed: ' + str(porcent))
sys.stdout.flush()
restart_line()
# print('Saving value %s for variable p_mes at point x: %s, y: %s' % (value, x, y))
# Set the feature geometry and finish creating it
feature.SetGeometry(point)
pg_layer.CreateFeature(feature)
if __name__ == '__main__':
# The user must provide exactly three arguments: the GDAL PostGIS connection string, month and year
if len(sys.argv) != 4:
print("usage: <GDAL PostGIS connection string> <mes> <agno>")
raise SystemExit
pg_connection_string = sys.argv[1]
mes = sys.argv[2]
agno = sys.argv[3]
gpcc2gcm_win(pg_connection_string, mes, agno)
raise SystemExit
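# Illustrative invocation (hypothetical connection string; GDAL PostGIS
# connection strings start with "PG:"):
#   python gpcc2gcmon.py "PG:host=localhost dbname=gis user=postgres" 01 2020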
``` |
{
"source": "jozhalaj/gateway",
"score": 3
} |
#### File: gateway/tools/ba-oscillation-detector.py
```python
import sys
import argparse
import json
import logging
import paho.mqtt.client as mqtt
CLIENT_NAME = 'OscillationDetector'
BROKER_ADDRESS = '127.0.0.1'
DEFAULT_TOPIC = 'BeeeOnOut'
PREFIX_BLUETOOTH = '0xa6'
MAX_SAME_MEASUREMENTS = 5
class DeviceData:
def __init__(self, deviceID, availability):
self._deviceID = deviceID
self._availability = availability
self._sameMeasurements = 1
def updateAvailability(self, availability):
if availability == 1 and self._availability == 0 and self._sameMeasurements >= MAX_SAME_MEASUREMENTS:
logging.info("device %s is available" % (self._deviceID))
if self._availability != availability:
if self._sameMeasurements in range(1, MAX_SAME_MEASUREMENTS):
logging.info("presence of the device %s is oscillating(change occurred after %s measurements)" %
(self._deviceID, self._sameMeasurements))
self._availability = availability
self._sameMeasurements = 1
else:
self._sameMeasurements += 1
if self._availability == 0 and self._sameMeasurements == MAX_SAME_MEASUREMENTS:
logging.info("device %s is unavailable" % (self._deviceID))
def on_message(client, devices, message):
data = json.loads(message.payload.decode('utf-8'))
deviceID = data["device_id"]
if deviceID[0:4] != PREFIX_BLUETOOTH:
return
availability = data["data"][0]["value"]
if deviceID not in devices.keys():
devices[deviceID] = DeviceData(deviceID, availability)
else:
devices[deviceID].updateAvailability(availability)
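# Illustrative message payload this handler expects (shape inferred from the
# parsing above; the exact field names are assumptions about the gateway's output):
#   {"device_id": "0xa6...", "data": [{"value": 1}]}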
if __name__ == "__main__":
logging.basicConfig(level=logging.DEBUG)
parser = argparse.ArgumentParser(description=sys.modules[__name__].__doc__)
parser.add_argument("--address", metavar="BROKER_ADDRESS",
help="Broker address from which messages will be received.")
parser.add_argument("--topic", metavar="MQTT_TOPIC",
help="From this topic will be recieved bluetooth device reports.")
args = parser.parse_args()
address = args.address if args.address is not None else BROKER_ADDRESS
topic = args.topic if args.topic is not None else DEFAULT_TOPIC
devices = dict()
logging.info("creating MQTT client with name %s" % (CLIENT_NAME));
client = mqtt.Client(CLIENT_NAME)
client.user_data_set(devices)
client.on_message = on_message
logging.info("connecting to broker on address %s" % (address));
client.connect(address)
logging.info("subscribing to topic %s" % (topic));
client.subscribe(topic)
logging.info("starting to process messages");
try:
client.loop_forever()
except KeyboardInterrupt:
client.disconnect()
``` |
{
"source": "jozhang97/CenterNet2",
"score": 2
} |
#### File: centernet/data/video_dataset_dataloader.py
```python
import copy
import logging
import numpy as np
import operator
import torch
import torch.utils.data
import json
from detectron2.utils.comm import get_world_size
from torch.utils.data.sampler import BatchSampler, Sampler
from detectron2.data import samplers
from detectron2.data.common import DatasetFromList, MapDataset
from detectron2.data.dataset_mapper import DatasetMapper
from detectron2.data.build import get_detection_dataset_dicts, build_batch_data_loader
from detectron2.data.samplers import TrainingSampler, RepeatFactorTrainingSampler
from detectron2.data.samplers import InferenceSampler
from detectron2.data.build import worker_init_reset_seed, print_instances_class_histogram
from detectron2.data.build import filter_images_with_only_crowd_annotations
from detectron2.data.build import filter_images_with_few_keypoints
from detectron2.data.build import check_metadata_consistency
from detectron2.data.catalog import MetadataCatalog, DatasetCatalog
from detectron2.utils import comm
import itertools
import math
from collections import defaultdict
from typing import Optional
from .custom_dataset_dataloader import MultiDatasetSampler
from .video_dataset_loader import get_video_dataset_dicts
def single_batch_collator(batch):
"""
A batch collator that does nothing.
"""
assert len(batch) == 1
return batch[0]
def build_video_train_loader(cfg, mapper):
"""
Modified from detectron2.data.build.build_custom_train_loader, but supports
different samplers
"""
dataset_dicts = get_video_dataset_dicts(
cfg.DATASETS.TRAIN,
filter_empty=cfg.DATALOADER.FILTER_EMPTY_ANNOTATIONS,
gen_inst_id=True,
)
sizes = [0 for _ in range(len(cfg.DATASETS.TRAIN))]
for d in dataset_dicts:
sizes[d['dataset_source']] += 1
dataset = DatasetFromList(dataset_dicts, copy=False)
dataset = MapDataset(dataset, mapper)
sampler_name = cfg.DATALOADER.SAMPLER_TRAIN
logger = logging.getLogger(__name__)
logger.info("Using training videos {}".format(sampler_name))
if len(cfg.DATASETS.TRAIN) > 1:
assert sampler_name == 'MultiDatasetSampler'
if sampler_name == "TrainingSampler":
sampler = TrainingSampler(len(dataset))
elif sampler_name == "MultiDatasetSampler":
sampler = MultiDatasetSampler(cfg, sizes, dataset_dicts)
elif sampler_name == "RepeatFactorTrainingSampler":
repeat_factors = RepeatFactorTrainingSampler.repeat_factors_from_category_frequency(
dataset_dicts, cfg.DATALOADER.REPEAT_THRESHOLD
)
sampler = RepeatFactorTrainingSampler(repeat_factors)
else:
raise ValueError("Unknown training sampler: {}".format(sampler_name))
world_size = get_world_size()
batch_size = cfg.SOLVER.IMS_PER_BATCH // world_size
assert batch_size == 1
batch_sampler = torch.utils.data.sampler.BatchSampler(
sampler, batch_size, drop_last=True
)
return torch.utils.data.DataLoader(
dataset,
num_workers=cfg.DATALOADER.NUM_WORKERS,
batch_sampler=batch_sampler,
collate_fn=single_batch_collator,
worker_init_fn=worker_init_reset_seed,
)
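# Illustrative usage (hypothetical cfg; mapper is any detectron2-style mapper):
#   loader = build_video_train_loader(cfg, DatasetMapper(cfg, is_train=True))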
def build_video_test_loader(cfg, dataset_name, mapper):
"""
"""
assert comm.is_main_process()
dataset = get_video_dataset_dicts(
[dataset_name],
filter_empty=False,
)
dataset = DatasetFromList(dataset, copy=False)
dataset = MapDataset(dataset, mapper)
sampler = SingleGPUInferenceSampler(len(dataset))
batch_sampler = torch.utils.data.sampler.BatchSampler(sampler, 1, drop_last=False)
data_loader = torch.utils.data.DataLoader(
dataset,
num_workers=0,
batch_sampler=batch_sampler,
collate_fn=single_batch_collator,
)
return data_loader
def split_video_dataset(dataset, test_len, stride):
assert stride == test_len
ret = []
print('Splitting {} videos into shorter clips'.format(len(dataset)))
for video in dataset:
video_len = len(video['images'])
st, ed = 0, 0
num_clips = (video_len - 1) // test_len + 1
for _ in range(num_clips):
ed = min(st + test_len, video_len)
clip = {
'video_id': video['video_id'],
'dataset_source': video['dataset_source'],
'images': copy.deepcopy(video['images'][st: ed]),
}
ret.append(clip)
st += stride
print('#clips', len(ret))
return ret
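# Illustrative behaviour (assuming test_len == stride == 4): a video with 10
# frames is split into clips of 4, 4 and 2 frames, each keeping its video_id.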
class SingleGPUInferenceSampler(Sampler):
def __init__(self, size: int):
"""
self._world_size = 1
self._rank = 0
"""
self._size = size
assert size > 0
# self._rank = comm.get_rank()
self._rank = 0
# self._world_size = comm.get_world_size()
self._world_size = 1
shard_size = (self._size - 1) // self._world_size + 1
begin = shard_size * self._rank
end = min(shard_size * (self._rank + 1), self._size)
self._local_indices = range(begin, end)
def __iter__(self):
yield from self._local_indices
def __len__(self):
return len(self._local_indices)
```
#### File: trackeval/metrics/hota.py
```python
import os
import numpy as np
from scipy.optimize import linear_sum_assignment
from ._base_metric import _BaseMetric
from .. import _timing
class HOTA(_BaseMetric):
"""Class which implements the HOTA metrics.
See: https://link.springer.com/article/10.1007/s11263-020-01375-2
"""
def __init__(self):
super().__init__()
self.plottable = True
self.array_labels = np.arange(0.05, 0.99, 0.05)
self.integer_array_fields = ['HOTA_TP', 'HOTA_FN', 'HOTA_FP']
self.float_array_fields = ['HOTA', 'DetA', 'AssA', 'DetRe', 'DetPr', 'AssRe', 'AssPr', 'LocA', 'RHOTA']
self.float_fields = ['HOTA(0)', 'LocA(0)', 'HOTALocA(0)']
self.fields = self.float_array_fields + self.integer_array_fields + self.float_fields
self.summary_fields = self.float_array_fields + self.float_fields
@_timing.time
def eval_sequence(self, data):
"""Calculates the HOTA metrics for one sequence"""
# Initialise results
res = {}
res_per_gt = {} # metric_name: alpha: gt_id
res_per_pr = {}
for field in self.float_array_fields + self.integer_array_fields:
res[field] = np.zeros((len(self.array_labels)), dtype=float)
res_per_gt[field] = np.zeros((len(self.array_labels), data['num_gt_ids']), dtype=float)
res_per_pr[field] = np.zeros((len(self.array_labels), data['num_tracker_ids']), dtype=float)
for field in self.float_fields:
res[field] = 0
# Return result quickly if tracker or gt sequence is empty
if data['num_tracker_dets'] == 0:
res['HOTA_FN'] = data['num_gt_dets'] * np.ones((len(self.array_labels)), dtype=float)
res['LocA'] = np.ones((len(self.array_labels)), dtype=float)
res['LocA(0)'] = 1.0
return res
if data['num_gt_dets'] == 0:
res['HOTA_FP'] = data['num_tracker_dets'] * np.ones((len(self.array_labels)), dtype=float)
res['LocA'] = np.ones((len(self.array_labels)), dtype=float)
res['LocA(0)'] = 1.0
return res
# Variables counting global association
potential_matches_count = np.zeros((data['num_gt_ids'], data['num_tracker_ids']))
gt_id_count = np.zeros((data['num_gt_ids'], 1))
tracker_id_count = np.zeros((1, data['num_tracker_ids']))
# First loop through each timestep and accumulate global track information.
for t, (gt_ids_t, tracker_ids_t) in enumerate(zip(data['gt_ids'], data['tracker_ids'])):
# Count the potential matches between ids in each timestep
# These are normalised, weighted by the match similarity.
similarity = data['similarity_scores'][t]
sim_iou_denom = similarity.sum(0)[np.newaxis, :] + similarity.sum(1)[:, np.newaxis] - similarity
sim_iou = np.zeros_like(similarity)
sim_iou_mask = sim_iou_denom > 0 + np.finfo('float').eps
sim_iou[sim_iou_mask] = similarity[sim_iou_mask] / sim_iou_denom[sim_iou_mask]
potential_matches_count[gt_ids_t[:, np.newaxis], tracker_ids_t[np.newaxis, :]] += sim_iou
# Calculate the total number of dets for each gt_id and tracker_id.
gt_id_count[gt_ids_t] += 1
tracker_id_count[0, tracker_ids_t] += 1
# Calculate overall jaccard alignment score (before unique matching) between IDs
global_alignment_score = potential_matches_count / (gt_id_count + tracker_id_count - potential_matches_count)
matches_counts = [np.zeros_like(potential_matches_count) for _ in self.array_labels]
# Calculate scores for each timestep
for t, (gt_ids_t, tracker_ids_t) in enumerate(zip(data['gt_ids'], data['tracker_ids'])):
# Deal with the case that there are no gt_det/tracker_det in a timestep.
if len(gt_ids_t) == 0:
for a, alpha in enumerate(self.array_labels):
res['HOTA_FP'][a] += len(tracker_ids_t)
res_per_pr['HOTA_FP'][:, tracker_ids_t] += 1
continue
if len(tracker_ids_t) == 0:
for a, alpha in enumerate(self.array_labels):
res['HOTA_FN'][a] += len(gt_ids_t)
res_per_gt['HOTA_FN'][:, gt_ids_t] += 1
continue
# Get matching scores between pairs of dets for optimizing HOTA
similarity = data['similarity_scores'][t]
score_mat = global_alignment_score[gt_ids_t[:, np.newaxis], tracker_ids_t[np.newaxis, :]] * similarity
# Hungarian algorithm to find best matches
match_rows, match_cols = linear_sum_assignment(-score_mat)
# Calculate and accumulate basic statistics
for a, alpha in enumerate(self.array_labels):
actually_matched_mask = similarity[match_rows, match_cols] >= alpha - np.finfo('float').eps
alpha_match_rows = match_rows[actually_matched_mask]
alpha_match_cols = match_cols[actually_matched_mask]
num_matches = len(alpha_match_rows)
res['HOTA_TP'][a] += num_matches
res['HOTA_FN'][a] += len(gt_ids_t) - num_matches
res['HOTA_FP'][a] += len(tracker_ids_t) - num_matches
if num_matches > 0:
res['LocA'][a] += sum(similarity[alpha_match_rows, alpha_match_cols])
matches_counts[a][gt_ids_t[alpha_match_rows], tracker_ids_t[alpha_match_cols]] += 1
n_tp, n_fn = 0, 0
for i, gt_id in enumerate(gt_ids_t):
if i in alpha_match_rows: # score_mat is gt x pr
res_per_gt['HOTA_TP'][a][gt_id] += 1
n_tp += 1
else:
res_per_gt['HOTA_FN'][a][gt_id] += 1
n_fn += 1
for i, pr_id in enumerate(tracker_ids_t):
if i in alpha_match_cols:
res_per_pr['HOTA_TP'][a][pr_id] += 1
else:
res_per_pr['HOTA_FP'][a][pr_id] += 1
if not (res_per_gt['HOTA_FN'].sum(1) == res['HOTA_FN']).all():
print((num_matches, n_tp), (len(gt_ids_t) - num_matches, n_fn))
print(res_per_gt['HOTA_FN'].sum(1), res['HOTA_FN'])
assert (res_per_gt['HOTA_FN'].sum(1) == res['HOTA_FN']).all(), f'{res_per_gt["HOTA_FN"].sum(1)} != {res["HOTA_FN"]}'
assert (res_per_gt['HOTA_TP'].sum(1) == res['HOTA_TP']).all(), f'{res_per_gt["HOTA_TP"].sum(1)} != {res["HOTA_TP"]}'
assert (res_per_pr['HOTA_TP'].sum(1) == res['HOTA_TP']).all(), f'{res_per_pr["HOTA_TP"].sum(1)} != {res["HOTA_TP"]}'
assert (res_per_pr['HOTA_FP'].sum(1) == res['HOTA_FP']).all(), f'{res_per_pr["HOTA_FP"].sum(1)} != {res["HOTA_FP"]}'
# Calculate association scores (AssA, AssRe, AssPr) for the alpha value.
# First calculate scores per gt_id/tracker_id combo and then average over the number of detections.
for a, alpha in enumerate(self.array_labels):
matches_count = matches_counts[a]
ass_a = matches_count / np.maximum(1, gt_id_count + tracker_id_count - matches_count) # ass_iou
# All pred mapped to the same GT track have the same Ass-IOU. AssA is Ass-IOU weighted by track length
res['AssA'][a] = np.sum(matches_count * ass_a) / np.maximum(1, res['HOTA_TP'][a])
res_per_gt['AssA'][a] = np.sum((matches_count * ass_a) / np.maximum(1, res_per_gt['HOTA_TP'][a][:, None]), axis=1)
res_per_pr['AssA'][a] = np.sum((matches_count * ass_a) / np.maximum(1, res_per_pr['HOTA_TP'][a][None]), axis=0)
ass_re = matches_count / np.maximum(1, gt_id_count)
res['AssRe'][a] = np.sum(matches_count * ass_re) / np.maximum(1, res['HOTA_TP'][a])
res_per_gt['AssRe'][a] = np.sum((matches_count * ass_re) / np.maximum(1, res_per_gt['HOTA_TP'][a][:, None]), axis=1)
res_per_pr['AssRe'][a] = np.sum((matches_count * ass_re) / np.maximum(1, res_per_pr['HOTA_TP'][a][None]), axis=0)
ass_pr = matches_count / np.maximum(1, tracker_id_count)
res['AssPr'][a] = np.sum(matches_count * ass_pr) / np.maximum(1, res['HOTA_TP'][a])
res_per_gt['AssPr'][a] = np.sum((matches_count * ass_pr) / np.maximum(1, res_per_gt['HOTA_TP'][a][:, None]), axis=1)
res_per_pr['AssPr'][a] = np.sum((matches_count * ass_pr) / np.maximum(1, res_per_pr['HOTA_TP'][a][None]), axis=0)
# Get AssA, AssRe, AssPr, DetPr, RHOTA per GT track. This is useful to determine failure modes of trackers.
res_per_gt['DetRe'] = res_per_gt['HOTA_TP'] / np.maximum(1, res_per_gt['HOTA_TP'] + res_per_gt['HOTA_FN'])
res_per_gt['RHOTA'] = np.sqrt(res_per_gt['DetRe'] * res_per_gt['AssA']) # good, OWTA - do not penalize FP
res_per_gt['DetRe_mean'] = res_per_gt['DetRe'].mean(0)
res_per_gt['AssA_mean'] = res_per_gt['AssA'].mean(0)
res_per_gt['AssRe_mean'] = res_per_gt['AssRe'].mean(0)
res_per_gt['AssPr_mean'] = res_per_gt['AssPr'].mean(0)
res_per_gt['RHOTA_mean'] = res_per_gt['RHOTA'].mean(0)
res.update({f'GT-{k}': v for k, v in res_per_gt.items()})
# per predicted track
res_per_pr['DetPr'] = res_per_pr['HOTA_TP'] / np.maximum(1, res_per_pr['HOTA_TP'] + res_per_pr['HOTA_FP'])
res_per_pr['DetPr_mean'] = res_per_pr['DetPr'].mean(0)
res_per_pr['AssA_mean'] = res_per_pr['AssA'].mean(0)
res_per_pr['AssRe_mean'] = res_per_pr['AssRe'].mean(0)
res_per_pr['AssPr_mean'] = res_per_pr['AssPr'].mean(0)
res.update({f'PR-{k}': v for k, v in res_per_pr.items()})
if 'raw_gt_ids' in data:
res['raw_gt_ids'] = data['raw_gt_ids']
res['raw_pr_ids'] = data['raw_pr_ids']
elif 'gt_track_ids' in data:
res['raw_gt_ids'] = data['gt_track_ids']
res['raw_pr_ids'] = data['dt_track_ids']
# Calculate final scores
res['LocA'] = np.maximum(1e-10, res['LocA']) / np.maximum(1e-10, res['HOTA_TP'])
res = self._compute_final_fields(res)
return res
def combine_sequences(self, all_res):
"""Combines metrics across all sequences"""
res = {}
for field in self.integer_array_fields:
res[field] = self._combine_sum(all_res, field)
for field in ['AssRe', 'AssPr', 'AssA']:
res[field] = self._combine_weighted_av(all_res, field, res, weight_field='HOTA_TP')
loca_weighted_sum = sum([all_res[k]['LocA'] * all_res[k]['HOTA_TP'] for k in all_res.keys()])
res['LocA'] = np.maximum(1e-10, loca_weighted_sum) / np.maximum(1e-10, res['HOTA_TP'])
res = self._compute_final_fields(res)
return res
def combine_classes_class_averaged(self, all_res):
"""Combines metrics across all classes by averaging over the class values"""
res = {}
for field in self.integer_array_fields:
res[field] = self._combine_sum(
{k: v for k, v in all_res.items()
if (v['HOTA_TP'] + v['HOTA_FN'] + v['HOTA_FP'] > 0 + np.finfo('float').eps).any()}, field)
for field in self.float_fields:
res[field] = np.mean([v[field] for v in all_res.values()
if (v['HOTA_TP'] + v['HOTA_FN'] + v['HOTA_FP'] > 0 + np.finfo('float').eps).any()],
axis=0)
for field in self.float_array_fields:
res[field] = np.mean([v[field] for v in all_res.values()
if (v['HOTA_TP'] + v['HOTA_FN'] + v['HOTA_FP'] > 0 + np.finfo('float').eps).any()],
axis=0)
return res
def combine_classes_det_averaged(self, all_res):
"""Combines metrics across all classes by averaging over the detection values"""
res = {}
for field in self.integer_array_fields:
res[field] = self._combine_sum(all_res, field)
for field in ['AssRe', 'AssPr', 'AssA']:
res[field] = self._combine_weighted_av(all_res, field, res, weight_field='HOTA_TP')
loca_weighted_sum = sum([all_res[k]['LocA'] * all_res[k]['HOTA_TP'] for k in all_res.keys()])
res['LocA'] = np.maximum(1e-10, loca_weighted_sum) / np.maximum(1e-10, res['HOTA_TP'])
res = self._compute_final_fields(res)
return res
@staticmethod
def _compute_final_fields(res):
"""Calculate sub-metric ('field') values which only depend on other sub-metric values.
This function is used both for both per-sequence calculation, and in combining values across sequences.
"""
res['DetRe'] = res['HOTA_TP'] / np.maximum(1, res['HOTA_TP'] + res['HOTA_FN'])
res['DetPr'] = res['HOTA_TP'] / np.maximum(1, res['HOTA_TP'] + res['HOTA_FP'])
res['DetA'] = res['HOTA_TP'] / np.maximum(1, res['HOTA_TP'] + res['HOTA_FN'] + res['HOTA_FP'])
res['HOTA'] = np.sqrt(res['DetA'] * res['AssA'])
res['RHOTA'] = np.sqrt(res['DetRe'] * res['AssA'])
res['HOTA(0)'] = res['HOTA'][0]
res['LocA(0)'] = res['LocA'][0]
res['HOTALocA(0)'] = res['HOTA(0)']*res['LocA(0)']
return res
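# Illustrative worked example (made-up counts): with HOTA_TP=8, HOTA_FN=1 and
# HOTA_FP=1 at some alpha, DetA = 8 / (8 + 1 + 1) = 0.8; if AssA = 0.5 at that
# alpha, then HOTA = sqrt(0.8 * 0.5) ~= 0.632.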
def plot_single_tracker_results(self, table_res, tracker, cls, output_folder):
"""Create plot of results"""
# Only loaded when run to reduce minimum requirements
from matplotlib import pyplot as plt
res = table_res['COMBINED_SEQ']
styles_to_plot = ['r', 'b', 'g', 'b--', 'b:', 'g--', 'g:', 'm']
for name, style in zip(self.float_array_fields, styles_to_plot):
plt.plot(self.array_labels, res[name], style)
plt.xlabel('alpha')
plt.ylabel('score')
plt.title(tracker + ' - ' + cls)
plt.axis([0, 1, 0, 1])
legend = []
for name in self.float_array_fields:
legend += [name + ' (' + str(np.round(np.mean(res[name]), 2)) + ')']
plt.legend(legend, loc='lower left')
out_file = os.path.join(output_folder, cls + '_plot.pdf')
os.makedirs(os.path.dirname(out_file), exist_ok=True)
plt.savefig(out_file)
plt.savefig(out_file.replace('.pdf', '.png'))
plt.clf()
```
#### File: modeling/backbone/checkpoint.py
```python
import io
import os
import os.path as osp
import pkgutil
import time
import warnings
from collections import OrderedDict
from importlib import import_module
from tempfile import TemporaryDirectory
import torch
import torchvision
from torch.optim import Optimizer
from torch.utils import model_zoo
from torch.nn import functional as F
from detectron2.utils.comm import get_rank, get_world_size
def load_state_dict(module, state_dict, strict=False, logger=None):
"""Load state_dict to a module.
This method is modified from :meth:`torch.nn.Module.load_state_dict`.
Default value for ``strict`` is set to ``False`` and the message for
param mismatch will be shown even if strict is False.
Args:
module (Module): Module that receives the state_dict.
state_dict (OrderedDict): Weights.
strict (bool): whether to strictly enforce that the keys
in :attr:`state_dict` match the keys returned by this module's
:meth:`~torch.nn.Module.state_dict` function. Default: ``False``.
logger (:obj:`logging.Logger`, optional): Logger to log the error
message. If not specified, print function will be used.
"""
unexpected_keys = []
all_missing_keys = []
err_msg = []
metadata = getattr(state_dict, '_metadata', None)
state_dict = state_dict.copy()
if metadata is not None:
state_dict._metadata = metadata
# use _load_from_state_dict to enable checkpoint version control
def load(module, prefix=''):
# recursively check parallel module in case that the model has a
# complicated structure, e.g., nn.Module(nn.Module(DDP))
# if is_module_wrapper(module):
# module = module.module
local_metadata = {} if metadata is None else metadata.get(
prefix[:-1], {})
module._load_from_state_dict(state_dict, prefix, local_metadata, True,
all_missing_keys, unexpected_keys,
err_msg)
for name, child in module._modules.items():
if child is not None:
load(child, prefix + name + '.')
load(module)
load = None # break load->load reference cycle
# ignore "num_batches_tracked" of BN layers
missing_keys = [
key for key in all_missing_keys if 'num_batches_tracked' not in key
]
if unexpected_keys:
err_msg.append('unexpected key in source '
f'state_dict: {", ".join(unexpected_keys)}\n')
if missing_keys:
err_msg.append(
f'missing keys in source state_dict: {", ".join(missing_keys)}\n')
# rank, _ = get_dist_info()
rank = get_rank()
if len(err_msg) > 0 and rank == 0:
err_msg.insert(
0, 'The model and loaded state dict do not match exactly\n')
err_msg = '\n'.join(err_msg)
if strict:
raise RuntimeError(err_msg)
elif logger is not None:
logger.warning(err_msg)
else:
print(err_msg)
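# Illustrative usage (hypothetical checkpoint path):
#   load_state_dict(model, torch.load('weights.pth', map_location='cpu'))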
def load_url_dist(url, model_dir=None):
"""In distributed setting, this function only download checkpoint at local
rank 0."""
rank, world_size = get_rank(), get_world_size()
rank = int(os.environ.get('LOCAL_RANK', rank))
if rank == 0:
checkpoint = model_zoo.load_url(url, model_dir=model_dir)
if world_size > 1:
torch.distributed.barrier()
if rank > 0:
checkpoint = model_zoo.load_url(url, model_dir=model_dir)
return checkpoint
def get_torchvision_models():
model_urls = dict()
for _, name, ispkg in pkgutil.walk_packages(torchvision.models.__path__):
if ispkg:
continue
_zoo = import_module(f'torchvision.models.{name}')
if hasattr(_zoo, 'model_urls'):
_urls = getattr(_zoo, 'model_urls')
model_urls.update(_urls)
return model_urls
def _load_checkpoint(filename, map_location=None):
"""Load checkpoint from somewhere (modelzoo, file, url).
Args:
filename (str): Accept local filepath, URL, ``torchvision://xxx``,
``open-mmlab://xxx``. Please refer to ``docs/model_zoo.md`` for
details.
map_location (str | None): Same as :func:`torch.load`. Default: None.
Returns:
dict | OrderedDict: The loaded checkpoint. It can be either an
OrderedDict storing model weights or a dict containing other
information, which depends on the checkpoint.
"""
if filename.startswith('modelzoo://'):
warnings.warn('The URL scheme of "modelzoo://" is deprecated, please '
'use "torchvision://" instead')
model_urls = get_torchvision_models()
model_name = filename[11:]
checkpoint = load_url_dist(model_urls[model_name])
elif filename.startswith('torchvision://'):
model_urls = get_torchvision_models()
model_name = filename[14:]
checkpoint = load_url_dist(model_urls[model_name])
elif filename.startswith(('http://', 'https://')):
checkpoint = load_url_dist(filename)
else:
if not osp.isfile(filename):
raise IOError(f'{filename} is not a checkpoint file')
checkpoint = torch.load(filename, map_location=map_location)
return checkpoint
def load_checkpoint(model,
filename,
map_location='cpu',
strict=False,
logger=None):
"""Load checkpoint from a file or URI.
Args:
model (Module): Module to load checkpoint.
filename (str): Accept local filepath, URL, ``torchvision://xxx``,
``open-mmlab://xxx``. Please refer to ``docs/model_zoo.md`` for
details.
map_location (str): Same as :func:`torch.load`.
strict (bool): Whether to allow different params for the model and
checkpoint.
logger (:mod:`logging.Logger` or None): The logger for error message.
Returns:
dict or OrderedDict: The loaded checkpoint.
"""
checkpoint = _load_checkpoint(filename, map_location)
# OrderedDict is a subclass of dict
if not isinstance(checkpoint, dict):
raise RuntimeError(
f'No state_dict found in checkpoint file {filename}')
# get state_dict from checkpoint
if 'state_dict' in checkpoint:
state_dict = checkpoint['state_dict']
elif 'model' in checkpoint:
state_dict = checkpoint['model']
else:
state_dict = checkpoint
# strip prefix of state_dict
if list(state_dict.keys())[0].startswith('module.'):
state_dict = {k[7:]: v for k, v in state_dict.items()}
# reshape absolute position embedding
if state_dict.get('absolute_pos_embed') is not None:
absolute_pos_embed = state_dict['absolute_pos_embed']
N1, L, C1 = absolute_pos_embed.size()
N2, C2, H, W = model.absolute_pos_embed.size()
if N1 != N2 or C1 != C2 or L != H*W:
if logger is not None:
logger.warning("Error in loading absolute_pos_embed, pass")
else:
print("Error in loading absolute_pos_embed, pass")
else:
state_dict['absolute_pos_embed'] = absolute_pos_embed.view(N2, H, W, C2).permute(0, 3, 1, 2)
# interpolate position bias table if needed
relative_position_bias_table_keys = [k for k in state_dict.keys() if "relative_position_bias_table" in k]
for table_key in relative_position_bias_table_keys:
table_pretrained = state_dict[table_key]
table_current = model.state_dict()[table_key]
L1, nH1 = table_pretrained.size()
L2, nH2 = table_current.size()
if nH1 != nH2:
if logger is not None:
logger.warning(f"Error in loading {table_key}, pass")
else:
print(f"Error in loading {table_key}, pass")
else:
if L1 != L2:
S1 = int(L1 ** 0.5)
S2 = int(L2 ** 0.5)
table_pretrained_resized = F.interpolate(
table_pretrained.permute(1, 0).view(1, nH1, S1, S1),
size=(S2, S2), mode='bicubic')
state_dict[table_key] = table_pretrained_resized.view(nH2, L2).permute(1, 0)
# load state_dict
load_state_dict(model, state_dict, strict, logger)
return checkpoint
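# Illustrative usage (assumes the torchvision model zoo exposes a 'resnet50' URL):
#   model = torchvision.models.resnet50()
#   load_checkpoint(model, 'torchvision://resnet50', map_location='cpu')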
``` |
{
"source": "jozhang97/CenterTrack",
"score": 2
} |
#### File: lib/utils/tracker.py
```python
import numpy as np
from scipy.optimize import linear_sum_assignment
from numba import jit
import copy
class Tracker(object):
# This gives us the tracks at one timestep, combined in a dict indexed by time
def __init__(self, opt):
self.opt = opt # keys: hungarian, public_det, new_thresh, max_age
self.reset()
def init_track(self, results):
'''
:param results first set of detections
initialize tracks for all detections with high scores
'''
for item in results:
if item['score'] > self.opt.new_thresh:
self.id_count += 1
# active and age are never used in the paper
item['active'] = 1
item['age'] = 1
item['tracking_id'] = self.id_count
if not ('ct' in item):
bbox = item['bbox']
item['ct'] = [(bbox[0] + bbox[2]) / 2, (bbox[1] + bbox[3]) / 2]
self.tracks.append(item)
def reset(self):
self.id_count = 0
self.tracks = []
def step(self, results, public_det=None):
'''
:param results list of detection (dict)
[{score, class, ct, tracking, bbox}]
:param public_det using known detections
:return ret list of detections with ID. Does not include track history
[{score, class, ct, tracking, bbox, tracking_id, age, active}]
Sketch
- Find the size, class, center for each object for all existing objects and all new detections
- Compute pairwise distances between existing and new objects
- Perform matching (hungarian or greedy)
- Update tracks with new matched detections, create new tracks for unmatched detections,
remove old tracks with no new detections
'''
N = len(results)
M = len(self.tracks)
# for all tracks, get sizes and classes and cts
track_size = np.array([((track['bbox'][2] - track['bbox'][0]) * (track['bbox'][3] - track['bbox'][1])) \
for track in self.tracks], np.float32) # M
track_cat = np.array([track['class'] for track in self.tracks], np.int32) # M
tracks = np.array([pre_det['ct'] for pre_det in self.tracks], np.float32) # M x 2
# for all results, get sizes and classes, apply offset
dets = np.array([det['ct'] + det['tracking'] for det in results], np.float32) # N x 2
item_size = np.array([((item['bbox'][2] - item['bbox'][0]) * (item['bbox'][3] - item['bbox'][1])) \
for item in results], np.float32) # N
item_cat = np.array([item['class'] for item in results], np.int32) # N
# compute pairwise image distances, apply large dist if invalid (not same class, not close enough)
dist = (((tracks.reshape(1, -1, 2) - dets.reshape(-1, 1, 2)) ** 2).sum(axis=2)) # N x M
invalid = ((dist > track_size.reshape(1, M)) + (dist > item_size.reshape(N, 1)) + \
(item_cat.reshape(N, 1) != track_cat.reshape(1, M))) > 0
dist = dist + invalid * 1e18
# match bipartite graph
if self.opt.hungarian:
item_score = np.array([item['score'] for item in results], np.float32) # N
dist[dist > 1e18] = 1e18
row_ind, col_ind = linear_sum_assignment(dist)
matched_indices = np.stack([row_ind, col_ind], axis=1)
else:
matched_indices = greedy_assignment(copy.deepcopy(dist))
# tracks that disappeared or just started
unmatched_dets = [d for d in range(dets.shape[0]) if not (d in matched_indices[:, 0])]
unmatched_tracks = [d for d in range(tracks.shape[0]) if not (d in matched_indices[:, 1])]
# postprocess hungarian
if self.opt.hungarian:
matches = []
for m in matched_indices:
if dist[m[0], m[1]] > 1e16:
unmatched_dets.append(m[0])
unmatched_tracks.append(m[1])
else:
matches.append(m)
matches = np.array(matches).reshape(-1, 2)
else:
matches = matched_indices
# update existing tracks that found matching detections
ret = []
for m in matches:
track = results[m[0]]
track['tracking_id'] = self.tracks[m[1]]['tracking_id']
track['age'] = 1
track['active'] = self.tracks[m[1]]['active'] + 1
ret.append(track)
# handle new objects
if self.opt.public_det and len(unmatched_dets) > 0:
# Public detection: only create tracks from provided detections
pub_dets = np.array([d['ct'] for d in public_det], np.float32)
dist3 = ((dets.reshape(-1, 1, 2) - pub_dets.reshape(1, -1, 2)) ** 2).sum(axis=2)
matched_dets = [d for d in range(dets.shape[0]) if not (d in unmatched_dets)]
dist3[matched_dets] = 1e18
for j in range(len(pub_dets)):
i = dist3[:, j].argmin() # pick the closest object
if dist3[i, j] < item_size[i]:
dist3[i, :] = 1e18
track = results[i]
if track['score'] > self.opt.new_thresh:
self.id_count += 1
track['tracking_id'] = self.id_count
track['age'] = 1
track['active'] = 1
ret.append(track)
else:
# Private detection: create tracks for all un-matched detections
for i in unmatched_dets:
track = results[i]
if track['score'] > self.opt.new_thresh:
self.id_count += 1
track['tracking_id'] = self.id_count
track['age'] = 1
track['active'] = 1
ret.append(track)
# handle stale objects (objects that have not been seen for a while)
for i in unmatched_tracks:
track = self.tracks[i]
if track['age'] < self.opt.max_age:
track['age'] += 1
track['active'] = 0
bbox = track['bbox']
ct = track['ct']
v = [0, 0]
track['bbox'] = [
bbox[0] + v[0], bbox[1] + v[1],
bbox[2] + v[0], bbox[3] + v[1]]
track['ct'] = [ct[0] + v[0], ct[1] + v[1]]
ret.append(track)
self.tracks = ret
return ret
def greedy_assignment(dist):
'''
:param dist N, M or M, N distances
:return K, 2 matched indices
'''
matched_indices = []
if dist.shape[1] == 0:
return np.array(matched_indices, np.int32).reshape(-1, 2)
for i in range(dist.shape[0]):
# Pick the closest j
j = dist[i].argmin()
# If the closest is closer than 1e16, match and remove j from consideration
if dist[i][j] < 1e16:
dist[:, j] = 1e18
matched_indices.append([i, j])
return np.array(matched_indices, np.int32).reshape(-1, 2)
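# Illustrative usage (made-up 2x2 distance matrix; entries >= 1e16 never match):
#   dist = np.array([[0.5, 1e18], [1e18, 0.2]])
#   greedy_assignment(dist.copy())  # -> array([[0, 0], [1, 1]], dtype=int32)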
``` |