repo_name (string, len 5-92) | path (string, len 4-232) | copies (19 classes) | size (string, len 4-7) | content (string, len 721-1.04M) | license (15 classes) | hash (int64, -9,223,277,421,539,062,000 to 9,223,102,107B) | line_mean (float64, 6.51-99.9) | line_max (int64, 15-997) | alpha_frac (float64, 0.25-0.97) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---
bebound/linovel | novel.py | 1 | 2503 | from abc import ABC, abstractmethod
import requests
from bs4 import BeautifulSoup
class AbstractNovel(ABC):
"""
abstract novel class
Attributes:
url: The novel url
single_thread: A bool represent whether use single thread grab novel information
volume_name: A string represent the volume name
volume_number: A string represent the volume number
book_name: A string represent the book name
author: A string represent the author
illustrator: A string represent the illustrator
introduction: A string represent the introduction
chapters: A list represent the chapter
cover_url: A string represent the cover_url
date: A string represent the date the book last updated (As specified in ISO 8601)
novel_information: A list contains dict which represent the novel information
"""
_HEADERS = {'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:19.0) Gecko/20100101 Firefox/19.0'}
def __init__(self, url, single_thread=False):
self.url = url
self.single_thread = single_thread
self.volume_name = ''
self.volume_number = ''
self.book_name = ''
self.author = ''
self.illustrator = ''
self.introduction = ''
self.chapters = []
self.cover_url = ''
self.date = ''
self.novel_information = []
def __str__(self):
return '{}:{}'.format(self.__class__.__name__, self.url)
@staticmethod
@abstractmethod
def check_url(url):
"""check whether the url match this website"""
pass
def parse_page(self, url, encoding=''):
"""
Parse a page with BeautifulSoup.
Args:
url: A string representing the URL to be parsed
encoding: A string representing the encoding of the HTML
Returns:
A BeautifulSoup element
"""
r = requests.get(url, headers=self._HEADERS)
r.encoding = 'utf-8' if not encoding else encoding
return BeautifulSoup(r.text, 'lxml')
@abstractmethod
def extract_novel_information(self):
"""extract novel information"""
pass
@abstractmethod
def get_novel_information(self):
"""
Return the novel information.
Returns:
A list of dicts; each dict usually holds: volume_name, volume_number, book_name,
author, illustrator, introduction, chapters, cover_url, date, source
"""
pass
| mit | 2,969,921,192,952,360,000 | 31.089744 | 108 | 0.623252 | false |
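For illustration, a minimal, hypothetical subclass of AbstractNovel — the site check and parsing logic below are invented, not taken from the repo:

```python
class ExampleNovel(AbstractNovel):
    """Hypothetical scraper for a fictional site, for illustration only."""

    @staticmethod
    def check_url(url):
        return 'example.com' in url  # assumed URL pattern

    def extract_novel_information(self):
        # Reuse the base class helper to fetch and parse the page
        soup = self.parse_page(self.url)
        self.book_name = soup.title.string if soup.title else ''
        self.novel_information.append(
            {'book_name': self.book_name, 'source': self.url})

    def get_novel_information(self):
        return self.novel_information
```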
zzw922cn/Automatic_Speech_Recognition | speechvalley/models/dynamic_brnn.py | 1 | 7057 | # encoding: utf-8
# ******************************************************
# Author : zzw922cn
# Last modified: 2017-12-09 11:00
# Email : [email protected]
# Filename : dynamic_brnn.py
# Description : Dynamic Bidirectional RNN model for Automatic Speech Recognition
# ******************************************************
import argparse
import time
import datetime
import os
from six.moves import cPickle
from functools import wraps
import numpy as np
import tensorflow as tf
from tensorflow.python.ops.rnn import bidirectional_dynamic_rnn
from speechvalley.utils import load_batched_data, describe, setAttrs, list_to_sparse_tensor, dropout, get_edit_distance
from speechvalley.utils import lnBasicRNNCell, lnGRUCell, lnBasicLSTMCell
def build_multi_dynamic_brnn(args,
maxTimeSteps,
inputX,
cell_fn,
seqLengths,
time_major=True):
hid_input = inputX
for i in range(args.num_layer):
scope = 'DBRNN_' + str(i + 1)
forward_cell = cell_fn(args.num_hidden, activation=args.activation)
backward_cell = cell_fn(args.num_hidden, activation=args.activation)
# tensor of shape: [max_time, batch_size, input_size]
outputs, output_states = bidirectional_dynamic_rnn(forward_cell, backward_cell,
inputs=hid_input,
dtype=tf.float32,
sequence_length=seqLengths,
time_major=time_major,
scope=scope)
# forward output, backward output
# tensor of shape: [max_time, batch_size, input_size]
output_fw, output_bw = outputs
# forward states, backward states
output_state_fw, output_state_bw = output_states
# output_fb = tf.concat(2, [output_fw, output_bw])
output_fb = tf.concat([output_fw, output_bw], 2)
shape = output_fb.get_shape().as_list()
output_fb = tf.reshape(output_fb, [shape[0], shape[1], 2, int(shape[2] / 2)])
hidden = tf.reduce_sum(output_fb, 2)
hidden = dropout(hidden, args.keep_prob, (args.mode == 'train'))
if i != args.num_layer - 1:
hid_input = hidden
else:
outputXrs = tf.reshape(hidden, [-1, args.num_hidden])
# output_list = tf.split(0, maxTimeSteps, outputXrs)
output_list = tf.split(outputXrs, maxTimeSteps, 0)
fbHrs = [tf.reshape(t, [args.batch_size, args.num_hidden]) for t in output_list]
return fbHrs
class DBiRNN(object):
def __init__(self, args, maxTimeSteps):
self.args = args
self.maxTimeSteps = maxTimeSteps
if args.layerNormalization is True:
if args.rnncell == 'rnn':
self.cell_fn = lnBasicRNNCell
elif args.rnncell == 'gru':
self.cell_fn = lnGRUCell
elif args.rnncell == 'lstm':
self.cell_fn = lnBasicLSTMCell
else:
raise Exception("rnncell type not supported: {}".format(args.rnncell))
else:
if args.rnncell == 'rnn':
self.cell_fn = tf.contrib.rnn.BasicRNNCell
elif args.rnncell == 'gru':
self.cell_fn = tf.contrib.rnn.GRUCell
elif args.rnncell == 'lstm':
self.cell_fn = tf.contrib.rnn.BasicLSTMCell
else:
raise Exception("rnncell type not supported: {}".format(args.rnncell))
self.build_graph(args, maxTimeSteps)
@describe
def build_graph(self, args, maxTimeSteps):
self.graph = tf.Graph()
with self.graph.as_default():
self.inputX = tf.placeholder(tf.float32,
shape=(maxTimeSteps, args.batch_size, args.num_feature)) # [maxL,32,39]
inputXrs = tf.reshape(self.inputX, [-1, args.num_feature])
# self.inputList = tf.split(0, maxTimeSteps, inputXrs) #convert inputXrs from [32*maxL,39] to [32,maxL,39]
self.inputList = tf.split(inputXrs, maxTimeSteps, 0) # convert inputXrs from [32*maxL,39] to [32,maxL,39]
self.targetIxs = tf.placeholder(tf.int64)
self.targetVals = tf.placeholder(tf.int32)
self.targetShape = tf.placeholder(tf.int64)
self.targetY = tf.SparseTensor(self.targetIxs, self.targetVals, self.targetShape)
self.seqLengths = tf.placeholder(tf.int32, shape=(args.batch_size))
self.config = {'name': args.model,
'rnncell': self.cell_fn,
'num_layer': args.num_layer,
'num_hidden': args.num_hidden,
'num_class': args.num_class,
'activation': args.activation,
'optimizer': args.optimizer,
'learning rate': args.learning_rate,
'keep prob': args.keep_prob,
'batch size': args.batch_size}
fbHrs = build_multi_dynamic_brnn(self.args, maxTimeSteps, self.inputX, self.cell_fn, self.seqLengths)
with tf.name_scope('fc-layer'):
with tf.variable_scope('fc'):
weightsClasses = tf.Variable(
tf.truncated_normal([args.num_hidden, args.num_class], name='weightsClasses'))
biasesClasses = tf.Variable(tf.zeros([args.num_class]), name='biasesClasses')
logits = [tf.matmul(t, weightsClasses) + biasesClasses for t in fbHrs]
logits3d = tf.stack(logits)
self.loss = tf.reduce_mean(tf.nn.ctc_loss(self.targetY, logits3d, self.seqLengths))
self.var_op = tf.global_variables()
self.var_trainable_op = tf.trainable_variables()
if args.grad_clip == -1:
# not apply gradient clipping
self.optimizer = tf.train.AdamOptimizer(args.learning_rate).minimize(self.loss)
else:
# apply gradient clipping
grads, _ = tf.clip_by_global_norm(tf.gradients(self.loss, self.var_trainable_op), args.grad_clip)
opti = tf.train.AdamOptimizer(args.learning_rate)
self.optimizer = opti.apply_gradients(zip(grads, self.var_trainable_op))
self.predictions = tf.to_int32(
tf.nn.ctc_beam_search_decoder(logits3d, self.seqLengths, merge_repeated=False)[0][0])
if args.level == 'cha':
self.errorRate = tf.reduce_sum(tf.edit_distance(self.predictions, self.targetY, normalize=True))
self.initial_op = tf.global_variables_initializer()
self.saver = tf.train.Saver(tf.global_variables(), max_to_keep=5, keep_checkpoint_every_n_hours=1)
| mit | 5,569,468,753,389,374,000 | 49.769784 | 119 | 0.552784 | false |
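A hypothetical sketch of wiring up DBiRNN; the namespace fields mirror the args attributes referenced above, but the concrete values are illustrative:

```python
args = argparse.Namespace(
    model='DBiRNN', mode='train', level='cha', rnncell='lstm',
    layerNormalization=False, num_layer=2, num_hidden=128,
    num_feature=39, num_class=29, batch_size=32, activation=tf.tanh,
    optimizer='adam', learning_rate=1e-4, keep_prob=0.9, grad_clip=1.0)
model = DBiRNN(args, maxTimeSteps=200)  # the graph is built in __init__
```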
ericholscher/django | tests/httpwrappers/tests.py | 1 | 24585 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import copy
import os
import pickle
import unittest
import warnings
from django.core.exceptions import SuspiciousOperation
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (QueryDict, HttpResponse, HttpResponseRedirect,
HttpResponsePermanentRedirect, HttpResponseNotAllowed,
HttpResponseNotModified, StreamingHttpResponse,
SimpleCookie, BadHeaderError,
parse_cookie)
from django.test import TestCase
from django.utils.encoding import smart_str, force_text
from django.utils.functional import lazy
from django.utils._os import upath
from django.utils import six
lazystr = lazy(force_text, six.text_type)
class QueryDictTests(unittest.TestCase):
def test_missing_key(self):
q = QueryDict(str(''))
self.assertRaises(KeyError, q.__getitem__, 'foo')
def test_immutability(self):
q = QueryDict(str(''))
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
def test_immutable_get_with_default(self):
q = QueryDict(str(''))
self.assertEqual(q.get('foo', 'default'), 'default')
def test_immutable_basic_operations(self):
q = QueryDict(str(''))
self.assertEqual(q.getlist('foo'), [])
if six.PY2:
self.assertEqual(q.has_key('foo'), False)
self.assertEqual('foo' in q, False)
self.assertEqual(list(six.iteritems(q)), [])
self.assertEqual(list(six.iterlists(q)), [])
self.assertEqual(list(six.iterkeys(q)), [])
self.assertEqual(list(six.itervalues(q)), [])
self.assertEqual(len(q), 0)
self.assertEqual(q.urlencode(), '')
def test_single_key_value(self):
"""Test QueryDict with one key/value pair"""
q = QueryDict(str('foo=bar'))
self.assertEqual(q['foo'], 'bar')
self.assertRaises(KeyError, q.__getitem__, 'bar')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertEqual(q.get('foo', 'default'), 'bar')
self.assertEqual(q.get('bar', 'default'), 'default')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertEqual(q.getlist('bar'), [])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
if six.PY2:
self.assertTrue(q.has_key('foo'))
self.assertTrue('foo' in q)
if six.PY2:
self.assertFalse(q.has_key('bar'))
self.assertFalse('bar' in q)
self.assertEqual(list(six.iteritems(q)), [('foo', 'bar')])
self.assertEqual(list(six.iterlists(q)), [('foo', ['bar'])])
self.assertEqual(list(six.iterkeys(q)), ['foo'])
self.assertEqual(list(six.itervalues(q)), ['bar'])
self.assertEqual(len(q), 1)
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
self.assertEqual(q.urlencode(), 'foo=bar')
def test_urlencode(self):
q = QueryDict(str(''), mutable=True)
q['next'] = '/a&b/'
self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
q = QueryDict(str(''), mutable=True)
q['next'] = '/t\xebst&key/'
self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')
def test_mutable_copy(self):
"""A copy of a QueryDict is mutable."""
q = QueryDict(str('')).copy()
self.assertRaises(KeyError, q.__getitem__, "foo")
q['name'] = 'john'
self.assertEqual(q['name'], 'john')
def test_mutable_delete(self):
q = QueryDict(str('')).copy()
q['name'] = 'john'
del q['name']
self.assertFalse('name' in q)
def test_basic_mutable_operations(self):
q = QueryDict(str('')).copy()
q['name'] = 'john'
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.get('name', 'default'), 'john')
self.assertEqual(q.getlist('name'), ['john'])
self.assertEqual(q.getlist('foo'), [])
q.setlist('foo', ['bar', 'baz'])
self.assertEqual(q.get('foo', 'default'), 'baz')
self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
q.appendlist('foo', 'another')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
self.assertEqual(q['foo'], 'another')
if six.PY2:
self.assertTrue(q.has_key('foo'))
self.assertTrue('foo' in q)
self.assertListEqual(sorted(list(six.iteritems(q))),
[('foo', 'another'), ('name', 'john')])
self.assertListEqual(sorted(list(six.iterlists(q))),
[('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
self.assertListEqual(sorted(list(six.iterkeys(q))),
['foo', 'name'])
self.assertListEqual(sorted(list(six.itervalues(q))),
['another', 'john'])
q.update({'foo': 'hello'})
self.assertEqual(q['foo'], 'hello')
self.assertEqual(q.get('foo', 'not available'), 'hello')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo', 'not there'), 'not there')
self.assertEqual(q.get('foo', 'not there'), 'not there')
self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
self.assertEqual(q['foo'], 'bar')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])
q.clear()
self.assertEqual(len(q), 0)
def test_multiple_keys(self):
"""Test QueryDict with two key/value pairs with same keys."""
q = QueryDict(str('vote=yes&vote=no'))
self.assertEqual(q['vote'], 'no')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertEqual(q.get('vote', 'default'), 'no')
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.getlist('vote'), ['yes', 'no'])
self.assertEqual(q.getlist('foo'), [])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
if six.PY2:
self.assertEqual(q.has_key('vote'), True)
self.assertEqual('vote' in q, True)
if six.PY2:
self.assertEqual(q.has_key('foo'), False)
self.assertEqual('foo' in q, False)
self.assertEqual(list(six.iteritems(q)), [('vote', 'no')])
self.assertEqual(list(six.iterlists(q)), [('vote', ['yes', 'no'])])
self.assertEqual(list(six.iterkeys(q)), ['vote'])
self.assertEqual(list(six.itervalues(q)), ['no'])
self.assertEqual(len(q), 1)
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
self.assertRaises(AttributeError, q.__delitem__, 'vote')
if six.PY2:
def test_invalid_input_encoding(self):
"""
QueryDicts must be able to handle invalid input encoding (in this
case, bad UTF-8 encoding).
This test doesn't apply under Python 3 because the URL is a string
and not a bytestring.
"""
q = QueryDict(str(b'foo=bar&foo=\xff'))
self.assertEqual(q['foo'], '\ufffd')
self.assertEqual(q.getlist('foo'), ['bar', '\ufffd'])
def test_pickle(self):
q = QueryDict(str(''))
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1, True)
q = QueryDict(str('a=b&c=d'))
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1, True)
q = QueryDict(str('a=b&c=d&a=1'))
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1, True)
def test_update_from_querydict(self):
"""Regression test for #8278: QueryDict.update(QueryDict)"""
x = QueryDict(str("a=1&a=2"), mutable=True)
y = QueryDict(str("a=3&a=4"))
x.update(y)
self.assertEqual(x.getlist('a'), ['1', '2', '3', '4'])
def test_non_default_encoding(self):
"""#13572 - QueryDict with a non-default encoding"""
q = QueryDict(str('cur=%A4'), encoding='iso-8859-15')
self.assertEqual(q.encoding, 'iso-8859-15')
self.assertEqual(list(six.iteritems(q)), [('cur', '€')])
self.assertEqual(q.urlencode(), 'cur=%A4')
q = q.copy()
self.assertEqual(q.encoding, 'iso-8859-15')
self.assertEqual(list(six.iteritems(q)), [('cur', '€')])
self.assertEqual(q.urlencode(), 'cur=%A4')
self.assertEqual(copy.copy(q).encoding, 'iso-8859-15')
self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15')
class HttpResponseTests(unittest.TestCase):
def test_headers_type(self):
r = HttpResponse()
# The following tests explicitly test types in addition to values
# because in Python 2 u'foo' == b'foo'.
# ASCII unicode or bytes values are converted to native strings.
r['key'] = 'test'
self.assertEqual(r['key'], str('test'))
self.assertIsInstance(r['key'], str)
r['key'] = 'test'.encode('ascii')
self.assertEqual(r['key'], str('test'))
self.assertIsInstance(r['key'], str)
self.assertIn(b'test', r.serialize_headers())
# Latin-1 unicode or bytes values are also converted to native strings.
r['key'] = 'café'
self.assertEqual(r['key'], smart_str('café', 'latin-1'))
self.assertIsInstance(r['key'], str)
r['key'] = 'café'.encode('latin-1')
self.assertEqual(r['key'], smart_str('café', 'latin-1'))
self.assertIsInstance(r['key'], str)
self.assertIn('café'.encode('latin-1'), r.serialize_headers())
# Other unicode values are MIME-encoded (there's no way to pass them as bytes).
r['key'] = '†'
self.assertEqual(r['key'], str('=?utf-8?b?4oCg?='))
self.assertIsInstance(r['key'], str)
self.assertIn(b'=?utf-8?b?4oCg?=', r.serialize_headers())
# The response also converts unicode or bytes keys to strings, but requires
# them to contain ASCII
r = HttpResponse()
del r['Content-Type']
r['foo'] = 'bar'
l = list(r.items())
self.assertEqual(len(l), 1)
self.assertEqual(l[0], ('foo', 'bar'))
self.assertIsInstance(l[0][0], str)
r = HttpResponse()
del r['Content-Type']
r[b'foo'] = 'bar'
l = list(r.items())
self.assertEqual(len(l), 1)
self.assertEqual(l[0], ('foo', 'bar'))
self.assertIsInstance(l[0][0], str)
r = HttpResponse()
self.assertRaises(UnicodeError, r.__setitem__, 'føø', 'bar')
self.assertRaises(UnicodeError, r.__setitem__, 'føø'.encode('utf-8'), 'bar')
def test_long_line(self):
# Bug #20889: long lines trigger newlines to be added to headers
# (which is not allowed due to bug #10188)
h = HttpResponse()
f = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88'.encode('latin-1')
f = f.decode('utf-8')
h['Content-Disposition'] = 'attachment; filename="%s"' % f
def test_newlines_in_headers(self):
# Bug #10188: Do not allow newlines in headers (CR or LF)
r = HttpResponse()
self.assertRaises(BadHeaderError, r.__setitem__, 'test\rstr', 'test')
self.assertRaises(BadHeaderError, r.__setitem__, 'test\nstr', 'test')
def test_dict_behavior(self):
"""
Test for bug #14020: Make HttpResponse.get work like dict.get
"""
r = HttpResponse()
self.assertEqual(r.get('test'), None)
def test_non_string_content(self):
#Bug 16494: HttpResponse should behave consistently with non-strings
r = HttpResponse(12345)
self.assertEqual(r.content, b'12345')
#test content via property
r = HttpResponse()
r.content = 12345
self.assertEqual(r.content, b'12345')
def test_iter_content(self):
r = HttpResponse(['abc', 'def', 'ghi'])
self.assertEqual(r.content, b'abcdefghi')
#test iter content via property
r = HttpResponse()
r.content = ['idan', 'alex', 'jacob']
self.assertEqual(r.content, b'idanalexjacob')
r = HttpResponse()
r.content = [1, 2, 3]
self.assertEqual(r.content, b'123')
#test odd inputs
r = HttpResponse()
r.content = ['1', '2', 3, '\u079e']
#'\xde\x9e' == unichr(1950).encode('utf-8')
self.assertEqual(r.content, b'123\xde\x9e')
#with Content-Encoding header
r = HttpResponse()
r['Content-Encoding'] = 'winning'
r.content = [b'abc', b'def']
self.assertEqual(r.content, b'abcdef')
self.assertRaises(TypeError if six.PY3 else UnicodeEncodeError,
setattr, r, 'content', ['\u079e'])
# .content can safely be accessed multiple times.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(r.content, r.content)
self.assertEqual(r.content, b'helloworld')
# accessing the iterator works (once) after accessing .content
self.assertEqual(b''.join(r), b'helloworld')
self.assertEqual(b''.join(r), b'')
# accessing .content still works
self.assertEqual(r.content, b'helloworld')
# Accessing .content also works if the response was iterated first.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(b''.join(r), b'helloworld')
self.assertEqual(r.content, b'helloworld')
# Additional content can be written to the response.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(r.content, b'helloworld')
r.write('!')
self.assertEqual(r.content, b'helloworld!')
def test_iterator_isnt_rewound(self):
# Regression test for #13222
r = HttpResponse('abc')
i = iter(r)
self.assertEqual(list(i), [b'abc'])
self.assertEqual(list(i), [])
def test_lazy_content(self):
r = HttpResponse(lazystr('helloworld'))
self.assertEqual(r.content, b'helloworld')
def test_file_interface(self):
r = HttpResponse()
r.write(b"hello")
self.assertEqual(r.tell(), 5)
r.write("привет")
self.assertEqual(r.tell(), 17)
r = HttpResponse(['abc'])
r.write('def')
self.assertEqual(r.tell(), 6)
self.assertEqual(r.content, b'abcdef')
# with Content-Encoding header
r = HttpResponse()
r['Content-Encoding'] = 'winning'
r.write(b'abc')
r.write(b'def')
self.assertEqual(r.content, b'abcdef')
def test_unsafe_redirect(self):
bad_urls = [
'data:text/html,<script>window.alert("xss")</script>',
'mailto:[email protected]',
'file:///etc/passwd',
]
for url in bad_urls:
self.assertRaises(SuspiciousOperation,
HttpResponseRedirect, url)
self.assertRaises(SuspiciousOperation,
HttpResponsePermanentRedirect, url)
class HttpResponseSubclassesTests(TestCase):
def test_redirect(self):
response = HttpResponseRedirect('/redirected/')
self.assertEqual(response.status_code, 302)
# Test that standard HttpResponse init args can be used
response = HttpResponseRedirect('/redirected/',
content='The resource has temporarily moved',
content_type='text/html')
self.assertContains(response, 'The resource has temporarily moved', status_code=302)
# Test that url attribute is right
self.assertEqual(response.url, response['Location'])
def test_redirect_lazy(self):
"""Make sure HttpResponseRedirect works with lazy strings."""
r = HttpResponseRedirect(lazystr('/redirected/'))
self.assertEqual(r.url, '/redirected/')
def test_not_modified(self):
response = HttpResponseNotModified()
self.assertEqual(response.status_code, 304)
# 304 responses should not have content/content-type
with self.assertRaises(AttributeError):
response.content = "Hello dear"
self.assertNotIn('content-type', response)
def test_not_allowed(self):
response = HttpResponseNotAllowed(['GET'])
self.assertEqual(response.status_code, 405)
# Test that standard HttpResponse init args can be used
response = HttpResponseNotAllowed(['GET'],
content='Only the GET method is allowed',
content_type='text/html')
self.assertContains(response, 'Only the GET method is allowed', status_code=405)
class StreamingHttpResponseTests(TestCase):
def test_streaming_response(self):
r = StreamingHttpResponse(iter(['hello', 'world']))
# iterating over the response itself yields bytestring chunks.
chunks = list(r)
self.assertEqual(chunks, [b'hello', b'world'])
for chunk in chunks:
self.assertIsInstance(chunk, six.binary_type)
# and the response can only be iterated once.
self.assertEqual(list(r), [])
# even when a sequence that can be iterated many times, like a list,
# is given as content.
r = StreamingHttpResponse(['abc', 'def'])
self.assertEqual(list(r), [b'abc', b'def'])
self.assertEqual(list(r), [])
# streaming responses don't have a `content` attribute.
self.assertFalse(hasattr(r, 'content'))
# and you can't accidentally assign to a `content` attribute.
with self.assertRaises(AttributeError):
r.content = 'xyz'
# but they do have a `streaming_content` attribute.
self.assertTrue(hasattr(r, 'streaming_content'))
# that exists so we can check if a response is streaming, and wrap or
# replace the content iterator.
r.streaming_content = iter(['abc', 'def'])
r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
self.assertEqual(list(r), [b'ABC', b'DEF'])
# coercing a streaming response to bytes doesn't return a complete HTTP
# message like a regular response does. it only gives us the headers.
r = StreamingHttpResponse(iter(['hello', 'world']))
self.assertEqual(
six.binary_type(r), b'Content-Type: text/html; charset=utf-8')
# and this won't consume its content.
self.assertEqual(list(r), [b'hello', b'world'])
# additional content cannot be written to the response.
r = StreamingHttpResponse(iter(['hello', 'world']))
with self.assertRaises(Exception):
r.write('!')
# and we can't tell the current position.
with self.assertRaises(Exception):
r.tell()
class FileCloseTests(TestCase):
def setUp(self):
# Disable the request_finished signal during this test
# to avoid interfering with the database connection.
request_finished.disconnect(close_old_connections)
def tearDown(self):
request_finished.connect(close_old_connections)
def test_response(self):
filename = os.path.join(os.path.dirname(upath(__file__)), 'abc.txt')
# file isn't closed until we close the response.
file1 = open(filename)
r = HttpResponse(file1)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
# don't automatically close file when we finish iterating the response.
file1 = open(filename)
r = HttpResponse(file1)
self.assertFalse(file1.closed)
with warnings.catch_warnings():
warnings.simplefilter("ignore", DeprecationWarning)
list(r)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
# when multiple files are assigned as content, make sure they are all
# closed with the response.
file1 = open(filename)
file2 = open(filename)
r = HttpResponse(file1)
r.content = file2
self.assertFalse(file1.closed)
self.assertFalse(file2.closed)
r.close()
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
def test_streaming_response(self):
filename = os.path.join(os.path.dirname(upath(__file__)), 'abc.txt')
# file isn't closed until we close the response.
file1 = open(filename)
r = StreamingHttpResponse(file1)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
# when multiple files are assigned as content, make sure they are all
# closed with the response.
file1 = open(filename)
file2 = open(filename)
r = StreamingHttpResponse(file1)
r.streaming_content = file2
self.assertFalse(file1.closed)
self.assertFalse(file2.closed)
r.close()
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
class CookieTests(unittest.TestCase):
def test_encode(self):
"""
Test that we don't output tricky characters in encoded value
"""
c = SimpleCookie()
c['test'] = "An,awkward;value"
self.assertTrue(";" not in c.output().rstrip(';')) # IE compat
self.assertTrue("," not in c.output().rstrip(';')) # Safari compat
def test_decode(self):
"""
Test that we can still preserve semi-colons and commas
"""
c = SimpleCookie()
c['test'] = "An,awkward;value"
c2 = SimpleCookie()
c2.load(c.output())
self.assertEqual(c['test'].value, c2['test'].value)
def test_decode_2(self):
"""
Test that we haven't broken normal encoding
"""
c = SimpleCookie()
c['test'] = b"\xf0"
c2 = SimpleCookie()
c2.load(c.output())
self.assertEqual(c['test'].value, c2['test'].value)
def test_nonstandard_keys(self):
"""
Test that a single non-standard cookie name doesn't affect all cookies. Ticket #13007.
"""
self.assertTrue('good_cookie' in parse_cookie('good_cookie=yes;bad:cookie=yes').keys())
def test_repeated_nonstandard_keys(self):
"""
Test that a repeated non-standard name doesn't affect all cookies. Ticket #15852
"""
self.assertTrue('good_cookie' in parse_cookie('a:=b; a:=c; good_cookie=yes').keys())
def test_httponly_after_load(self):
"""
Test that we can use httponly attribute on cookies that we load
"""
c = SimpleCookie()
c.load("name=val")
c['name']['httponly'] = True
self.assertTrue(c['name']['httponly'])
def test_load_dict(self):
c = SimpleCookie()
c.load({'name': 'val'})
self.assertEqual(c['name'].value, 'val')
@unittest.skipUnless(six.PY2, "PY3 throws an exception on invalid cookie keys.")
def test_bad_cookie(self):
"""
Regression test for #18403
"""
r = HttpResponse()
r.set_cookie("a:.b/", 1)
self.assertEqual(len(r.cookies.bad_cookies), 1)
| bsd-3-clause | -5,954,548,522,859,807,000 | 37.867089 | 95 | 0.594081 | false |
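For quick reference, the QueryDict behavior these tests exercise boils down to the following interactive sketch (illustrative, not part of the suite):

```python
q = QueryDict(str('a=1&a=2'))   # immutable by default
q['a']                          # '2' -- the last value wins
q.getlist('a')                  # ['1', '2']
m = q.copy()                    # copies are mutable
m.appendlist('a', '3')
m.urlencode()                   # 'a=1&a=2&a=3' (order may vary)
```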
ljcooke/dotfiles | python/pythonrc.py | 1 | 2186 | # -----------------------------------------------------------------------------
# Python repl config
# -----------------------------------------------------------------------------
import datetime
import math
import os
import random
import re
import sys
from math import *
from pydoc import pager
try:
from see import see
except ImportError:
see = dir
PY = sys.version_info[0]
class Term:
RESET = "\x1b[0m"
BOLD = "\x1b[1m"
DIM = "\x1b[2m"
UNBOLD = "\x1b[21m"
UNDIM = "\x1b[22m"
RED = "\x1b[31m"
GREEN = "\x1b[32m"
YELLOW = "\x1b[33m"
BLUE = "\x1b[34m"
PINK = "\x1b[35m"
CYAN = "\x1b[36m"
@classmethod
def color(cls, string, color, uncolor=RESET):
return ''.join((color, string, uncolor))
@classmethod
def dim(cls, string):
return ''.join((cls.DIM, string, cls.UNDIM))
@classmethod
def setup_prompt(cls):
version = '.'.join(str(s) for s in sys.version_info[:2])
sys.ps1 = '(py%s)> ' % version
sys.ps2 = '%s ' % ('.' * 8)
Term.setup_prompt()
if PY < 3:
try:
import rlcompleter
if 'libedit' in rlcompleter.readline.__doc__:
rlcompleter.readline.parse_and_bind('bind ^I rl_complete')
else:
rlcompleter.readline.parse_and_bind('tab: complete')
except ImportError:
print("Install readline for tab completion.")
print('')
def take(seq, count=1):
queue = []
for item in seq:
queue.append(item)
if len(queue) == count:
yield tuple(queue)
queue = []
if queue:
yield tuple(queue)
def pairs(seq):
return take(seq, 2)
def enhex(seq):
if isinstance(seq, str):
seq = seq.encode('utf-8')
return ' '.join(hex(b)[2:].zfill(2) for b in seq).upper()
def dehex(s, utf8=True):
s = ''.join(s.lower().split())
if not all(c in '0123456789abcdef' for c in s):
raise ValueError('Not a hex string')
byteseq = bytes(int(''.join(p), 16) for p in pairs(s))
if utf8:
try:
return byteseq.decode('utf-8')
except UnicodeDecodeError:
pass
return byteseq
| mit | -3,096,192,023,812,464,000 | 21.306122 | 79 | 0.522873 | false |
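A short round-trip with the helpers above; the values in the comments are what these calls should produce:

```python
enhex('hi')              # '68 69'
dehex('68 69')           # 'hi'
list(pairs('abcd'))      # [('a', 'b'), ('c', 'd')]
list(take(range(5), 2))  # [(0, 1), (2, 3), (4,)] -- leftovers yield a short tuple
```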
edx/edx-e2e-tests | regression/pages/enterprise/enterprise_const.py | 1 | 1142 | """
URLs and constants for enterprise stuff
"""
import os
ENTERPRISE_PORTAL_LOGIN_URL = "https://pmsalesdemo8.successfactors.com/login?company=SFPART011327#/login"
DEFAULT_ENTERPRISE_NAME = 'SuccessFactors'
ENTERPRISE_NAME = os.environ.get('ENTERPRISE_NAME', DEFAULT_ENTERPRISE_NAME)
DEFAULT_IDP_CSS_ID = 'bestrun'
IDP_CSS_ID = os.environ.get('IDP_CSS_ID', DEFAULT_IDP_CSS_ID)
ENT_CUSTOMER_UUID = os.environ.get('ENT_CUSTOMER_UUID', '')
ENT_CUSTOMER_CATALOG_UUID = os.environ.get('ENT_CUSTOMER_CATALOG_UUID')
ENT_COURSE_ID = os.environ.get('ENT_COURSE_ID', 'course-v1:Mattx+TCE2E+2018')
ENT_PORTAL_USERNAME = os.environ.get('ENT_PORTAL_USERNAME')
ENT_PORTAL_PASSWORD = os.environ.get('ENT_PORTAL_PASSWORD')
ENT_PORTAL_EDX_LINKED_USERNAME = os.environ.get('ENT_PORTAL_EDX_LINKED_USERNAME')
ENT_PORTAL_EDX_LINKED_PASSWORD = os.environ.get('ENT_PORTAL_EDX_LINKED_PASSWORD')
ENT_COURSE_TITLE = os.environ.get('ENT_COURSE_TITLE')
ENT_COURSE_ORG = os.environ.get('ENT_COURSE_ORG')
ENT_COURSE_PRICE = os.environ.get('ENT_COURSE_PRICE')
ENT_COURSE_START_DATE = os.environ.get('ENT_COURSE_START_DATE')
DEFAULT_COURSE_PRICE = 100.0
| agpl-3.0 | 7,302,774,754,136,636,000 | 27.55 | 105 | 0.744308 | false |
amillar2/light-django | esp/mqtt_init.py | 1 | 2256 | import paho.mqtt.client as mqtt
import json
from .models import Switch, Device, PWM
# The callback for when the client receives a CONNACK response from the server.
def on_connect(client, userdata, flags, rc):
print("Connected with result code "+str(rc))
#subscribe to status and discovery topics
client.subscribe("+/status")
client.subscribe("discovery")
#subscribe to switch topics
client.subscribe("+/switch/#")
# The callback for when a PUBLISH message is received from the server.
"""
def on_message(client, userdata, msg):
print(msg.topic+": "+str(msg.payload))
if "status" in msg.topic:
print("Received status message on " + msg.topic + " : " + msg.payload)
if "discovery" in msg.topic:
print("Received discovery message: " + msg.payload)
def on_publish(client, userdata, mid):
print("Published message")
"""
def on_status(client, userdata, msg):
#grab espID from <espID>/status topic string
espID = msg.topic.split('/')[0]
print(msg.payload)
if msg.payload:
statusData = json.loads(msg.payload)
d = Device.objects.filter(espID=espID)
#if device exists, update status
if d:
d = d[0]
d.update_status(statusData)
else:
print("Received status from unknown device: %s"%espID)
def on_discovery(client, userdata, msg):
print('received discovery message')
#get espID
# paho-mqtt delivers the payload as bytes under Python 3
espID = msg.payload.decode()
#if espID exists, configure
d = Device.objects.filter(espID=espID)
if d:
d[0].config_device()
#if espID does not exist, make new object and save
else:
Device.objects.create(espID=espID, name=espID)
def on_switch(client, userdata, msg):
print("received switch input")
sw = Switch.objects.filter(topic=msg.topic)
#if switch exists, toggle pwms
print(sw)
if sw:
sw[0].toggle_pwm()
def mqtt_init():
client = mqtt.Client()
client.on_connect = on_connect
#client.on_message = on_message #for test/debug. uncomment func defs if used
#client.on_publish = on_publish
#add topic callbacks here
client.message_callback_add("+/status", on_status)
client.message_callback_add("discovery", on_discovery)
client.message_callback_add("+/switch/#", on_switch)
client.username_pw_set('test', password='testpass')
client.connect("localhost",port=1883, keepalive=60)
client.loop_start()
return client
| mit | -4,793,063,382,847,781,000 | 30.774648 | 79 | 0.719858 | false |
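A hypothetical smoke test against a local broker; the topic names follow the subscriptions above, while the device ID and payload are assumptions:

```python
client = mqtt_init()
client.publish('discovery', 'esp-01')            # creates/configures a Device
client.publish('esp-01/status', '{"on": true}')  # routed to on_status
client.loop_stop()
```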
felix9064/python | Demo/utils/route.py | 1 | 1139 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# macOS route setup for using the internal and external networks at the same time: save this code as a .py file and run it with sudo
import os
import re
def route():
# Get the gateway IP addresses from the routing table
data = os.popen("netstat -rn|awk '{print $2}'").readlines()
# Regex for the external-network gateway IP
re_ip1 = re.compile(r'172.16.\d{1,3}.\d{1,3}')
# Regex for the internal-network gateway IP
re_ip2 = re.compile(r'198.98.\d{1,3}.\d{1,3}')
ip1 = ""
ip2 = ""
for x in data:
print(x)
if re_ip1.match(x):
# Capture the external gateway IP
ip1 = re_ip1.findall(x)[0]
if re_ip2.match(x):
# Capture the internal gateway IP
ip2 = re_ip2.findall(x)[0]
print(ip1, ip2)
# Only touch the routing table when both gateways were found
if ip1 and ip2:
# Delete the default external route
os.popen('route delete 0.0.0.0')
# Add the internal-network route
os.popen('route -n add -net 198.98.0.0 -netmask 255.0.0.0 %s' % ip2)
# Add the default external route via the external gateway
os.popen('route -n add -net 0.0.0.0 -netmask 0.0.0.0 %s' % ip1)
if __name__ == "__main__":
route()
| mit | 3,120,792,054,979,646,500 | 20.311111 | 76 | 0.514077 | false |
amitjamadagni/sympy | setup.py | 2 | 8934 | #!/usr/bin/env python
"""Distutils based setup script for SymPy.
This uses Distutils (http://python.org/sigs/distutils-sig/), the standard
Python mechanism for installing packages. For the easiest installation
just type the command (you'll probably need root privileges for that):
python setup.py install
This will install the library in the default location. For instructions on
how to customize the install procedure read the output of:
python setup.py --help install
In addition, there are some other commands:
python setup.py clean -> will clean all trash (*.pyc and stuff)
python setup.py test -> will run the complete test suite
python setup.py bench -> will run the complete benchmark suite
python setup.py audit -> will run pyflakes checker on source code
To get a full list of available commands, read the output of:
python setup.py --help-commands
Or, if all else fails, feel free to write to the sympy list at
[email protected] and ask for help.
"""
from distutils.core import setup
from distutils.core import Command
import sys
import subprocess
import os
import sympy
# Make sure I have the right Python version.
if sys.version_info[:2] < (2, 5):
print("SymPy requires Python 2.5 or newer. Python %d.%d detected" %
sys.version_info[:2])
sys.exit(-1)
# Check that this list is up to date against the result of the command:
# for i in `find sympy -name __init__.py | rev | cut -f 2- -d '/' | rev | egrep -v "^sympy$" | egrep -v "tests$" `; do echo "'${i//\//.}',"; done | sort
modules = [
'sympy.assumptions',
'sympy.assumptions.handlers',
'sympy.categories',
'sympy.combinatorics',
'sympy.concrete',
'sympy.core',
'sympy.diffgeom',
'sympy.external',
'sympy.functions',
'sympy.functions.combinatorial',
'sympy.functions.elementary',
'sympy.functions.special',
'sympy.galgebra',
'sympy.geometry',
'sympy.integrals',
'sympy.interactive',
'sympy.logic',
'sympy.logic.algorithms',
'sympy.logic.utilities',
'sympy.matrices',
'sympy.matrices.expressions',
'sympy.mpmath',
'sympy.mpmath.calculus',
'sympy.mpmath.functions',
'sympy.mpmath.libmp',
'sympy.mpmath.matrices',
'sympy.ntheory',
'sympy.parsing',
'sympy.physics',
'sympy.physics.mechanics',
'sympy.physics.quantum',
'sympy.plotting',
'sympy.plotting.intervalmath',
'sympy.plotting.pygletplot',
'sympy.polys',
'sympy.polys.agca',
'sympy.polys.domains',
'sympy.printing',
'sympy.printing.pretty',
'sympy.strategies',
'sympy.strategies.branch',
'sympy.series',
'sympy.sets',
'sympy.simplify',
'sympy.solvers',
'sympy.statistics',
'sympy.stats',
'sympy.tensor',
'sympy.unify',
'sympy.utilities',
'sympy.utilities.mathml',
]
class audit(Command):
"""Audits SymPy's source code for following issues:
- Names which are used but not defined or used before they are defined.
- Names which are redefined without having been used.
"""
description = "Audit SymPy source with PyFlakes"
user_options = []
def initialize_options(self):
self.all = None
def finalize_options(self):
pass
def run(self):
import os
try:
import pyflakes.scripts.pyflakes as flakes
except ImportError:
print("In order to run the audit, you need to have PyFlakes installed.")
sys.exit(-1)
# We don't want to audit external dependencies
ext = ('mpmath',)
dirs = (os.path.join(*d) for d in
(m.split('.') for m in modules) if d[1] not in ext)
warns = 0
for dir in dirs:
for filename in os.listdir(dir):
if filename.endswith('.py') and filename != '__init__.py':
warns += flakes.checkPath(os.path.join(dir, filename))
if warns > 0:
print ("Audit finished with total %d warnings" % warns)
class clean(Command):
"""Cleans *.pyc and debian trashs, so you should get the same copy as
is in the VCS.
"""
description = "remove build files"
user_options = [("all", "a", "the same")]
def initialize_options(self):
self.all = None
def finalize_options(self):
pass
def run(self):
import os
os.system("py.cleanup")
os.system("rm -f python-build-stamp-2.4")
os.system("rm -f MANIFEST")
os.system("rm -rf build")
os.system("rm -rf dist")
os.system("rm -rf doc/_build")
class test_sympy(Command):
"""Runs all tests under the sympy/ folder
"""
description = "run all tests and doctests; also see bin/test and bin/doctest"
user_options = [] # distutils complains if this is not here.
def __init__(self, *args):
self.args = args[0] # so we can pass it to other classes
Command.__init__(self, *args)
def initialize_options(self): # distutils wants this
pass
def finalize_options(self): # this too
pass
def run(self):
sympy.utilities.runtests.run_all_tests()
class run_benchmarks(Command):
"""Runs all SymPy benchmarks"""
description = "run all benchmarks"
user_options = [] # distutils complains if this is not here.
def __init__(self, *args):
self.args = args[0] # so we can pass it to other classes
Command.__init__(self, *args)
def initialize_options(self): # distutils wants this
pass
def finalize_options(self): # this too
pass
# we use py.test like architecture:
#
# o collector -- collects benchmarks
# o runner -- executes benchmarks
# o presenter -- displays benchmarks results
#
# this is done in sympy.utilities.benchmarking on top of py.test
def run(self):
from sympy.utilities import benchmarking
benchmarking.main(['sympy'])
# Check that this list is up to date against the result of the command:
# $ python bin/generate_test_list.py
tests = [
'sympy.assumptions.tests',
'sympy.categories.tests',
'sympy.combinatorics.tests',
'sympy.concrete.tests',
'sympy.core.tests',
'sympy.diffgeom.tests',
'sympy.external.tests',
'sympy.functions.combinatorial.tests',
'sympy.functions.elementary.tests',
'sympy.functions.special.tests',
'sympy.galgebra.tests',
'sympy.geometry.tests',
'sympy.integrals.tests',
'sympy.interactive.tests',
'sympy.logic.tests',
'sympy.matrices.expressions.tests',
'sympy.matrices.tests',
'sympy.mpmath.tests',
'sympy.ntheory.tests',
'sympy.parsing.tests',
'sympy.physics.mechanics.tests',
'sympy.physics.quantum.tests',
'sympy.physics.tests',
'sympy.plotting.intervalmath.tests',
'sympy.plotting.pygletplot.tests',
'sympy.plotting.tests',
'sympy.polys.agca.tests',
'sympy.polys.tests',
'sympy.printing.pretty.tests',
'sympy.printing.tests',
'sympy.strategies.branch.tests',
'sympy.strategies.tests',
'sympy.series.tests',
'sympy.sets.tests',
'sympy.simplify.tests',
'sympy.solvers.tests',
'sympy.statistics.tests',
'sympy.stats.tests',
'sympy.tensor.tests',
'sympy.unify.tests',
'sympy.utilities.tests',
]
classifiers = [
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Mathematics',
'Topic :: Scientific/Engineering :: Physics',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
]
long_description = '''SymPy is a Python library for symbolic mathematics. It aims
to become a full-featured computer algebra system (CAS) while keeping the code
as simple as possible in order to be comprehensible and easily extensible.
SymPy is written entirely in Python and does not require any external libraries.'''
setup(
name='sympy',
version=sympy.__version__,
description='Computer algebra system (CAS) in Python',
long_description=long_description,
author='SymPy development team',
author_email='[email protected]',
license='BSD',
keywords="Math CAS",
url='http://code.google.com/p/sympy',
packages=['sympy'] + modules + tests,
scripts=['bin/isympy'],
ext_modules=[],
package_data={ 'sympy.utilities.mathml': ['data/*.xsl'] },
data_files=[('share/man/man1', ['doc/man/isympy.1'])],
cmdclass={'test': test_sympy,
'bench': run_benchmarks,
'clean': clean,
'audit': audit,
},
classifiers=classifiers,
)
| bsd-3-clause | 7,602,071,982,156,798,000 | 29.182432 | 152 | 0.638908 | false |
btxgit/gazee | specproc.py | 1 | 3211 | import sqlite3
import os
if __name__ == '__main__':
# with open('/tmp/out.txt', 'rb') as fd:
# s = fd.read()
# ll = s.split('\n')
# otl = []
# s = '''UPDATE all_comics SET width=?, height=?, ratio=? WHERE comicid=?;'''
#
# with sqlite3.connect('data/gazee_comics.db') as con:
#
# for l in ll:
# if l.strip() == '':
# continue
# tl = l.strip().split(',')
# id = int(tl[0].split('-')[0], 10)
# w = int(tl[1], 10)
# h = int(tl[2], 10)
# ratio = (1.0) * w / h
# otl.append( (w,h,ratio,id) )
#
# print "Committing %d records..." % len(otl)
# con.executemany(s, otl)
# con.commit()
tgtw = 225
tgth = 300
with sqlite3.connect('data/gazee_comics.db') as con:
sql = '''SELECT comicid, width, height, ratio FROM all_comics;'''
for row in con.execute(sql):
cid, w, h, ratio = row
if w == 0 or h == 0:
continue
part = (cid // 512)
if ratio >= 1.2:
rot = 90
tw = h
h = w
w = tw
ratio = (1.0) * w / h
print("convert data/cache/%d/%d-native.jpg -rotate 90 -thumbnail %dx%d data/cache/%d/%d-%dx%d.jpg" % (part, cid, tgtw, tgth, part, cid, tgtw, tgth))
# continue
else:
rot = 0
# print("%d [ %d x %d ] (%.4f)" % (cid, w, h, ratio))
h1 = tgth
w1 = int(h1 * ratio)
w2 = tgtw
h2 = int(w2 / ratio)
# print("Opt1: %d x %d Opt2: %d x %d" % (w1, h1, w2, h2))
if (w1 > tgtw):
infn = "data/cache/%d/%d-%dx%d.jpg" % (part, cid, tgtw, tgth)
ofn = "data/cache/%d/p%d-%dx%d.jpg" % (part, cid, tgtw, tgth)
# print("convert data/cache/%d/p%d-%dx%d.jpg -rotate 90 -thumbnail %dx%d %s" % (part, cid, tgtw, tgth, tgtw, tgth, infn))
pw = w2
ph = h2
fixh = tgth - ph
origfixh = fixh
if ((fixh %2) == 1):
fixh += 1
fixwh = fixh // 2
# print("w1, h1 (%d, %d) w2, h2 (%d, %d)" % (w1, h1, w2, h2))
if rot == 90 or not os.path.exists(ofn):
print("bash imageborder -s 0x%d -p 20 -e edge -b 2 %s %s" % (fixwh, infn, ofn))
else:
pw = w1
ph = h1
fixw = tgtw - pw
origfixw = fixw
if ((fixw % 2) == 1):
fixw += 1
fixwb = fixw//2
ofn = "data/cache/%d/p%d-%dx%d.jpg" % (part, cid, tgtw, tgth)
if rot == 90 or not os.path.exists(ofn):
print("bash imageborder -s %dx0 -p 20 -e edge -b 2 data/cache/%d/%d-300x400.jpg %s" % (fixwb, part, cid, ofn))
print("echo %d..." % cid)
| gpl-3.0 | 2,409,360,383,570,491,400 | 33.526882 | 164 | 0.379321 | false |
alexgorban/models | official/modeling/model_training_utils.py | 1 | 17423 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A light weight utilities to train NLP models."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import os
from absl import logging
import tensorflow as tf
from official.utils.misc import distribution_utils
from official.utils.misc import tpu_lib
_SUMMARY_TXT = 'training_summary.txt'
_MIN_SUMMARY_STEPS = 10
def _save_checkpoint(checkpoint, model_dir, checkpoint_prefix):
"""Saves model to with provided checkpoint prefix."""
checkpoint_path = os.path.join(model_dir, checkpoint_prefix)
saved_path = checkpoint.save(checkpoint_path)
logging.info('Saving model as TF checkpoint: %s', saved_path)
return
def _get_input_iterator(input_fn, strategy):
"""Returns distributed dataset iterator."""
# When training with TPU pods, datasets needs to be cloned across
# workers. Since Dataset instance cannot be cloned in eager mode, we instead
# pass callable that returns a dataset.
if not callable(input_fn):
raise ValueError('`input_fn` should be a closure that returns a dataset.')
iterator = iter(
strategy.experimental_distribute_datasets_from_function(input_fn))
return iterator
def _float_metric_value(metric):
"""Gets the value of a float-value keras metric."""
return metric.result().numpy().astype(float)
def steps_to_run(current_step, steps_per_epoch, steps_per_loop):
"""Calculates steps to run on device."""
if steps_per_loop <= 0:
raise ValueError('steps_per_loop should be positive integer.')
if steps_per_loop == 1:
return steps_per_loop
remainder_in_epoch = current_step % steps_per_epoch
if remainder_in_epoch != 0:
return min(steps_per_epoch - remainder_in_epoch, steps_per_loop)
else:
return steps_per_loop
def write_txt_summary(training_summary, summary_dir):
"""Writes a summary text file to record stats."""
summary_path = os.path.join(summary_dir, _SUMMARY_TXT)
with tf.io.gfile.GFile(summary_path, 'wb') as f:
logging.info('Training Summary: \n%s', str(training_summary))
f.write(json.dumps(training_summary, indent=4))
def run_customized_training_loop(
# pylint: disable=invalid-name
_sentinel=None,
# pylint: enable=invalid-name
strategy=None,
model_fn=None,
loss_fn=None,
model_dir=None,
train_input_fn=None,
steps_per_epoch=None,
steps_per_loop=1,
epochs=1,
eval_input_fn=None,
eval_steps=None,
metric_fn=None,
init_checkpoint=None,
custom_callbacks=None,
run_eagerly=False,
sub_model_export_name=None):
"""Run BERT pretrain model training using low-level API.
Arguments:
_sentinel: Used to prevent positional parameters. Internal, do not use.
strategy: Distribution strategy on which to run low level training loop.
model_fn: Function that returns a tuple (model, sub_model). Caller of this
function should add optimizer to the `model` via calling
`model.compile()` API or manually setting `model.optimizer` attribute.
Second element of the returned tuple(sub_model) is an optional sub model
to be used for initial checkpoint -- if provided.
loss_fn: Function with signature func(labels, logits) and returns a loss
tensor.
model_dir: Model directory used during training for restoring/saving model
weights.
train_input_fn: Function that returns a tf.data.Dataset used for training.
steps_per_epoch: Number of steps to run per epoch. At the end of each
epoch, model checkpoint will be saved and evaluation will be conducted
if evaluation dataset is provided.
steps_per_loop: Number of steps per graph-mode loop. In order to reduce
communication in eager context, training logs are printed every
steps_per_loop.
epochs: Number of epochs to train.
eval_input_fn: Function that returns evaluation dataset. If none,
evaluation is skipped.
eval_steps: Number of steps to run evaluation. Required if `eval_input_fn`
is not none.
metric_fn: A metrics function that returns a Keras Metric object to record
evaluation result using evaluation dataset or with training dataset
after every epoch.
init_checkpoint: Optional checkpoint to load to `sub_model` returned by
`model_fn`.
custom_callbacks: A list of Keras Callbacks objects to run during
training. More specifically, `on_batch_begin()`, `on_batch_end()`,
methods are invoked during training.
run_eagerly: Whether to run model training in pure eager execution. This
should be disabled for TPUStrategy.
sub_model_export_name: If not None, will export `sub_model` returned by
`model_fn` into checkpoint files. The name of intermediate checkpoint
file is {sub_model_export_name}_step_{step}.ckpt and the last
checkpoint's name is {sub_model_export_name}.ckpt;
if None, `sub_model` will not be exported as checkpoint.
Returns:
Trained model.
Raises:
ValueError: (1) When model returned by `model_fn` does not have optimizer
attribute or when required parameters are set to none. (2) eval args are
not specified correctly. (3) metric_fn must be a callable if specified.
(4) sub_model_checkpoint_name is specified, but `sub_model` returned
by `model_fn` is None.
"""
if _sentinel is not None:
raise ValueError('only call `run_customized_training_loop()` '
'with named arguments.')
required_arguments = [
strategy, model_fn, loss_fn, model_dir, steps_per_epoch, train_input_fn
]
if [arg for arg in required_arguments if arg is None]:
raise ValueError('`strategy`, `model_fn`, `loss_fn`, `model_dir`, '
'`steps_per_loop` and `steps_per_epoch` are required '
'parameters.')
if steps_per_loop > steps_per_epoch:
logging.error(
'steps_per_loop: %d is specified to be greater than '
' steps_per_epoch: %d, we will use steps_per_epoch as'
' steps_per_loop.', steps_per_loop, steps_per_epoch)
steps_per_loop = steps_per_epoch
assert tf.executing_eagerly()
if run_eagerly:
if steps_per_loop > 1:
raise ValueError(
'steps_per_loop is used for performance optimization. When you want '
'to run eagerly, you cannot leverage graph mode loop.')
if isinstance(strategy, tf.distribute.experimental.TPUStrategy):
raise ValueError(
'TPUStrategy should not run eagerly as it heavily relies on graph'
' optimization for the distributed system.')
if eval_input_fn and (eval_steps is None or metric_fn is None):
raise ValueError(
'`eval_steps` and `metric_fn` are required when `eval_input_fn` '
'is not none.')
if metric_fn and not callable(metric_fn):
raise ValueError(
'if `metric_fn` is specified, metric_fn must be a callable.')
total_training_steps = steps_per_epoch * epochs
# To reduce unnecessary send/receive input pipeline operation, we place input
# pipeline ops in worker task.
train_iterator = _get_input_iterator(train_input_fn, strategy)
with distribution_utils.get_strategy_scope(strategy):
# To correctly place the model weights on accelerators,
# model and optimizer should be created in scope.
model, sub_model = model_fn()
if not hasattr(model, 'optimizer'):
raise ValueError('User should set optimizer attribute to model '
'inside `model_fn`.')
if sub_model_export_name and sub_model is None:
raise ValueError('sub_model_export_name is specified as %s, but '
'sub_model is None.' % sub_model_export_name)
optimizer = model.optimizer
use_float16 = isinstance(
optimizer, tf.keras.mixed_precision.experimental.LossScaleOptimizer)
if init_checkpoint:
logging.info(
'Checkpoint file %s found and restoring from '
'initial checkpoint for core model.', init_checkpoint)
checkpoint = tf.train.Checkpoint(model=sub_model)
checkpoint.restore(init_checkpoint).assert_existing_objects_matched()
logging.info('Loading from checkpoint file completed')
train_loss_metric = tf.keras.metrics.Mean(
'training_loss', dtype=tf.float32)
eval_metrics = [metric_fn()] if metric_fn else []
# If evaluation is required, make a copy of metric as it will be used by
# both train and evaluation.
train_metrics = [
metric.__class__.from_config(metric.get_config())
for metric in eval_metrics
]
# Create summary writers
summary_dir = os.path.join(model_dir, 'summaries')
eval_summary_writer = tf.summary.create_file_writer(
os.path.join(summary_dir, 'eval'))
if steps_per_loop >= _MIN_SUMMARY_STEPS:
# Only writes summary when the stats are collected sufficiently over
# enough steps.
train_summary_writer = tf.summary.create_file_writer(
os.path.join(summary_dir, 'train'))
else:
train_summary_writer = None
# Collects training variables.
training_vars = model.trainable_variables
def _replicated_step(inputs):
"""Replicated training step."""
inputs, labels = inputs
with tf.GradientTape() as tape:
model_outputs = model(inputs, training=True)
loss = loss_fn(labels, model_outputs)
if use_float16:
scaled_loss = optimizer.get_scaled_loss(loss)
if use_float16:
scaled_grads = tape.gradient(scaled_loss, training_vars)
grads = optimizer.get_unscaled_gradients(scaled_grads)
else:
grads = tape.gradient(loss, training_vars)
optimizer.apply_gradients(zip(grads, training_vars))
# For reporting, the metric takes the mean of losses.
train_loss_metric.update_state(loss)
for metric in train_metrics:
metric.update_state(labels, model_outputs)
@tf.function
def train_steps(iterator, steps):
"""Performs distributed training steps in a loop.
Args:
iterator: the distributed iterator of training datasets.
steps: an tf.int32 integer tensor to specify number of steps to run
inside host training loop.
Raises:
ValueError: Any of the arguments or tensor shapes are invalid.
"""
if not isinstance(steps, tf.Tensor):
raise ValueError('steps should be an Tensor. Python object may cause '
'retracing.')
for _ in tf.range(steps):
strategy.experimental_run_v2(_replicated_step, args=(next(iterator),))
def train_single_step(iterator):
"""Performs a distributed training step.
Args:
iterator: the distributed iterator of training datasets.
Raises:
ValueError: Any of the arguments or tensor shapes are invalid.
"""
strategy.experimental_run_v2(_replicated_step, args=(next(iterator),))
def test_step(iterator):
"""Calculates evaluation metrics on distributed devices."""
def _test_step_fn(inputs):
"""Replicated accuracy calculation."""
inputs, labels = inputs
model_outputs = model(inputs, training=False)
for metric in eval_metrics:
metric.update_state(labels, model_outputs)
strategy.experimental_run_v2(_test_step_fn, args=(next(iterator),))
if not run_eagerly:
train_single_step = tf.function(train_single_step)
test_step = tf.function(test_step)
def _run_evaluation(current_training_step, test_iterator):
"""Runs validation steps and aggregate metrics."""
for _ in range(eval_steps):
test_step(test_iterator)
with eval_summary_writer.as_default():
for metric in eval_metrics + model.metrics:
metric_value = _float_metric_value(metric)
logging.info('Step: [%d] Validation %s = %f', current_training_step,
metric.name, metric_value)
tf.summary.scalar(
metric.name, metric_value, step=current_training_step)
eval_summary_writer.flush()
def _run_callbacks_on_batch_begin(batch):
"""Runs custom callbacks at the start of every step."""
if not custom_callbacks:
return
for callback in custom_callbacks:
callback.on_batch_begin(batch)
def _run_callbacks_on_batch_end(batch):
"""Runs custom callbacks at the end of every step."""
if not custom_callbacks:
return
for callback in custom_callbacks:
callback.on_batch_end(batch)
# Training loop starts here.
checkpoint = tf.train.Checkpoint(model=model, optimizer=optimizer)
sub_model_checkpoint = tf.train.Checkpoint(
model=sub_model) if sub_model_export_name else None
latest_checkpoint_file = tf.train.latest_checkpoint(model_dir)
if latest_checkpoint_file:
logging.info(
'Checkpoint file %s found and restoring from '
'checkpoint', latest_checkpoint_file)
checkpoint.restore(latest_checkpoint_file)
logging.info('Loading from checkpoint file completed')
current_step = optimizer.iterations.numpy()
checkpoint_name = 'ctl_step_{step}.ckpt'
while current_step < total_training_steps:
# Training loss/metric are taking average over steps inside micro
# training loop. We reset the their values before each round.
train_loss_metric.reset_states()
for metric in train_metrics + model.metrics:
metric.reset_states()
_run_callbacks_on_batch_begin(current_step)
# Runs several steps in the host while loop.
steps = steps_to_run(current_step, steps_per_epoch, steps_per_loop)
if steps == 1:
# TODO(zongweiz): merge with train_steps once tf.while_loop
# GPU performance bugs are fixed.
train_single_step(train_iterator)
else:
# Converts steps to a Tensor to avoid tf.function retracing.
train_steps(train_iterator,
tf.convert_to_tensor(steps, dtype=tf.int32))
_run_callbacks_on_batch_end(current_step)
current_step += steps
train_loss = _float_metric_value(train_loss_metric)
# Updates training logging.
training_status = 'Train Step: %d/%d / loss = %s' % (
current_step, total_training_steps, train_loss)
if train_summary_writer:
with train_summary_writer.as_default():
tf.summary.scalar(
train_loss_metric.name, train_loss, step=current_step)
for metric in train_metrics + model.metrics:
metric_value = _float_metric_value(metric)
training_status += ' %s = %f' % (metric.name, metric_value)
tf.summary.scalar(metric.name, metric_value, step=current_step)
train_summary_writer.flush()
logging.info(training_status)
# Saves model checkpoints and run validation steps at every epoch end.
if current_step % steps_per_epoch == 0:
# To avoid repeated model saving, we do not save after the last
# step of training.
if current_step < total_training_steps:
_save_checkpoint(checkpoint, model_dir,
checkpoint_name.format(step=current_step))
if sub_model_export_name:
_save_checkpoint(
sub_model_checkpoint, model_dir,
'%s_step_%d.ckpt' % (sub_model_export_name, current_step))
if eval_input_fn:
logging.info('Running evaluation after step: %s.', current_step)
_run_evaluation(current_step,
_get_input_iterator(eval_input_fn, strategy))
# Re-initialize evaluation metric.
for metric in eval_metrics + model.metrics:
metric.reset_states()
_save_checkpoint(checkpoint, model_dir,
checkpoint_name.format(step=current_step))
if sub_model_export_name:
_save_checkpoint(sub_model_checkpoint, model_dir,
'%s.ckpt' % sub_model_export_name)
if eval_input_fn:
logging.info('Running final evaluation after training is complete.')
_run_evaluation(current_step,
_get_input_iterator(eval_input_fn, strategy))
training_summary = {
'total_training_steps': total_training_steps,
'train_loss': _float_metric_value(train_loss_metric),
}
if eval_metrics:
# TODO(hongkuny): Cleans up summary reporting in text.
training_summary['last_train_metrics'] = _float_metric_value(
train_metrics[0])
training_summary['eval_metrics'] = _float_metric_value(eval_metrics[0])
write_txt_summary(training_summary, summary_dir)
return model
| apache-2.0 | 8,299,827,748,584,289,000 | 39.052874 | 80 | 0.66418 | false |
bponsler/icsv | icsv/tests/writeReadTests.py | 1 | 1580 | from os import unlink
from os.path import exists
from unittest import TestCase
from icsv import icsv, Row
class WriteReadTests(TestCase):
def setUp(self):
pass
def test_filter(self):
filename = "/tmp/testCsv.csv"
headers = ["one", "two", "three"]
csv = icsv(headers)
self.assertTrue(csv is not None)
self.assertEqual(csv.headers(), headers)
self.assertEqual(csv.delimiter(), ',')
rows = [
[0, 1, 2],
[3, 4, 5],
["hello", 1, True],
[1, False, "world"],
]
# Write all of the data to the file
for row in rows:
csv.addRow(row)
self.assertEqual(csv.numRows(), 4)
# Save the file
writer = csv.write(filename)
self.assertTrue(writer is not None)
# Read the same CSV
reader = csv.fromFile(filename, headers)
self.assertTrue(reader is not None)
# Compare the read data to the original
self.assertEqual(reader.numRows(), csv.numRows())
self.assertEqual(reader.numCols(), csv.numCols())
self.assertEqual(reader.headers(), csv.headers())
for index in range(len(rows)):
read = reader.getRow(index)
# Read data will be all strings
original = list(map(str, csv.getRow(index).list()))
expected = list(map(str, rows[index]))
for index in range(len(original)):
self.assertEqual(original[index], expected[index])
self.assertEqual(read.list(), expected)
| mit | 7,776,429,499,276,794,000 | 27.214286 | 63 | 0.570886 | false |
SomewhatDamaged/Damaged-Cogs | persistentname.py | 1 | 2155 | import discord
from discord.ext import commands
import asyncio
import os
from .utils import checks
from .utils.dataIO import fileIO
class Persistentname:
"""When a user changes their account name, and no nickname is set, this will set their nickname to their old account name."""
def __init__(self, bot):
self.bot = bot
self.data = fileIO("data/persistentname/servers.json", "load")
@checks.admin_or_permissions(manage_server=True)
@commands.command(pass_context=True, name="persistentname", no_pm=True)
async def _toggle(self, context):
"""Toggles persistent names on/off for this server.
When a user changes their account name, and they have no nickname set, this will force their old account name to be their nickname."""
if context.message.server.id in self.data:
self.data.remove(context.message.server.id)
await self.bot.say("I will no longer persist usernames on this server.")
else:
self.data.append(context.message.server.id)
await self.bot.say("I will now persist usernames on this server.")
fileIO("data/persistentname/servers.json", "save", self.data)
async def listener(self, old_member, new_member):
if new_member.server.id not in self.data:
return
if old_member.name == new_member.name:
return
if new_member.nick is not None:
return
await self.bot.change_nickname(new_member, old_member.name)
def check_folder():
if not os.path.exists("data/persistentname"):
print("Creating data/persistentname folder...")
os.makedirs("data/persistentname")
def check_files():
default_servers = [ ]
if not fileIO("data/persistentname/servers.json", "check"):
print("Creating default persistentname servers.json...")
fileIO("data/persistentname/servers.json", "save", default_servers)
def setup(bot):
check_folder()
check_files()
n = Persistentname(bot)
bot.add_cog(n)
bot.add_listener(n.listener, 'on_member_update') | mit | 9,112,897,618,856,068,000 | 38.698113 | 142 | 0.646868 | false |
timodonnell/sefara | docs/conf.py | 1 | 9542 | # -*- coding: utf-8 -*-
#
# sefara documentation build configuration file, created by
# sphinx-quickstart on Wed Jun 10 19:36:39 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.autosummary',
'sphinx.ext.viewcode',
'numpydoc',
'sphinxcontrib.programoutput',
'sphinxcontrib.autorun2',
'sphinxcontrib.autoprogram',
]
# Added by Tim
# http://stackoverflow.com/questions/12206334/sphinx-autosummary-toctree-contains-reference-to-nonexisting-document-warnings
numpydoc_show_class_members = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'sefara'
copyright = u'2015, Tim O\'Donnell'
author = u'Tim O\'Donnell'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.1'
# The full version, including alpha/beta/rc tags.
release = '0.0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
default_role = 'any'
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'sefaradoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'sefara.tex', u'sefara Documentation',
u'Tim O\'Donnell', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'sefara', u'sefara Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'sefara', u'sefara Documentation',
author, 'sefara', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| apache-2.0 | -6,398,887,219,144,911,000 | 31.236486 | 124 | 0.707713 | false |
Manolaru/Python_train | Les_2/Task_6/fixture/user.py | 1 | 2775 | class UserHelper:
def __init__(self,app):
self.app = app
def create_user(self, user):
wd = self.app.wd
self.app.open_home_page()
# open user form
wd.find_element_by_link_text("add new").click()
# fill user form
wd.find_element_by_name("firstname").click()
wd.find_element_by_name("firstname").clear()
wd.find_element_by_name("firstname").send_keys(user.fname)
wd.find_element_by_name("lastname").click()
wd.find_element_by_name("lastname").clear()
wd.find_element_by_name("lastname").send_keys(user.lname)
wd.find_element_by_name("title").click()
wd.find_element_by_name("title").clear()
wd.find_element_by_name("title").send_keys(user.title)
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys()
wd.find_element_by_name("company").click()
wd.find_element_by_name("company").clear()
wd.find_element_by_name("company").send_keys(user.company)
wd.find_element_by_name("address").click()
wd.find_element_by_name("address").clear()
wd.find_element_by_name("address").send_keys(user.address)
wd.find_element_by_name("home").click()
wd.find_element_by_name("home").clear()
wd.find_element_by_name("home").send_keys(user.phoneh)
wd.find_element_by_name("email").click()
wd.find_element_by_name("email").clear()
wd.find_element_by_name("email").send_keys(user.email)
if not wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[1]//option[3]").click()
if not wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[7]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[2]//option[7]").click()
wd.find_element_by_name("byear").click()
wd.find_element_by_name("byear").clear()
wd.find_element_by_name("byear").send_keys(user.byear)
if not wd.find_element_by_xpath("//div[@id='content']/form/select[5]//option[3]").is_selected():
wd.find_element_by_xpath("//div[@id='content']/form/select[5]//option[3]").click()
# submit user creation
wd.find_element_by_xpath("//div[@id='content']/form/input[21]").click()
def delete_first_user(self):
wd = self.app.wd
self.app.open_home_page()
# select first group
wd.find_element_by_name("selected[]").click()
# submit deletion
wd.find_element_by_xpath ("//div[@id='content']/form[2]/div[2]/input").click()
wd.switch_to_alert().accept()
| apache-2.0 | 1,534,537,264,706,128,100 | 46.844828 | 104 | 0.606486 | false |
simonspa/django-datacollect | datacollect/questionnaire/migrations/0015_auto_20170324_1600.py | 1 | 2504 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-03-24 15:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('questionnaire', '0014_auto_20170323_2344'),
]
operations = [
migrations.AlterField(
model_name='followup',
name='familiarity',
field=models.IntegerField(choices=[(1, 'I am very familiar with the case.'), (2, 'I have information but it might be incomplete.'), (3, 'I only have little information.'), (4, "I don't have any information.")], default=4, null=True, verbose_name='Level of familiarity'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_1',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_2',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_3',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_4',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='incident_date_5',
field=models.DateField(blank=True, help_text='Format YYYY-MM-DD', null=True, verbose_name='Date of the incident'),
),
migrations.AlterField(
model_name='followup',
name='language',
field=models.CharField(choices=[(b'en', 'English'), (b'es', 'Spanish'), (b'fr', 'French')], default='en', max_length=7, verbose_name='Language'),
),
migrations.AlterField(
model_name='followup',
name='rating',
field=models.IntegerField(choices=[(1, 'Much better'), (2, 'Somewhat better'), (3, 'Situation stayed the same overall'), (4, 'Somewhat worse'), (5, 'Much worse'), (6, "I don't know")], default=6, null=True, verbose_name='Rating'),
),
]
| gpl-3.0 | 2,766,687,761,227,599,000 | 44.527273 | 282 | 0.594249 | false |
redhat-openstack/manila | manila/tests/share/drivers/test_glusterfs.py | 1 | 29189 | # Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import socket
import ddt
import mock
from oslo_config import cfg
from manila import context
from manila import exception
from manila.share import configuration as config
from manila.share.drivers import ganesha
from manila.share.drivers import glusterfs
from manila.share.drivers.glusterfs import layout
from manila import test
from manila.tests import fake_share
from manila.tests import fake_utils
CONF = cfg.CONF
fake_gluster_manager_attrs = {
'export': '127.0.0.1:/testvol',
'host': '127.0.0.1',
'qualified': '[email protected]:/testvol',
'user': 'testuser',
'volume': 'testvol',
'path_to_private_key': '/fakepath/to/privatekey',
'remote_server_password': 'fakepassword',
}
fake_share_name = 'fakename'
NFS_EXPORT_DIR = 'nfs.export-dir'
NFS_EXPORT_VOL = 'nfs.export-volumes'
NFS_RPC_AUTH_ALLOW = 'nfs.rpc-auth-allow'
NFS_RPC_AUTH_REJECT = 'nfs.rpc-auth-reject'
@ddt.ddt
class GlusterfsShareDriverTestCase(test.TestCase):
"""Tests GlusterfsShareDriver."""
def setUp(self):
super(GlusterfsShareDriverTestCase, self).setUp()
fake_utils.stub_out_utils_execute(self)
self._execute = fake_utils.fake_execute
self._context = context.get_admin_context()
self.addCleanup(fake_utils.fake_execute_set_repliers, [])
self.addCleanup(fake_utils.fake_execute_clear_log)
CONF.set_default('reserved_share_percentage', 50)
CONF.set_default('driver_handles_share_servers', False)
self.fake_conf = config.Configuration(None)
self._driver = glusterfs.GlusterfsShareDriver(
execute=self._execute,
configuration=self.fake_conf)
self.share = fake_share.fake_share(share_proto='NFS')
def test_do_setup(self):
self.mock_object(self._driver, '_get_helper')
self.mock_object(layout.GlusterfsShareDriverBase, 'do_setup')
_context = mock.Mock()
self._driver.do_setup(_context)
self._driver._get_helper.assert_called_once_with()
layout.GlusterfsShareDriverBase.do_setup.assert_called_once_with(
_context)
@ddt.data(True, False)
def test_setup_via_manager(self, has_parent):
gmgr = mock.Mock()
gmgr.gluster_call = mock.Mock()
share_mgr_parent = mock.Mock() if has_parent else None
nfs_helper = mock.Mock()
nfs_helper.get_export = mock.Mock(return_value='host:/vol')
self._driver.nfs_helper = mock.Mock(return_value=nfs_helper)
ret = self._driver._setup_via_manager(
{'manager': gmgr, 'share': self.share},
share_manager_parent=share_mgr_parent)
gmgr.gluster_call.assert_called_once_with(
'volume', 'set', gmgr.volume, 'nfs.export-volumes', 'off')
self._driver.nfs_helper.assert_called_once_with(
self._execute, self.fake_conf, gluster_manager=gmgr)
nfs_helper.get_export.assert_called_once_with(self.share)
self.assertEqual('host:/vol', ret)
@ddt.data({'helpercls': None, 'path': '/fakepath'},
{'helpercls': None, 'path': None},
{'helpercls': glusterfs.GlusterNFSHelper, 'path': '/fakepath'},
{'helpercls': glusterfs.GlusterNFSHelper, 'path': None})
@ddt.unpack
def test_setup_via_manager_path(self, helpercls, path):
gmgr = mock.Mock()
gmgr.gluster_call = mock.Mock()
gmgr.path = path
if not helpercls:
helper = mock.Mock()
helper.get_export = mock.Mock(return_value='host:/vol')
helpercls = mock.Mock(return_value=helper)
self._driver.nfs_helper = helpercls
if helpercls == glusterfs.GlusterNFSHelper and path is None:
gmgr.get_gluster_vol_option = mock.Mock(return_value='on')
self._driver._setup_via_manager(
{'manager': gmgr, 'share': self.share})
if helpercls == glusterfs.GlusterNFSHelper and path is None:
gmgr.get_gluster_vol_option.assert_called_once_with(
NFS_EXPORT_VOL)
args = (NFS_RPC_AUTH_REJECT, '*')
else:
args = (NFS_EXPORT_VOL, 'off')
gmgr.gluster_call.assert_called_once_with(
'volume', 'set', gmgr.volume, *args)
@ddt.data(exception.ProcessExecutionError, RuntimeError)
def test_setup_via_manager_exception(self, _exception):
gmgr = mock.Mock()
gmgr.gluster_call = mock.Mock(side_effect=_exception)
gmgr.volume = 'somevol'
self.assertRaises(
{exception.ProcessExecutionError:
exception.GlusterfsException}.get(
_exception, _exception), self._driver._setup_via_manager,
{'manager': gmgr, 'share': self.share})
@ddt.data('off', 'no', '0', 'false', 'disable', 'foobarbaz')
def test_setup_via_manager_export_volumes_on(self, export_vol):
gmgr = mock.Mock()
gmgr.path = None
gmgr.get_gluster_vol_option = mock.Mock(return_value=export_vol)
self._driver.nfs_helper = glusterfs.GlusterNFSHelper
self.assertRaises(exception.GlusterfsException,
self._driver._setup_via_manager,
{'manager': gmgr, 'share': self.share})
gmgr.get_gluster_vol_option.assert_called_once_with(NFS_EXPORT_VOL)
def test_check_for_setup_error(self):
self._driver.check_for_setup_error()
def test_update_share_stats(self):
self.mock_object(layout.GlusterfsShareDriverBase,
'_update_share_stats')
self._driver._update_share_stats()
(layout.GlusterfsShareDriverBase._update_share_stats.
assert_called_once_with({'storage_protocol': 'NFS',
'vendor_name': 'Red Hat',
'share_backend_name': 'GlusterFS',
'reserved_percentage': 50}))
def test_get_network_allocations_number(self):
self.assertEqual(0, self._driver.get_network_allocations_number())
def test_get_helper(self):
ret = self._driver._get_helper()
self.assertIsInstance(ret, self._driver.nfs_helper)
@ddt.data({'path': '/fakepath', 'helper': glusterfs.GlusterNFSHelper},
{'path': None, 'helper': glusterfs.GlusterNFSVolHelper})
@ddt.unpack
def test_get_helper_vol(self, path, helper):
self._driver.nfs_helper = glusterfs.GlusterNFSHelper
gmgr = mock.Mock(path=path)
ret = self._driver._get_helper(gmgr)
self.assertIsInstance(ret, helper)
@ddt.data({'op': 'allow', 'kwargs': {}},
{'op': 'allow', 'kwargs': {'share_server': None}},
{'op': 'deny', 'kwargs': {}},
{'op': 'deny', 'kwargs': {'share_server': None}})
@ddt.unpack
def test_allow_deny_access_via_manager(self, op, kwargs):
self.mock_object(self._driver, '_get_helper')
gmgr = mock.Mock()
ret = getattr(self._driver, "_%s_access_via_manager" % op
)(gmgr, self._context, self.share,
fake_share.fake_access, **kwargs)
self._driver._get_helper.assert_called_once_with(gmgr)
getattr(
self._driver._get_helper(),
"%s_access" % op).assert_called_once_with(
'/', self.share, fake_share.fake_access)
self.assertIsNone(ret)
@ddt.ddt
class GlusterNFSHelperTestCase(test.TestCase):
"""Tests GlusterNFSHelper."""
def setUp(self):
super(GlusterNFSHelperTestCase, self).setUp()
fake_utils.stub_out_utils_execute(self)
gluster_manager = mock.Mock(**fake_gluster_manager_attrs)
self._execute = mock.Mock(return_value=('', ''))
self.fake_conf = config.Configuration(None)
self._helper = glusterfs.GlusterNFSHelper(
self._execute, self.fake_conf, gluster_manager=gluster_manager)
def test_get_export(self):
ret = self._helper.get_export(mock.Mock())
self.assertEqual(fake_gluster_manager_attrs['export'], ret)
@ddt.data({'output_str': '/foo(10.0.0.1|10.0.0.2),/bar(10.0.0.1)',
'expected': {'foo': ['10.0.0.1', '10.0.0.2'],
'bar': ['10.0.0.1']}},
{'output_str': None, 'expected': {}})
@ddt.unpack
def test_get_export_dir_dict(self, output_str, expected):
self.mock_object(self._helper.gluster_manager,
'get_gluster_vol_option',
mock.Mock(return_value=output_str))
ret = self._helper._get_export_dir_dict()
self.assertEqual(expected, ret)
(self._helper.gluster_manager.get_gluster_vol_option.
assert_called_once_with(NFS_EXPORT_DIR))
def test_manage_access_bad_access_type(self):
cbk = None
access = {'access_type': 'bad', 'access_to': None}
self.assertRaises(exception.InvalidShareAccess,
self._helper._manage_access, fake_share_name,
access['access_type'], access['access_to'], cbk)
def test_manage_access_noop(self):
cbk = mock.Mock(return_value=True)
access = fake_share.fake_access()
export_dir_dict = mock.Mock()
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
ret = self._helper._manage_access(fake_share_name,
access['access_type'],
access['access_to'], cbk)
self._helper._get_export_dir_dict.assert_called_once_with()
cbk.assert_called_once_with(export_dir_dict, fake_share_name,
access['access_to'])
self.assertIsNone(ret)
def test_manage_access_adding_entry(self):
def cbk(d, key, value):
d[key].append(value)
access = fake_share.fake_access()
export_dir_dict = {
'example.com': ['10.0.0.1'],
'fakename': ['10.0.0.2'],
}
export_str = '/example.com(10.0.0.1),/fakename(10.0.0.2|10.0.0.1)'
args = ('volume', 'set', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR, export_str)
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
ret = self._helper._manage_access(fake_share_name,
access['access_type'],
access['access_to'], cbk)
self.assertIsNone(ret)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
def test_manage_access_adding_entry_cmd_fail(self):
def cbk(d, key, value):
d[key].append(value)
def raise_exception(*args, **kwargs):
raise exception.ProcessExecutionError()
access = fake_share.fake_access()
export_dir_dict = {
'example.com': ['10.0.0.1'],
'fakename': ['10.0.0.2'],
}
export_str = '/example.com(10.0.0.1),/fakename(10.0.0.2|10.0.0.1)'
args = ('volume', 'set', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR, export_str)
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self.mock_object(self._helper.gluster_manager, 'gluster_call',
mock.Mock(side_effect=raise_exception))
self.mock_object(glusterfs.LOG, 'error')
self.assertRaises(exception.ProcessExecutionError,
self._helper._manage_access,
fake_share_name, access['access_type'],
access['access_to'], cbk)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
glusterfs.LOG.error.assert_called_once_with(mock.ANY, mock.ANY)
def test_manage_access_removing_last_entry(self):
def cbk(d, key, value):
d.pop(key)
access = fake_share.fake_access()
args = ('volume', 'reset', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR)
export_dir_dict = {'fakename': ['10.0.0.1']}
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
ret = self._helper._manage_access(fake_share_name,
access['access_type'],
access['access_to'], cbk)
self.assertIsNone(ret)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
def test_allow_access_with_share_having_noaccess(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_dir_dict = {'example.com': ['10.0.0.1']}
export_str = '/example.com(10.0.0.1),/fakename(10.0.0.1)'
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self._helper.gluster_manager.path = '/fakename'
self._helper.allow_access(None, share, access)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
'volume', 'set', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR, export_str)
def test_allow_access_with_share_having_access(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_dir_dict = {'fakename': ['10.0.0.1']}
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self._helper.gluster_manager.path = '/fakename'
self._helper.allow_access(None, share, access)
self._helper._get_export_dir_dict.assert_called_once_with()
self.assertFalse(self._helper.gluster_manager.gluster_call.called)
def test_deny_access_with_share_having_noaccess(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_dir_dict = {}
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self._helper.gluster_manager.path = '/fakename'
self._helper.deny_access(None, share, access)
self._helper._get_export_dir_dict.assert_called_once_with()
self.assertFalse(self._helper.gluster_manager.gluster_call.called)
def test_deny_access_with_share_having_access(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_dir_dict = {
'example.com': ['10.0.0.1'],
'fakename': ['10.0.0.1'],
}
export_str = '/example.com(10.0.0.1)'
args = ('volume', 'set', self._helper.gluster_manager.volume,
NFS_EXPORT_DIR, export_str)
self.mock_object(self._helper, '_get_export_dir_dict',
mock.Mock(return_value=export_dir_dict))
self._helper.gluster_manager.path = '/fakename'
self._helper.deny_access(None, share, access)
self._helper._get_export_dir_dict.assert_called_once_with()
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
@ddt.ddt
class GlusterNFSVolHelperTestCase(test.TestCase):
"""Tests GlusterNFSVolHelper."""
def setUp(self):
super(GlusterNFSVolHelperTestCase, self).setUp()
fake_utils.stub_out_utils_execute(self)
gluster_manager = mock.Mock(**fake_gluster_manager_attrs)
self._execute = mock.Mock(return_value=('', ''))
self.fake_conf = config.Configuration(None)
self._helper = glusterfs.GlusterNFSVolHelper(
self._execute, self.fake_conf, gluster_manager=gluster_manager)
@ddt.data({'output_str': '10.0.0.1,10.0.0.2',
'expected': ['10.0.0.1', '10.0.0.2']},
{'output_str': None, 'expected': []})
@ddt.unpack
def test_get_vol_exports(self, output_str, expected):
self.mock_object(self._helper.gluster_manager,
'get_gluster_vol_option',
mock.Mock(return_value=output_str))
ret = self._helper._get_vol_exports()
self.assertEqual(expected, ret)
(self._helper.gluster_manager.get_gluster_vol_option.
assert_called_once_with(NFS_RPC_AUTH_ALLOW))
def test_manage_access_bad_access_type(self):
cbk = None
access = {'access_type': 'bad', 'access_to': None}
self.assertRaises(exception.InvalidShareAccess,
self._helper._manage_access,
access['access_type'], access['access_to'], cbk)
def test_manage_access_noop(self):
cbk = mock.Mock(return_value=True)
access = fake_share.fake_access()
export_list = mock.Mock()
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
ret = self._helper._manage_access(access['access_type'],
access['access_to'], cbk)
self._helper._get_vol_exports.assert_called_once_with()
cbk.assert_called_once_with(export_list, access['access_to'])
self.assertIsNone(ret)
def test_manage_access_adding_entry(self):
def cbk(li, v):
li.append(v)
access = fake_share.fake_access()
export_list = ['10.0.0.2']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
ret = self._helper._manage_access(access['access_type'],
access['access_to'], cbk)
self.assertIsNone(ret)
self._helper._get_vol_exports.assert_called_once_with()
export_str = '10.0.0.2,10.0.0.1'
argseq = (('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW, export_str),
('volume', 'reset', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_REJECT))
self.assertEqual(
[mock.call(*a) for a in argseq],
self._helper.gluster_manager.gluster_call.call_args_list)
def test_manage_access_adding_entry_cmd_fail(self):
def cbk(li, v):
li.append(v)
def raise_exception(*args, **kwargs):
raise exception.ProcessExecutionError()
access = fake_share.fake_access()
export_list = ['10.0.0.2']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self.mock_object(self._helper.gluster_manager, 'gluster_call',
mock.Mock(side_effect=raise_exception))
self.assertRaises(exception.ProcessExecutionError,
self._helper._manage_access,
access['access_type'],
access['access_to'], cbk)
self._helper._get_vol_exports.assert_called_once_with()
export_str = '10.0.0.2,10.0.0.1'
args = ('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW, export_str)
self._helper.gluster_manager.gluster_call.assert_called_once_with(
*args)
def test_manage_access_removing_last_entry(self):
def cbk(li, v):
li.remove(v)
access = fake_share.fake_access()
export_list = ['10.0.0.1']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
ret = self._helper._manage_access(access['access_type'],
access['access_to'], cbk)
self.assertIsNone(ret)
self._helper._get_vol_exports.assert_called_once_with()
argseq = (('volume', 'reset', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW),
('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_REJECT, '*'))
self.assertEqual(
[mock.call(*a) for a in argseq],
self._helper.gluster_manager.gluster_call.call_args_list)
def test_allow_access_with_share_having_noaccess(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_list = ['10.0.0.2']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self._helper.allow_access(None, share, access)
self._helper._get_vol_exports.assert_called_once_with()
export_str = '10.0.0.2,10.0.0.1'
argseq = (('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW, export_str),
('volume', 'reset', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_REJECT))
self.assertEqual(
[mock.call(*a) for a in argseq],
self._helper.gluster_manager.gluster_call.call_args_list)
def test_allow_access_with_share_having_access(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_list = ['10.0.0.1']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self._helper.allow_access(None, share, access)
self._helper._get_vol_exports.assert_called_once_with()
self.assertFalse(self._helper.gluster_manager.gluster_call.called)
def test_deny_access_with_share_having_noaccess(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_list = []
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self._helper.deny_access(None, share, access)
self._helper._get_vol_exports.assert_called_once_with()
self.assertFalse(self._helper.gluster_manager.gluster_call.called)
def test_deny_access_with_share_having_access(self):
access = fake_share.fake_access()
share = fake_share.fake_share()
export_list = ['10.0.0.1', '10.0.0.2']
self.mock_object(self._helper, '_get_vol_exports',
mock.Mock(return_value=export_list))
self._helper.deny_access(None, share, access)
self._helper._get_vol_exports.assert_called_once_with()
export_str = '10.0.0.2'
argseq = (('volume', 'set', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_ALLOW, export_str),
('volume', 'reset', self._helper.gluster_manager.volume,
NFS_RPC_AUTH_REJECT))
self.assertEqual(
[mock.call(*a) for a in argseq],
self._helper.gluster_manager.gluster_call.call_args_list)
class GaneshaNFSHelperTestCase(test.TestCase):
"""Tests GaneshaNFSHelper."""
def setUp(self):
super(GaneshaNFSHelperTestCase, self).setUp()
self.gluster_manager = mock.Mock(**fake_gluster_manager_attrs)
self._execute = mock.Mock(return_value=('', ''))
self._root_execute = mock.Mock(return_value=('', ''))
self.access = fake_share.fake_access()
self.fake_conf = config.Configuration(None)
self.fake_template = {'key': 'value'}
self.share = fake_share.fake_share()
self.mock_object(glusterfs.ganesha_utils, 'RootExecutor',
mock.Mock(return_value=self._root_execute))
self.mock_object(glusterfs.ganesha.GaneshaNASHelper, '__init__',
mock.Mock())
socket.gethostname = mock.Mock(return_value='example.com')
self._helper = glusterfs.GaneshaNFSHelper(
self._execute, self.fake_conf,
gluster_manager=self.gluster_manager)
self._helper.tag = 'GLUSTER-Ganesha-localhost'
def test_init_local_ganesha_server(self):
glusterfs.ganesha_utils.RootExecutor.assert_called_once_with(
self._execute)
socket.gethostname.assert_has_calls([mock.call()])
glusterfs.ganesha.GaneshaNASHelper.__init__.assert_has_calls(
[mock.call(self._root_execute, self.fake_conf,
tag='GLUSTER-Ganesha-example.com')])
def test_get_export(self):
ret = self._helper.get_export(self.share)
self.assertEqual('example.com:/fakename', ret)
def test_init_remote_ganesha_server(self):
ssh_execute = mock.Mock(return_value=('', ''))
CONF.set_default('glusterfs_ganesha_server_ip', 'fakeip')
self.mock_object(glusterfs.ganesha_utils, 'SSHExecutor',
mock.Mock(return_value=ssh_execute))
glusterfs.GaneshaNFSHelper(
self._execute, self.fake_conf,
gluster_manager=self.gluster_manager)
glusterfs.ganesha_utils.SSHExecutor.assert_called_once_with(
'fakeip', 22, None, 'root', password=None, privatekey=None)
glusterfs.ganesha.GaneshaNASHelper.__init__.assert_has_calls(
[mock.call(ssh_execute, self.fake_conf,
tag='GLUSTER-Ganesha-fakeip')])
def test_init_helper(self):
ganeshelper = mock.Mock()
exptemp = mock.Mock()
def set_attributes(*a, **kw):
self._helper.ganesha = ganeshelper
self._helper.export_template = exptemp
self.mock_object(ganesha.GaneshaNASHelper, 'init_helper',
mock.Mock(side_effect=set_attributes))
self.assertEqual({}, glusterfs.GaneshaNFSHelper.shared_data)
self._helper.init_helper()
ganesha.GaneshaNASHelper.init_helper.assert_called_once_with()
self.assertEqual(ganeshelper, self._helper.ganesha)
self.assertEqual(exptemp, self._helper.export_template)
self.assertEqual({
'GLUSTER-Ganesha-localhost': {
'ganesha': ganeshelper,
'export_template': exptemp}},
glusterfs.GaneshaNFSHelper.shared_data)
other_helper = glusterfs.GaneshaNFSHelper(
self._execute, self.fake_conf,
gluster_manager=self.gluster_manager)
other_helper.tag = 'GLUSTER-Ganesha-localhost'
other_helper.init_helper()
self.assertEqual(ganeshelper, other_helper.ganesha)
self.assertEqual(exptemp, other_helper.export_template)
def test_default_config_hook(self):
fake_conf_dict = {'key': 'value1'}
mock_ganesha_utils_patch = mock.Mock()
def fake_patch_run(tmpl1, tmpl2):
mock_ganesha_utils_patch(
copy.deepcopy(tmpl1), tmpl2)
tmpl1.update(tmpl2)
self.mock_object(glusterfs.ganesha.GaneshaNASHelper,
'_default_config_hook',
mock.Mock(return_value=self.fake_template))
self.mock_object(glusterfs.ganesha_utils, 'path_from',
mock.Mock(return_value='/fakedir/glusterfs/conf'))
self.mock_object(self._helper, '_load_conf_dir',
mock.Mock(return_value=fake_conf_dict))
self.mock_object(glusterfs.ganesha_utils, 'patch',
mock.Mock(side_effect=fake_patch_run))
ret = self._helper._default_config_hook()
glusterfs.ganesha.GaneshaNASHelper._default_config_hook.\
assert_called_once_with()
glusterfs.ganesha_utils.path_from.assert_called_once_with(
glusterfs.__file__, 'conf')
self._helper._load_conf_dir.assert_called_once_with(
'/fakedir/glusterfs/conf')
glusterfs.ganesha_utils.patch.assert_called_once_with(
self.fake_template, fake_conf_dict)
self.assertEqual(fake_conf_dict, ret)
def test_fsal_hook(self):
self._helper.gluster_manager.path = '/fakename'
output = {
'Hostname': '127.0.0.1',
'Volume': 'testvol',
'Volpath': '/fakename'
}
ret = self._helper._fsal_hook('/fakepath', self.share, self.access)
self.assertEqual(output, ret)
| apache-2.0 | -4,747,165,154,843,611,000 | 39.48405 | 78 | 0.590908 | false |
boxed/python-terminal-menu | terminal_menu.py | 1 | 2331 | import os
import sys
import termios
import fcntl
from blessings import Terminal
def getch():
fd = sys.stdin.fileno()
oldterm = termios.tcgetattr(fd)
newattr = termios.tcgetattr(fd)
newattr[3] = newattr[3] & ~termios.ICANON & ~termios.ECHO
termios.tcsetattr(fd, termios.TCSANOW, newattr)
oldflags = fcntl.fcntl(fd, fcntl.F_GETFL)
fcntl.fcntl(fd, fcntl.F_SETFL, oldflags | os.O_NONBLOCK)
c = None
try:
while 1:
try:
c = sys.stdin.read(1)
break
except IOError:
pass
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
fcntl.fcntl(fd, fcntl.F_SETFL, oldflags)
return c
prefix = '\x1b\x5b'
lookup = {
'\x1b\x5b\x41': 'up',
'\x1b\x5b\x42': 'down',
'\x1b\x5b\x44': 'left',
'\x1b\x5b\x43': 'right',
}
def get_arrow_key_or_character():
buf = ''
while True:
buf += getch()
if buf in lookup:
return lookup[buf]
if buf and not prefix.startswith(buf):
return buf
def menu(menu_items):
if not menu_items:
return None
# hide cursor
sys.stdout.write("\033[?25l")
sys.stdout.flush()
try:
term = Terminal()
print '\n' * (len(menu_items) - 2)
focus = 0
while True:
for i, line in enumerate(menu_items):
with term.location(0, term.height - len(menu_items) + i):
if i == focus:
print term.on_red(term.bright_white(line)),
else:
print line,
k = get_arrow_key_or_character()
if k == 'down':
focus += 1
elif k == 'up':
focus -= 1
elif k == '\n':
break
# make sure we don't go outside menu
if focus < 0:
focus = 0
if focus == len(menu_items):
focus = len(menu_items) - 1
finally:
# show cursor again
sys.stdout.write("\033[?25h")
sys.stdout.flush()
print '' # Write a newline to avoid next output writing over the last line of the menu
return menu_items[focus]
m = menu(['foo 1', 'foo 2', 'foo 3', 'foo 4', 'foo 5', 'foo 6'])
print 'chosen:', m | mit | 3,803,221,867,195,180,500 | 24.075269 | 91 | 0.510511 | false |
akshayaurora/kivy | kivy/config.py | 1 | 36306 | '''
Configuration object
====================
The :class:`Config` object is an instance of a modified Python ConfigParser.
See the `ConfigParser documentation
<http://docs.python.org/library/configparser.html>`_ for more information.
Kivy has a configuration file which determines the default settings. In
order to change these settings, you can alter this file manually or use
the Config object. Please see the :ref:`Configure Kivy` section for more
information.
Applying configurations
-----------------------
Configuration options control the initialization of the :class:`~kivy.app.App`.
In order to avoid situations where the config settings do not work or are not
applied before window creation (like setting an initial window size),
:meth:`Config.set <kivy.config.ConfigParser.set>` should be used before
importing any other Kivy modules. Ideally, this means setting them right at
the start of your main.py script.
Alternatively, you can save these settings permanently using
:meth:`Config.set <ConfigParser.set>` then
:meth:`Config.write <ConfigParser.write>`. In this case, you will need to
restart the app for the changes to take effect. Note that this approach will
affect all Kivy apps system wide.
Please note that no underscores (`_`) are allowed in the section name.
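For example, a minimal sketch of a ``main.py`` that fixes the initial
window size (the size values are illustrative)::

    from kivy.config import Config

    # Apply before importing other Kivy modules so the settings take
    # effect before the window is created.
    Config.set('graphics', 'width', '800')
    Config.set('graphics', 'height', '600')

    from kivy.app import App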
Usage of the Config object
--------------------------
To read a configuration token from a particular section::
>>> from kivy.config import Config
>>> Config.getint('kivy', 'show_fps')
0
Change the configuration and save it::
>>> Config.set('postproc', 'retain_time', '50')
>>> Config.write()
For information on configuring your :class:`~kivy.app.App`, please see the
:ref:`Application configuration` section.
.. versionchanged:: 1.7.1
The ConfigParser should work correctly with utf-8 now. The values are
converted from ascii to unicode only when needed. The method get() returns
utf-8 strings.
Changing configuration with environment variables
-------------------------------------------------
Since 1.11.0, it is now possible to change the configuration using
environment variables. They take precedence over the loaded config.ini.
The format is::
KCFG_<section>_<key> = <value>
For example::
KCFG_GRAPHICS_FULLSCREEN=auto ...
KCFG_KIVY_LOG_LEVEL=warning ...
Or in your file before any kivy import::
import os
os.environ["KCFG_KIVY_LOG_LEVEL"] = "warning"
If you don't want to map any environment variables, you can disable
the behavior::
os.environ["KIVY_NO_ENV_CONFIG"] = "1"
.. _configuration-tokens:
Available configuration tokens
------------------------------
.. |log_levels| replace::
'trace', 'debug', 'info', 'warning', 'error' or 'critical'
:kivy:
`default_font`: list
Default fonts used for widgets displaying any text. It defaults to
['Roboto', 'data/fonts/Roboto-Regular.ttf',
'data/fonts/Roboto-Italic.ttf', 'data/fonts/Roboto-Bold.ttf',
'data/fonts/Roboto-BoldItalic.ttf'].
`desktop`: int, 0 or 1
This option controls desktop OS specific features, such as enabling
drag-able scroll bars in scroll views, disabling bubbles in
TextInput etc. 0 is disabled, 1 is enabled.
`exit_on_escape`: int, 0 or 1
Enables exiting kivy when escape is pressed.
0 is disabled, 1 is enabled.
`pause_on_minimize`: int, 0 or 1
If set to `1`, the main loop is paused and the `on_pause` event
is dispatched when the window is minimized. This option is intended
for desktop use only. Defaults to `0`.
`keyboard_layout`: string
Identifier of the layout to use.
`keyboard_mode`: string
Specifies the keyboard mode to use. It can be one of the following:
* '' - Let Kivy choose the best option for your current platform.
* 'system' - real keyboard.
* 'dock' - one virtual keyboard docked to a screen side.
* 'multi' - one virtual keyboard for every widget request.
* 'systemanddock' - virtual docked keyboard plus input from real
keyboard.
* 'systemandmulti' - analogous.
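For example, a minimal sketch requesting a docked virtual keyboard
(run before other Kivy modules are imported)::

    from kivy.config import Config
    Config.set('kivy', 'keyboard_mode', 'dock')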
`kivy_clock`: one of `default`, `interrupt`, `free_all`, `free_only`
The clock type to use with kivy. See :mod:`kivy.clock`.
`log_dir`: string
Path of log directory.
`log_enable`: int, 0 or 1
Activate file logging. 0 is disabled, 1 is enabled.
`log_level`: string, one of |log_levels|
Set the minimum log level to use.
`log_name`: string
Format string to use for the filename of the log file.
`log_maxfiles`: int
Keep log_maxfiles recent logfiles while purging the log directory. Set
'log_maxfiles' to -1 to disable logfile purging (e.g. keep all logfiles).
.. note::
You end up with 'log_maxfiles + 1' logfiles because the logger
adds a new one after purging.
`window_icon`: string
Path of the window icon. Use this if you want to replace the default
pygame icon.
:postproc:
`double_tap_distance`: float
Maximum distance allowed for a double tap, normalized inside the range
0 - 1000.
`double_tap_time`: int
Time allowed for the detection of double tap, in milliseconds.
`ignore`: list of tuples
List of regions where new touches are ignored.
This configuration token can be used to resolve hotspot problems
with DIY hardware. The format of the list must be::
ignore = [(xmin, ymin, xmax, ymax), ...]
All the values must be inside the range 0 - 1.
`jitter_distance`: int
Maximum distance for jitter detection, normalized inside the range 0
- 1000.
`jitter_ignore_devices`: string, separated with commas
List of devices to ignore from jitter detection.
`retain_distance`: int
If the touch moves more than is indicated by retain_distance, it will
not be retained. Argument should be an int between 0 and 1000.
`retain_time`: int
Time allowed for a retain touch, in milliseconds.
`triple_tap_distance`: float
Maximum distance allowed for a triple tap, normalized inside the range
0 - 1000.
`triple_tap_time`: int
Time allowed for the detection of triple tap, in milliseconds.
:graphics:
`borderless`: int, one of 0 or 1
If set to `1`, removes the window border/decoration. Window resizing
must also be disabled to hide the resizing border.
`window_state`: string, one of 'visible', 'hidden', 'maximized'
or 'minimized'
Sets the window state, defaults to 'visible'. This option is available
only for the SDL2 window provider and it should be used on desktop
OSes.
`fbo`: string, one of 'hardware', 'software' or 'force-hardware'
Selects the FBO backend to use.
`fullscreen`: int or string, one of 0, 1, 'fake' or 'auto'
Activate fullscreen. If set to `1`, a resolution of `width`
times `height` pixels will be used.
If set to `auto`, your current display's resolution will be
used instead. This is most likely what you want.
If you want to place the window in another display,
use `fake`, or set the `borderless` option from the graphics section,
then adjust `width`, `height`, `top` and `left`.
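For example, a borderless window placed on a second display could be
configured like this (a sketch; the position and size values are
illustrative)::

    [graphics]
    borderless = 1
    position = custom
    left = 1920
    top = 0
    width = 1280
    height = 720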
`height`: int
Height of the :class:`~kivy.core.window.Window`, not used if
`fullscreen` is set to `auto`.
`left`: int
Left position of the :class:`~kivy.core.window.Window`.
`maxfps`: int, defaults to 60
Maximum FPS allowed.
.. warning::
Setting maxfps to 0 will lead to max CPU usage.
`multisamples`: int, defaults to 2
Sets the `MultiSample Anti-Aliasing (MSAA)
<http://en.wikipedia.org/wiki/Multisample_anti-aliasing>`_ level.
Increasing this value results in smoother graphics but at the cost of
processing time.
.. note::
This feature is limited by device hardware support and will have no
effect on devices which do not support the level of MSAA requested.
`position`: string, one of 'auto' or 'custom'
Position of the window on your display. If `auto` is used, you have no
control of the initial position: `top` and `left` are ignored.
`show_cursor`: int, one of 0 or 1
Set whether or not the cursor is shown on the window.
`top`: int
Top position of the :class:`~kivy.core.window.Window`.
`resizable`: int, one of 0 or 1
If 0, the window will have a fixed size. If 1, the window will be
resizable.
`rotation`: int, one of 0, 90, 180 or 270
Rotation of the :class:`~kivy.core.window.Window`.
`width`: int
Width of the :class:`~kivy.core.window.Window`, not used if
`fullscreen` is set to `auto`.
`minimum_width`: int
Minimum width to restrict the window to. (sdl2 only)
`minimum_height`: int
Minimum height to restrict the window to. (sdl2 only)
`min_state_time`: float, defaults to .035
Minimum time for widgets to display a given visual state.
This attribute is currently used by widgets like
:class:`~kivy.uix.dropdown.DropDown` &
:class:`~kivy.uix.behaviors.buttonbehavior.ButtonBehavior` to
make sure they display their current visual state for the given
time.
`allow_screensaver`: int, one of 0 or 1, defaults to 1
Allow the device to show a screen saver, or to go to sleep
on mobile devices. Only works for the sdl2 window provider.
:input:
You can create new input devices using this syntax::
# example of input provider instance
yourid = providerid,parameters
# example for tuio provider
default = tuio,127.0.0.1:3333
mytable = tuio,192.168.0.1:3334
.. seealso::
Check the providers in :mod:`kivy.input.providers` for the syntax to
use inside the configuration file.
:widgets:
`scroll_distance`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_distance`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_friction`: float
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_friction`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
`scroll_timeout`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_timeout`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
`scroll_stoptime`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_stoptime`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
`scroll_moves`: int
Default value of the
:attr:`~kivy.uix.scrollview.ScrollView.scroll_moves`
property used by the :class:`~kivy.uix.scrollview.ScrollView` widget.
Check the widget documentation for more information.
.. deprecated:: 1.7.0
Please use
:class:`~kivy.uix.scrollview.ScrollView.effect_cls` instead.
:modules:
You can activate modules with this syntax::
modulename =
Anything after the = will be passed to the module as arguments.
Check the specific module's documentation for a list of accepted
arguments.
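For example, assuming the ``monitor`` module is available (it displays
a small FPS/activity bar), you can activate it with no arguments::

    [modules]
    monitor =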
.. versionchanged:: 1.10.0
`min_state_time` and `allow_screensaver` have been added
to the `graphics` section.
`kivy_clock` has been added to the kivy section.
`default_font` has been added to the kivy section.
.. versionchanged:: 1.9.0
`borderless` and `window_state` have been added to the graphics section.
The `fake` setting of the `fullscreen` option has been deprecated,
use the `borderless` option instead.
`pause_on_minimize` has been added to the kivy section.
.. versionchanged:: 1.8.0
    `systemanddock` and `systemandmulti` have been added as possible values for
`keyboard_mode` in the kivy section. `exit_on_escape` has been added
to the kivy section.
.. versionchanged:: 1.2.0
`resizable` has been added to graphics section.
.. versionchanged:: 1.1.0
tuio no longer listens by default. Window icons are not copied to
user directory anymore. You can still set a new window icon by using the
``window_icon`` config setting.
.. versionchanged:: 1.0.8
`scroll_timeout`, `scroll_distance` and `scroll_friction` have been added.
`list_friction`, `list_trigger_distance` and `list_friction_bound`
have been removed. `keyboard_type` and `keyboard_layout` have been
removed from the widget. `keyboard_mode` and `keyboard_layout` have
been added to the kivy section.
'''
__all__ = ('Config', 'ConfigParser')
try:
from ConfigParser import ConfigParser as PythonConfigParser
except ImportError:
from configparser import RawConfigParser as PythonConfigParser
from os import environ
from os.path import exists
from kivy import kivy_config_fn
from kivy.logger import Logger, logger_config_update
from collections import OrderedDict
from kivy.utils import platform
from kivy.compat import PY2, string_types
from weakref import ref
_is_rpi = exists('/opt/vc/include/bcm_host.h')
# Version number of current configuration format
KIVY_CONFIG_VERSION = 21
Config = None
'''The default Kivy configuration object. This is a :class:`ConfigParser`
instance with the :attr:`~kivy.config.ConfigParser.name` set to 'kivy'.
.. code-block:: python
Config = ConfigParser(name='kivy')
'''
class ConfigParser(PythonConfigParser, object):
'''Enhanced ConfigParser class that supports the addition of default
sections and default values.
By default, the kivy ConfigParser instance, :attr:`~kivy.config.Config`,
is named `'kivy'` and the ConfigParser instance used by the
:meth:`App.build_settings <~kivy.app.App.build_settings>` method is named
`'app'`.
:Parameters:
`name`: string
The name of the instance. See :attr:`name`. Defaults to `''`.
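
    A minimal usage sketch (the file name and key names here are
    illustrative)::

        config = ConfigParser(name='myapp')
        config.read('myapp.ini')
        config.setdefault('section1', 'key1', 'value1')
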
.. versionchanged:: 1.9.0
Each ConfigParser can now be :attr:`named <name>`. You can get the
ConfigParser associated with a name using :meth:`get_configparser`.
In addition, you can now control the config values with
:class:`~kivy.properties.ConfigParserProperty`.
.. versionadded:: 1.0.7
'''
def __init__(self, name='', **kwargs):
PythonConfigParser.__init__(self, **kwargs)
self._sections = OrderedDict()
self.filename = None
self._callbacks = []
self.name = name
def add_callback(self, callback, section=None, key=None):
'''Add a callback to be called when a specific section or key has
changed. If you don't specify a section or key, it will call the
callback for all section/key changes.
Callbacks will receive 3 arguments: the section, key and value.
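
        For example, a sketch that watches a single key (the callback name
        is illustrative)::

            def on_width(section, key, value):
                print('graphics/width is now', value)

            Config.add_callback(on_width, 'graphics', 'width')
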
.. versionadded:: 1.4.1
'''
if section is None and key is not None:
raise Exception('You cannot specify a key without a section')
self._callbacks.append((callback, section, key))
def remove_callback(self, callback, section=None, key=None):
'''Removes a callback added with :meth:`add_callback`.
:meth:`remove_callback` must be called with the same parameters as
:meth:`add_callback`.
Raises a `ValueError` if not found.
.. versionadded:: 1.9.0
'''
self._callbacks.remove((callback, section, key))
def _do_callbacks(self, section, key, value):
for callback, csection, ckey in self._callbacks:
if csection is not None and csection != section:
continue
elif ckey is not None and ckey != key:
continue
callback(section, key, value)
def read(self, filename):
'''Read only one filename. In contrast to the original ConfigParser of
Python, this one is able to read only one file at a time. The last
read file will be used for the :meth:`write` method.
.. versionchanged:: 1.9.0
:meth:`read` now calls the callbacks if read changed any values.
'''
if not isinstance(filename, string_types):
raise Exception('Only one filename is accepted ({})'.format(
string_types.__name__))
self.filename = filename
# If we try to open directly the configuration file in utf-8,
# we correctly get the unicode value by default.
# But, when we try to save it again, all the values we didn't changed
# are still unicode, and then the PythonConfigParser internal do
# a str() conversion -> fail.
        # Instead we currently do the conversion to utf-8 when values are
        # "get()", but we internally store them in ascii.
# with codecs.open(filename, 'r', encoding='utf-8') as f:
# self.readfp(f)
old_vals = {sect: {k: v for k, v in self.items(sect)} for sect in
self.sections()}
PythonConfigParser.read(self, filename)
# when reading new file, sections/keys are only increased, not removed
f = self._do_callbacks
for section in self.sections():
if section not in old_vals: # new section
for k, v in self.items(section):
f(section, k, v)
continue
old_keys = old_vals[section]
for k, v in self.items(section): # just update new/changed keys
if k not in old_keys or v != old_keys[k]:
f(section, k, v)
def set(self, section, option, value):
'''Functions similarly to PythonConfigParser's set method, except that
the value is implicitly converted to a string.
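
        For example, non-string values are converted::

            Config.set('graphics', 'fullscreen', 0)  # stored as the string '0'
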
'''
e_value = value
if not isinstance(value, string_types):
# might be boolean, int, etc.
e_value = str(value)
if PY2:
if isinstance(value, unicode):
e_value = value.encode('utf-8')
ret = PythonConfigParser.set(self, section, option, e_value)
self._do_callbacks(section, option, value)
return ret
def setall(self, section, keyvalues):
'''Sets multiple key-value pairs in a section. keyvalues should be a
dictionary containing the key-value pairs to be set.
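
        For example::

            Config.setall('graphics', {'width': '800', 'height': '600'})
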
'''
for key, value in keyvalues.items():
self.set(section, key, value)
def get(self, section, option, **kwargs):
value = PythonConfigParser.get(self, section, option, **kwargs)
if PY2:
if type(value) is str:
return value.decode('utf-8')
return value
def setdefaults(self, section, keyvalues):
'''Set multiple key-value defaults in a section. keyvalues should be
a dictionary containing the new key-value defaults.
'''
self.adddefaultsection(section)
for key, value in keyvalues.items():
self.setdefault(section, key, value)
def setdefault(self, section, option, value):
'''Set the default value for an option in the specified section.
'''
if self.has_option(section, option):
return
self.set(section, option, value)
def getdefault(self, section, option, defaultvalue):
'''Get the value of an option in the specified section. If not found,
it will return the default value.
'''
if not self.has_section(section):
return defaultvalue
if not self.has_option(section, option):
return defaultvalue
return self.get(section, option)
def getdefaultint(self, section, option, defaultvalue):
'''Get the value of an option in the specified section. If not found,
it will return the default value. The value will always be
returned as an integer.
.. versionadded:: 1.6.0
'''
return int(self.getdefault(section, option, defaultvalue))
def adddefaultsection(self, section):
'''Add a section if the section is missing.
'''
assert("_" not in section)
if self.has_section(section):
return
self.add_section(section)
def write(self):
'''Write the configuration to the last file opened using the
:meth:`read` method.
Return True if the write finished successfully, False otherwise.
'''
if self.filename is None:
return False
try:
with open(self.filename, 'w') as fd:
PythonConfigParser.write(self, fd)
except IOError:
Logger.exception('Unable to write the config <%s>' % self.filename)
return False
return True
def update_config(self, filename, overwrite=False):
'''Upgrade the configuration based on a new default config file.
Overwrite any existing values if overwrite is True.
'''
pcp = PythonConfigParser()
pcp.read(filename)
confset = self.setall if overwrite else self.setdefaults
for section in pcp.sections():
confset(section, dict(pcp.items(section)))
self.write()
@staticmethod
def _register_named_property(name, widget_ref, *largs):
''' Called by the ConfigParserProperty to register a property which
was created with a config name instead of a config object.
When a ConfigParser with this name is later created, the properties
are then notified that this parser now exists so they can use it.
If the parser already exists, the property is notified here. See
:meth:`~kivy.properties.ConfigParserProperty.set_config`.
:Parameters:
`name`: a non-empty string
The name of the ConfigParser that is associated with the
property. See :attr:`name`.
`widget_ref`: 2-tuple.
The first element is a reference to the widget containing the
property, the second element is the name of the property. E.g.:
class House(Widget):
address = ConfigParserProperty('', 'info', 'street',
'directory')
Then, the first element is a ref to a House instance, and the
second is `'address'`.
'''
configs = ConfigParser._named_configs
try:
config, props = configs[name]
except KeyError:
configs[name] = (None, [widget_ref])
return
props.append(widget_ref)
if config:
config = config()
widget = widget_ref[0]()
if config and widget: # associate this config with property
widget.property(widget_ref[1]).set_config(config)
@staticmethod
def get_configparser(name):
'''Returns the :class:`ConfigParser` instance whose name is `name`, or
None if not found.
:Parameters:
`name`: string
The name of the :class:`ConfigParser` instance to return.
'''
try:
config = ConfigParser._named_configs[name][0]
if config is not None:
config = config()
if config is not None:
return config
del ConfigParser._named_configs[name]
except KeyError:
return None
# keys are configparser names, values are 2-tuple of (ref(configparser),
# widget_ref), where widget_ref is same as in _register_named_property
_named_configs = {}
_name = ''
@property
def name(self):
''' The name associated with this ConfigParser instance, if not `''`.
Defaults to `''`. It can be safely changed dynamically or set to `''`.
When a ConfigParser is given a name, that config object can be
retrieved using :meth:`get_configparser`. In addition, that config
instance can also be used with a
:class:`~kivy.properties.ConfigParserProperty` instance that set its
`config` value to this name.
Setting more than one ConfigParser with the same name will raise a
`ValueError`.
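
        For example, a named parser can later be retrieved by its name::

            config = ConfigParser(name='app')
            # ... elsewhere ...
            config = ConfigParser.get_configparser('app')
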
'''
return self._name
@name.setter
def name(self, value):
old_name = self._name
if value is old_name:
return
self._name = value
configs = ConfigParser._named_configs
if old_name: # disconnect this parser from previously connected props
_, props = configs.get(old_name, (None, []))
for widget, prop in props:
widget = widget()
if widget:
widget.property(prop).set_config(None)
configs[old_name] = (None, props)
if not value:
return
# if given new name, connect it with property that used this name
try:
config, props = configs[value]
except KeyError:
configs[value] = (ref(self), [])
return
if config is not None and config() is not None:
raise ValueError('A parser named {} already exists'.format(value))
for widget, prop in props:
widget = widget()
if widget:
widget.property(prop).set_config(self)
configs[value] = (ref(self), props)
if not environ.get('KIVY_DOC_INCLUDE'):
#
# Read, analyse configuration file
# Support upgrade of older config file versions
#
# Create default configuration
Config = ConfigParser(name='kivy')
Config.add_callback(logger_config_update, 'kivy', 'log_level')
    # Read the config file if it exists
if (exists(kivy_config_fn) and
'KIVY_USE_DEFAULTCONFIG' not in environ and
'KIVY_NO_CONFIG' not in environ):
try:
Config.read(kivy_config_fn)
except Exception as e:
            Logger.exception('Core: error while reading local'
                             ' configuration')
version = Config.getdefaultint('kivy', 'config_version', 0)
# Add defaults section
Config.adddefaultsection('kivy')
Config.adddefaultsection('graphics')
Config.adddefaultsection('input')
Config.adddefaultsection('postproc')
Config.adddefaultsection('widgets')
Config.adddefaultsection('modules')
Config.adddefaultsection('network')
# Upgrade default configuration until we have the current version
need_save = False
if version != KIVY_CONFIG_VERSION and 'KIVY_NO_CONFIG' not in environ:
Logger.warning('Config: Older configuration version detected'
' ({0} instead of {1})'.format(
version, KIVY_CONFIG_VERSION))
Logger.warning('Config: Upgrading configuration in progress.')
need_save = True
while version < KIVY_CONFIG_VERSION:
Logger.debug('Config: Upgrading from %d to %d' %
(version, version + 1))
if version == 0:
# log level
Config.setdefault('kivy', 'keyboard_repeat_delay', '300')
Config.setdefault('kivy', 'keyboard_repeat_rate', '30')
Config.setdefault('kivy', 'log_dir', 'logs')
Config.setdefault('kivy', 'log_enable', '1')
Config.setdefault('kivy', 'log_level', 'info')
Config.setdefault('kivy', 'log_name', 'kivy_%y-%m-%d_%_.txt')
Config.setdefault('kivy', 'window_icon', '')
# default graphics parameters
Config.setdefault('graphics', 'display', '-1')
Config.setdefault('graphics', 'fullscreen', 'no')
Config.setdefault('graphics', 'height', '600')
Config.setdefault('graphics', 'left', '0')
Config.setdefault('graphics', 'maxfps', '0')
Config.setdefault('graphics', 'multisamples', '2')
Config.setdefault('graphics', 'position', 'auto')
Config.setdefault('graphics', 'rotation', '0')
Config.setdefault('graphics', 'show_cursor', '1')
Config.setdefault('graphics', 'top', '0')
Config.setdefault('graphics', 'vsync', '1')
Config.setdefault('graphics', 'width', '800')
# input configuration
Config.setdefault('input', 'mouse', 'mouse')
# activate native input provider in configuration
            # from 1.0.9, don't activate mactouch by default, otherwise apps
            # are unusable.
if platform == 'win':
Config.setdefault('input', 'wm_touch', 'wm_touch')
Config.setdefault('input', 'wm_pen', 'wm_pen')
elif platform == 'linux':
probesysfs = 'probesysfs'
if _is_rpi:
probesysfs += ',provider=hidinput'
Config.setdefault('input', '%(name)s', probesysfs)
# input postprocessing configuration
Config.setdefault('postproc', 'double_tap_distance', '20')
Config.setdefault('postproc', 'double_tap_time', '250')
Config.setdefault('postproc', 'ignore', '[]')
Config.setdefault('postproc', 'jitter_distance', '0')
Config.setdefault('postproc', 'jitter_ignore_devices',
'mouse,mactouch,')
Config.setdefault('postproc', 'retain_distance', '50')
Config.setdefault('postproc', 'retain_time', '0')
# default configuration for keyboard repetition
Config.setdefault('widgets', 'keyboard_layout', 'qwerty')
Config.setdefault('widgets', 'keyboard_type', '')
Config.setdefault('widgets', 'list_friction', '10')
Config.setdefault('widgets', 'list_friction_bound', '20')
Config.setdefault('widgets', 'list_trigger_distance', '5')
elif version == 1:
Config.remove_option('graphics', 'vsync')
Config.set('graphics', 'maxfps', '60')
elif version == 2:
            # this version automatically copied the window icon into the user
            # directory, but it is no longer used. Users can still change
            # the window icon by editing the config.
pass
elif version == 3:
# add token for scrollview
Config.setdefault('widgets', 'scroll_timeout', '55')
Config.setdefault('widgets', 'scroll_distance', '20')
Config.setdefault('widgets', 'scroll_friction', '1.')
# remove old list_* token
Config.remove_option('widgets', 'list_friction')
Config.remove_option('widgets', 'list_friction_bound')
Config.remove_option('widgets', 'list_trigger_distance')
elif version == 4:
Config.remove_option('widgets', 'keyboard_type')
Config.remove_option('widgets', 'keyboard_layout')
# add keyboard token
Config.setdefault('kivy', 'keyboard_mode', '')
Config.setdefault('kivy', 'keyboard_layout', 'qwerty')
elif version == 5:
Config.setdefault('graphics', 'resizable', '1')
elif version == 6:
# if the timeout is still the default value, change it
Config.setdefault('widgets', 'scroll_stoptime', '300')
Config.setdefault('widgets', 'scroll_moves', '5')
elif version == 7:
# desktop bool indicating whether to use desktop specific features
is_desktop = int(platform in ('win', 'macosx', 'linux'))
Config.setdefault('kivy', 'desktop', is_desktop)
Config.setdefault('postproc', 'triple_tap_distance', '20')
Config.setdefault('postproc', 'triple_tap_time', '375')
elif version == 8:
if Config.getint('widgets', 'scroll_timeout') == 55:
Config.set('widgets', 'scroll_timeout', '250')
elif version == 9:
Config.setdefault('kivy', 'exit_on_escape', '1')
elif version == 10:
Config.set('graphics', 'fullscreen', '0')
Config.setdefault('graphics', 'borderless', '0')
elif version == 11:
Config.setdefault('kivy', 'pause_on_minimize', '0')
elif version == 12:
Config.setdefault('graphics', 'window_state', 'visible')
elif version == 13:
Config.setdefault('graphics', 'minimum_width', '0')
Config.setdefault('graphics', 'minimum_height', '0')
elif version == 14:
Config.setdefault('graphics', 'min_state_time', '.035')
elif version == 15:
Config.setdefault('kivy', 'kivy_clock', 'default')
elif version == 16:
Config.setdefault('kivy', 'default_font', [
'Roboto',
'data/fonts/Roboto-Regular.ttf',
'data/fonts/Roboto-Italic.ttf',
'data/fonts/Roboto-Bold.ttf',
'data/fonts/Roboto-BoldItalic.ttf'])
elif version == 17:
Config.setdefault('graphics', 'allow_screensaver', '1')
elif version == 18:
Config.setdefault('kivy', 'log_maxfiles', '100')
elif version == 19:
Config.setdefault('graphics', 'shaped', '0')
Config.setdefault(
'kivy', 'window_shape',
'data/images/defaultshape.png'
)
elif version == 20:
Config.setdefault('network', 'useragent', 'curl')
else:
# for future.
break
# Pass to the next version
version += 1
    # Indicate to the Config that we've upgraded to the latest version.
Config.set('kivy', 'config_version', KIVY_CONFIG_VERSION)
# Now, activate log file
Logger.logfile_activated = bool(Config.getint('kivy', 'log_enable'))
    # If no configuration exists, write the default one.
if ((not exists(kivy_config_fn) or need_save) and
'KIVY_NO_CONFIG' not in environ):
try:
Config.filename = kivy_config_fn
Config.write()
except Exception as e:
Logger.exception('Core: Error while saving default config file')
# Load configuration from env
if environ.get('KIVY_NO_ENV_CONFIG', '0') != '1':
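        # Overrides take the form KCFG_SECTION_NAME=value, e.g. setting
        # KCFG_GRAPHICS_WIDTH=1024 (an illustrative value) in the environment
        # overrides the graphics/width option before the app starts.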
for key, value in environ.items():
if not key.startswith("KCFG_"):
continue
try:
_, section, name = key.split("_", 2)
except ValueError:
Logger.warning((
"Config: Environ `{}` invalid format, "
"must be KCFG_section_name").format(key))
continue
# extract and check section
section = section.lower()
if not Config.has_section(section):
Logger.warning(
"Config: Environ `{}`: unknown section `{}`".format(
key, section))
continue
# extract and check the option name
name = name.lower()
sections_to_check = {
"kivy", "graphics", "widgets", "postproc", "network"}
if (section in sections_to_check and
not Config.has_option(section, name)):
Logger.warning((
"Config: Environ `{}` unknown `{}` "
"option in `{}` section.").format(
key, name, section))
                # we don't prevent setting an unknown option, because an
                # external module or widget (in garden?) may want to save
                # its own configuration here.
Config.set(section, name, value)
| mit | 467,672,156,130,070,140 | 37.50053 | 79 | 0.612736 | false |
christensen5/pyIBCC | python/ibcc.py | 1 | 30196 | '''
@author: Edwin Simpson
'''
import sys, logging
import numpy as np
from copy import deepcopy
from scipy.sparse import coo_matrix, csr_matrix
from scipy.special import psi, gammaln
from ibccdata import DataHandler
from scipy.optimize import fmin, fmin_cobyla
from scipy.stats import gamma
class IBCC(object):
# Print extra debug info
verbose = False
    keeprunning = True  # setting this to False causes combine_classifications to exit without completing, e.g. if
    # another thread decides that IBCC is taking too long. Probably won't work well if optimise_hyperparams is True.
# Configuration for variational Bayes (VB) algorithm for approximate inference -------------------------------------
# determine convergence by calculating lower bound? Quicker to set=False so we check convergence of target variables
uselowerbound = False
min_iterations = 1
max_iterations = 500
conv_threshold = 0.0001
# Data set attributes -----------------------------------------------------------------------------------------------
discretedecisions = False # If true, decisions are rounded to discrete integers. If false, you can submit undecided
# responses as fractions between two classes. E.g. 2.3 means that 0.3 of the decision will go to class 3, and 0.7
# will go to class 2. Only neighbouring classes can have uncertain decisions in this way.
table_format_flag = False
nclasses = None
nscores = None
K = None
N = 0 #number of objects
Ntrain = 0 # no. training objects
Ntest = 0 # no. test objects
# Sparsity handling
sparse = False
observed_idxs = []
full_N = 0
# The data from the crowd
C = None
Ctest = None # data for the test points (excluding training)
goldlabels = None
# Indices into the current data set
trainidxs = None
testidxs = None
conf_mat_ind = [] # indices into the confusion matrices corresponding to the current set of crowd labels
# the joint likelihood (interim value saved to reduce computation)
lnpCT = None
# Model parameters and hyper-parameters -----------------------------------------------------------------------------
#The model
alpha0 = None
clusteridxs_alpha0 = [] # use this if you want to use an alpha0 where each matrix is a prior for a group of agents. This
# is and array of indicies that indicates which of the original alpha0 groups should be used for each agent.
alpha0_length = 1 # can be either 1, K or nclusters
alpha0_cluster = [] # copy of the alpha0 values for each cluster.
nu0 = None
lnkappa = []
nu = []
lnPi = []
alpha = []
E_t = []
#hyper-hyper-parameters: the parameters for the hyper-prior over the hyper-parameters. These are only used if you
# run optimize_hyperparams
gam_scale_alpha = [] #Gamma distribution scale parameters
gam_shape_alpha = 10 #Gamma distribution shape parameters --> confidence in seed values
gam_scale_nu = []
gam_shape_nu = 200
# Initialisation ---------------------------------------------------------------------------------------------------
def __init__(self, nclasses=2, nscores=2, alpha0=None, nu0=None, K=1, uselowerbound=False, dh=None):
        if dh is not None:
self.nclasses = dh.nclasses
self.nscores = len(dh.scores)
self.alpha0 = dh.alpha0
self.nu0 = dh.nu0
self.K = dh.K
self.uselowerbound = dh.uselowerbound
else:
self.nclasses = nclasses
self.nscores = nscores
self.alpha0 = alpha0
self.nu0 = nu0
self.K = K
self.uselowerbound = uselowerbound
# Ensure we have float arrays so we can do division with these parameters properly
self.nu0 = np.array(self.nu0).astype(float)
if self.nu0.ndim==1:
self.nu0 = self.nu0.reshape((self.nclasses,1))
elif self.nu0.shape[0]!=self.nclasses and self.nu0.shape[1]==self.nclasses:
self.nu0 = self.nu0.T
def init_params(self, force_reset=False):
'''
        Checks that parameters are initialized, but doesn't overwrite them if already set up.
'''
if self.verbose:
logging.debug('Initialising parameters...Alpha0: ' + str(self.alpha0))
#if alpha is already initialised, and no new agents, skip this
if self.alpha == [] or self.alpha.shape[2] != self.K or force_reset:
self.init_lnPi()
if self.verbose:
logging.debug('Nu0: ' + str(self.nu0))
if self.nu ==[] or force_reset:
self.init_lnkappa()
def init_lnkappa(self):
self.nu = deepcopy(np.float64(self.nu0))
sumNu = np.sum(self.nu)
self.lnkappa = psi(self.nu) - psi(sumNu)
def init_lnPi(self):
'''
Always creates new self.alpha and self.lnPi objects and calculates self.alpha and self.lnPi values according to
either the prior, or where available, values of self.E_t from previous runs.
'''
self.alpha0 = self.alpha0.astype(float)
# if we specify different alpha0 for some agents, we need to do so for all K agents. The last agent passed in
# will be duplicated for any missing agents.
if self.clusteridxs_alpha0 != []: # map from a list of cluster IDs
if self.alpha0_cluster == []:
self.alpha0_cluster = self.alpha0
self.alpha0_length = self.alpha0_cluster.shape[2]
self.alpha0 = self.alpha0_cluster[:, :, self.clusteridxs_alpha0]
elif len(self.alpha0.shape) == 3 and self.alpha0.shape[2] < self.K:
# We have a new dataset with more agents than before -- create more priors.
nnew = self.K - self.alpha0.shape[2]
alpha0new = self.alpha0[:, :, 0]
alpha0new = alpha0new[:, :, np.newaxis]
alpha0new = np.repeat(alpha0new, nnew, axis=2)
self.alpha0 = np.concatenate((self.alpha0, alpha0new), axis=2)
self.alpha0_length = self.K
elif len(self.alpha0.shape)==2:
self.alpha0 = self.alpha0[:,:,np.newaxis]
self.alpha0_length = 1
# Make sure self.alpha is the right size as well. Values of self.alpha not important as we recalculate below
self.alpha = np.zeros((self.nclasses, self.nscores, self.K), dtype=np.float) + self.alpha0
self.lnPi = np.zeros((self.nclasses, self.nscores, self.K))
self.expec_lnPi()
def init_t(self):
kappa = (self.nu0 / np.sum(self.nu0, axis=0)).T
if len(self.E_t) > 0:
if self.sparse:
oldE_t = self.E_t_sparse
else:
oldE_t = self.E_t
Nold = oldE_t.shape[0]
if Nold > self.N:
Nold = self.N
else:
oldE_t = []
self.E_t = np.zeros((self.N, self.nclasses)) + kappa
if len(oldE_t) > 0:
self.E_t[0:Nold, :] = oldE_t[0:Nold, :]
uncert_trainidxs = self.trainidxs.copy() # look for labels that are not discrete values of valid classes
for j in range(self.nclasses):
# training labels
row = np.zeros((1, self.nclasses))
row[0, j] = 1
jidxs = self.goldlabels == j
            uncert_trainidxs = uncert_trainidxs & ~jidxs  # exclude discrete gold labels of class j
self.E_t[jidxs, :] = row
# deal with uncertain training idxs
for j in range(self.nclasses):
# values a fraction above class j
partly_j_idxs = np.bitwise_and(self.goldlabels[uncert_trainidxs] > j, self.goldlabels[uncert_trainidxs] < j + 1)
partly_j_idxs = uncert_trainidxs[partly_j_idxs]
self.E_t[partly_j_idxs, j] = (j + 1) - self.goldlabels[partly_j_idxs]
# values a fraction below class j
partly_j_idxs = np.bitwise_and(self.goldlabels[uncert_trainidxs] < j, self.goldlabels[uncert_trainidxs] > j - 1)
partly_j_idxs = uncert_trainidxs[partly_j_idxs]
self.E_t[partly_j_idxs, j] = self.goldlabels[partly_j_idxs] - j + 1
if self.sparse:
self.E_t_sparse = self.E_t # current working version is a sparse set of observations of the complete space of data points
# Data preprocessing and helper functions --------------------------------------------------------------------------
def desparsify_crowdlabels(self, crowdlabels):
'''
Converts the IDs of data points in the crowdlabels to a set of consecutive integer indexes. If a data point has
no crowdlabels when using table format, it will be skipped.
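        For example, crowd labels referring to object IDs [4, 9, 4] are remapped
        to [0, 1, 0]; the original IDs are kept in self.observed_idxs so the
        mapping can be undone later.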
'''
if self.table_format_flag:
# First, record which objects were actually observed.
self.observed_idxs = np.argwhere(np.sum(np.isfinite(crowdlabels), axis=1) > 0).reshape(-1)
# full set of test points will include those with crowd labels = NaN, unless gold labels are passed in
self.full_N = crowdlabels.shape[0]
if crowdlabels.shape[0] > len(self.observed_idxs):
self.sparse = True
# cut out the unobserved data points. We'll put them back in at the end of the classification procedure.
crowdlabels = crowdlabels[self.observed_idxs, :]
else:
crowdobjects = crowdlabels[:,1].astype(int)
self.observed_idxs, mappedidxs = np.unique(crowdobjects, return_inverse=True)
self.full_N = int(np.max(crowdlabels[:,1])) + 1 # have to add one since indexes start from 0
if self.full_N > len(self.observed_idxs):
self.sparse = True
# map the IDs so we skip unobserved data points. We'll map back at the end of the classification procedure.
crowdlabels[:, 1] = mappedidxs
return crowdlabels
def preprocess_goldlabels(self, goldlabels):
        if goldlabels is not None and self.sparse:
if self.full_N<len(goldlabels):
# the full set of test points that we output will come from the gold labels if longer than crowd labels
self.full_N = len(goldlabels)
goldlabels = goldlabels[self.observed_idxs]
# Find out how much training data and how many total data points we have
        if goldlabels is not None:
len_t = goldlabels.shape[0]
goldlabels[np.isnan(goldlabels)] = -1
else:
len_t = 0 # length of the training vector
len_c = len(self.observed_idxs)# length of the crowdlabels
# How many data points in total?
if len_c > len_t:
self.N = len_c
else:
self.N = len_t
# Make sure that goldlabels is the right size in case we have passed in a training set for the first idxs
        if goldlabels is None:
self.goldlabels = np.zeros(self.N) - 1
elif goldlabels.shape[0] < self.N:
extension = np.zeros(self.N - goldlabels.shape[0]) - 1
self.goldlabels = np.concatenate((goldlabels, extension))
else:
self.goldlabels = goldlabels
def set_test_and_train_idxs(self, testidxs=None):
# record the test and train idxs
self.trainidxs = self.goldlabels > -1
self.Ntrain = np.sum(self.trainidxs)
# self.testidxs only includes points with crowd labels!
        if testidxs is not None:  # include the pre-specified set of unlabelled data points in the inference process. All
# other data points are either training data or ignored.
self.testidxs = testidxs[self.observed_idxs]
else: # If the test indexes are not specified explicitly, assume that all data points with a NaN or a -1 in the
# training data must be test indexes.
self.testidxs = np.bitwise_or(np.isnan(self.goldlabels), self.goldlabels < 0)
self.testidxs = self.testidxs>0
self.Ntest = np.sum(self.testidxs)
def preprocess_crowdlabels(self, crowdlabels):
# Initialise all objects relating to the crowd labels.
C = {}
crowdlabels[np.isnan(crowdlabels)] = -1
if self.discretedecisions:
crowdlabels = np.round(crowdlabels).astype(int)
if self.table_format_flag:# crowd labels as a full KxN table? If false, use a sparse 3-column list, where 1st
# column=classifier ID, 2nd column = obj ID, 3rd column = score.
self.K = crowdlabels.shape[1]
for l in range(self.nscores):
Cl = np.zeros((self.N, self.K))
#crowd labels may not be supplied for all N data points in the gold labels, so use argwhere
lidxs = np.argwhere(crowdlabels==l)
Cl[lidxs[:,0], lidxs[:,1]] = 1
if not self.discretedecisions:
if l + 1 < self.nscores:
partly_l_idxs = np.bitwise_and(crowdlabels > l, crowdlabels < (l+1)) # partly above l
Cl[partly_l_idxs] = (l + 1) - crowdlabels[partly_l_idxs]
if l > 0:
partly_l_idxs = np.bitwise_and(crowdlabels < l, crowdlabels > (l-1)) # partly below l
Cl[partly_l_idxs] = crowdlabels[partly_l_idxs] - l + 1
C[l] = Cl
else:
self.K = int(np.nanmax(crowdlabels[:,0]))+1 # add one because indexes start from 0
for l in range(self.nscores):
lIdxs = np.argwhere(crowdlabels[:, 2] == l)[:,0]
data = np.ones((len(lIdxs), 1)).reshape(-1)
rows = np.array(crowdlabels[lIdxs, 1]).reshape(-1)
cols = np.array(crowdlabels[lIdxs, 0]).reshape(-1)
if not self.discretedecisions:
partly_l_idxs = np.bitwise_and(crowdlabels[:, 2] > l, crowdlabels[:, 2] < l + 1) # partly above l
data = np.concatenate((data, (l + 1) - crowdlabels[partly_l_idxs, 2]))
rows = np.concatenate((rows, crowdlabels[partly_l_idxs, 1].reshape(-1)))
cols = np.concatenate((cols, crowdlabels[partly_l_idxs, 0].reshape(-1)))
partly_l_idxs = np.bitwise_and(crowdlabels[:, 2] < l, crowdlabels[:, 2] > l - 1) # partly below l
data = np.concatenate((data, crowdlabels[partly_l_idxs, 2] - l + 1))
rows = np.concatenate((rows, crowdlabels[partly_l_idxs, 1].reshape(-1)))
cols = np.concatenate((cols, crowdlabels[partly_l_idxs, 0].reshape(-1)))
Cl = csr_matrix(coo_matrix((data,(rows,cols)), shape=(self.N, self.K)))
C[l] = Cl
# Set and reset object properties for the new dataset
self.C = C
self.lnpCT = np.zeros((self.N, self.nclasses))
self.conf_mat_ind = []
# pre-compute the indices into the pi arrays
# repeat for test labels only
self.Ctest = {}
for l in range(self.nscores):
self.Ctest[l] = C[l][self.testidxs, :]
# Reset the pre-calculated data for the training set in case goldlabels has changed
self.alpha_tr = []
def resparsify_t(self):
'''
Puts the expectations of target values, E_t, at the points we observed crowd labels back to their original
indexes in the output array. Values are inserted for the unobserved indices using only kappa (class proportions).
'''
E_t_full = np.zeros((self.full_N, self.nclasses))
E_t_full[:] = (np.exp(self.lnkappa) / np.sum(np.exp(self.lnkappa),axis=0)).T
E_t_full[self.observed_idxs,:] = self.E_t
self.E_t_sparse = self.E_t # save the sparse version
self.E_t = E_t_full
# Run the inference algorithm --------------------------------------------------------------------------------------
def combine_classifications(self, crowdlabels, goldlabels=None, testidxs=None, optimise_hyperparams=False, maxiter=200,
table_format=False):
'''
Takes crowdlabels in either sparse list or table formats, along with optional training labels (goldlabels)
and applies data-preprocessing steps before running inference for the model parameters and target labels.
Returns the expected values of the target labels.
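
        A minimal usage sketch (the labels below are illustrative). In the
        sparse list format, each crowd label row is [agent ID, object ID, score]::

            import numpy as np
            crowdlabels = np.array([[0., 0., 1.],
                                    [1., 0., 1.],
                                    [0., 1., 0.]])
            combiner = IBCC(nclasses=2, nscores=2, alpha0=np.ones((2, 2)),
                            nu0=np.array([1.0, 1.0]))
            E_t = combiner.combine_classifications(crowdlabels)
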
'''
self.table_format_flag = table_format
oldK = self.K
crowdlabels = self.desparsify_crowdlabels(crowdlabels)
self.preprocess_goldlabels(goldlabels)
self.set_test_and_train_idxs(testidxs)
self.preprocess_crowdlabels(crowdlabels)
self.init_t()
#Check that we have the right number of agents/base classifiers, K, and initialise parameters if necessary
if self.K != oldK or self.nu == [] or self.alpha==[]: # data shape has changed or not initialised yet
self.init_params()
# Either run the model optimisation or just use the inference method with fixed hyper-parameters
if optimise_hyperparams:
self.optimize_hyperparams(maxiter=maxiter)
else:
self.run_inference()
if self.sparse:
self.resparsify_t()
return self.E_t
def convergence_measure(self, oldET):
return np.max(np.sum(np.absolute(oldET - self.E_t), 1))
def run_inference(self):
'''
Variational approximate inference. Assumes that all data and hyper-parameters are ready for use. Overwrite
do implement EP or Gibbs' sampling etc.
'''
logging.info('IBCC: combining %i training points + %i noisy-labelled points' % (np.sum(self.trainidxs),
np.sum(self.testidxs)))
oldL = -np.inf
converged = False
self.nIts = 0 #object state so we can check it later
while not converged and self.keeprunning:
oldET = self.E_t.copy()
#update targets
self.expec_t()
#update params
self.expec_lnkappa()
self.expec_lnPi()
#check convergence
if self.uselowerbound:
L = self.lowerbound()
if self.verbose:
logging.debug('Lower bound: ' + str(L) + ', increased by ' + str(L - oldL))
change = L-oldL
oldL = L
else:
change = self.convergence_measure(oldET)
if (self.nIts>=self.max_iterations or change<self.conv_threshold) and self.nIts>self.min_iterations:
converged = True
self.nIts+=1
if change < -0.001 and self.verbose:
logging.warning('IBCC iteration ' + str(self.nIts) + ' absolute change was ' + str(change) + '. Possible bug or rounding error?')
            else:
logging.debug('IBCC iteration ' + str(self.nIts) + ' absolute change was ' + str(change))
logging.info('IBCC finished in ' + str(self.nIts) + ' iterations (max iterations allowed = ' + str(self.max_iterations) + ').')
# Posterior Updates to Hyperparameters --------------------------------------------------------------------------------
def post_Alpha(self): # Posterior Hyperparams
# Save the counts from the training data so we only recalculate the test data on every iteration
if self.alpha_tr == []:
self.alpha_tr = np.zeros(self.alpha.shape)
if self.Ntrain:
for j in range(self.nclasses):
for l in range(self.nscores):
Tj = self.E_t[self.trainidxs, j].reshape((self.Ntrain, 1))
self.alpha_tr[j,l,:] = self.C[l][self.trainidxs,:].T.dot(Tj).reshape(-1)
self.alpha_tr += self.alpha0
# Add the counts from the test data
for j in range(self.nclasses):
for l in range(self.nscores):
Tj = self.E_t[self.testidxs, j].reshape((self.Ntest, 1))
counts = self.Ctest[l].T.dot(Tj).reshape(-1)
self.alpha[j, l, :] = self.alpha_tr[j, l, :] + counts
# Expectations: methods for calculating expectations with respect to parameters for the VB algorithm ------------------
def expec_lnkappa(self):
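        # Mean-field update for the class proportions: nu_j = nu0_j + sum_i E[t_ij],
        # so E[ln kappa_j] = psi(nu_j) - psi(sum_j' nu_j'), the standard expectation
        # of the log of a Dirichlet-distributed variable.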
sumET = np.sum(self.E_t, 0)
for j in range(self.nclasses):
self.nu[j] = self.nu0[j] + sumET[j]
self.lnkappa = psi(self.nu) - psi(np.sum(self.nu))
def expec_lnPi(self):
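        # Each row pi[j, :, k] of agent k's confusion matrix has a Dirichlet posterior
        # with pseudo-counts alpha[j, :, k], so E[ln pi[j, l, k]] is computed as
        # psi(alpha[j, l, k]) - psi(sum_l' alpha[j, l', k]), as for kappa above.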
# check if E_t has been initialised. Only update alpha if it has. Otherwise E[lnPi] is given by the prior
        if len(self.E_t) > 0:
self.post_Alpha()
sumAlpha = np.sum(self.alpha, 1)
psiSumAlpha = psi(sumAlpha)
for s in range(self.nscores):
self.lnPi[:, s, :] = psi(self.alpha[:, s, :]) - psiSumAlpha
def expec_t(self):
self.lnjoint()
joint = self.lnpCT
joint = joint[self.testidxs, :]
# ensure that the values are not too small
largest = np.max(joint, 1)[:, np.newaxis]
joint = joint - largest
joint = np.exp(joint)
norma = np.sum(joint, axis=1)[:, np.newaxis]
pT = joint / norma
self.E_t[self.testidxs, :] = pT
# Likelihoods of observations and current estimates of parameters --------------------------------------------------
def lnjoint(self, alldata=False):
'''
For use with crowdsourced data in table format (should be converted on input)
'''
if self.uselowerbound or alldata:
for j in range(self.nclasses):
data = []
for l in range(self.nscores):
if self.table_format_flag:
data_l = self.C[l] * self.lnPi[j, l, :][np.newaxis,:]
else:
data_l = self.C[l].multiply(self.lnPi[j, l, :][np.newaxis,:])
data = data_l if data==[] else data+data_l
self.lnpCT[:, j] = np.array(np.sum(data, 1)).reshape(-1) + self.lnkappa[j]
else: # no need to calculate in full
for j in range(self.nclasses):
data = []
for l in range(self.nscores):
if self.table_format_flag:
data_l = self.Ctest[l] * self.lnPi[j, l, :][np.newaxis,:]
else:
data_l = self.Ctest[l].multiply(self.lnPi[j, l, :][np.newaxis,:])
data = data_l if data==[] else data+data_l
self.lnpCT[self.testidxs, j] = np.array(np.sum(data, 1)).reshape(-1) + self.lnkappa[j]
def post_lnkappa(self):
        lnpKappa = gammaln(np.sum(self.nu0)) - np.sum(gammaln(self.nu0)) + np.sum((self.nu0 - 1) * self.lnkappa)
return lnpKappa
def q_lnkappa(self):
lnqKappa = gammaln(np.sum(self.nu)) - np.sum(gammaln(self.nu)) + np.sum((self.nu - 1) * self.lnkappa)
return lnqKappa
def q_ln_t(self):
ET = self.E_t[self.E_t != 0]
return np.sum(ET * np.log(ET))
def post_lnpi(self):
x = np.sum((self.alpha0-1) * self.lnPi,1)
z = gammaln(np.sum(self.alpha0,1)) - np.sum(gammaln(self.alpha0),1)
return np.sum(x+z)
def q_lnPi(self):
x = np.sum((self.alpha-1) * self.lnPi,1)
z = gammaln(np.sum(self.alpha,1)) - np.sum(gammaln(self.alpha),1)
return np.sum(x+z)
# Lower Bound ---------------------------------------------------------------------------------------------------------
def lowerbound(self):
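        # Variational lower bound L = E_q[ln p(C, t, pi, kappa)] - E_q[ln q(t, pi, kappa)].
        # It should increase monotonically with each VB iteration, so it doubles as a
        # convergence check when uselowerbound is True.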
# Expected Energy: entropy given the current parameter expectations
lnpCT = self.post_lnjoint_ct()
lnpPi = self.post_lnpi()
lnpKappa = self.post_lnkappa()
EEnergy = lnpCT + lnpPi + lnpKappa
# Entropy of the variational distribution
lnqT = self.q_ln_t()
lnqPi = self.q_lnPi()
lnqKappa = self.q_lnkappa()
H = lnqT + lnqPi + lnqKappa
# Lower Bound
L = EEnergy - H
# logging.debug('EEnergy ' + str(EEnergy) + ', H ' + str(H))
return L
# Hyperparameter Optimisation ------------------------------------------------------------------------------------------
def set_hyperparams(self,hyperparams):
n_alpha_elements = len(hyperparams) - self.nclasses
alpha_shape = (self.nclasses, self.nscores, self.alpha0_length)
alpha0 = hyperparams[0:n_alpha_elements].reshape(alpha_shape)
nu0 = np.array(hyperparams[-self.nclasses:]).reshape(self.nclasses, 1)
if self.clusteridxs_alpha0 != []:
self.alpha0_cluster = alpha0
else:
self.alpha0 = alpha0
self.nu0 = nu0
return (alpha0, nu0)
def get_hyperparams(self):
constraints = [lambda hp: 1 if np.all(np.asarray(hp[0:self.alpha0_length*self.nclasses*self.nscores + self.nclasses])>0) else -1]
if self.clusteridxs_alpha0 != []:
alpha0 = self.alpha0_cluster
else:
alpha0 = self.alpha0
        initialguess = np.concatenate((alpha0.flatten(), self.nu0.flatten()))
        rhobeg = np.ones(len(initialguess))
        rhoend = np.ones(len(initialguess)) * 0.2
        return initialguess, constraints, rhobeg, rhoend
def post_lnjoint_ct(self):
# If we have not already calculated lnpCT for the lower bound, then make sure we recalculate using all data
if not self.uselowerbound:
self.lnjoint(alldata=True)
return np.sum(self.E_t * self.lnpCT)
def ln_modelprior(self):
#Check and initialise the hyper-hyper-parameters if necessary
if self.gam_scale_alpha==[] or (len(self.gam_scale_alpha.shape) == 3 and self.gam_scale_alpha.shape[2]!=self.alpha0.shape[2]):
self.gam_shape_alpha = np.float(self.gam_shape_alpha)
# if the scale was not set, assume current values of alpha0 are the means given by the hyper-prior
self.gam_scale_alpha = self.alpha0/self.gam_shape_alpha
if self.gam_scale_nu==[]:
self.gam_shape_nu = np.float(self.gam_shape_nu)
# if the scale was not set, assume current values of nu0 are the means given by the hyper-prior
self.gam_scale_nu = self.nu0/self.gam_shape_nu
#Gamma distribution over each value. Set the parameters of the gammas.
p_alpha0 = gamma.logpdf(self.alpha0, a=self.gam_shape_alpha, scale=self.gam_scale_alpha)
p_nu0 = gamma.logpdf(self.nu0, a=self.gam_shape_nu, scale=self.gam_scale_nu)
return np.sum(p_alpha0) + np.sum(p_nu0)
def neg_marginal_likelihood(self, hyperparams):
'''
Weight the marginal log data likelihood by the hyper-prior. Unnormalised posterior over the hyper-parameters.
'''
if self.verbose:
logging.debug("Hyper-parameters: %s" % str(hyperparams))
if np.any(np.isnan(hyperparams)) or np.any(hyperparams <= 0):
return np.inf
self.set_hyperparams(hyperparams)
#ensure new alpha0 and nu0 values are used when updating E_t
self.init_params(force_reset=True)
#run inference algorithm
self.run_inference()
#calculate likelihood from the fitted model
data_loglikelihood = self.post_lnjoint_ct()
log_model_prior = self.ln_modelprior()
lml = data_loglikelihood + log_model_prior
logging.debug("Log joint probability of the model & data: %f" % lml)
return -lml #returns Negative!
def optimize_hyperparams(self, maxiter=200):
'''
        Assuming gamma distributions over the hyper-parameters, we find the MAP values. The combiner object is updated
        to contain the optimal values, found using a downhill simplex (Nelder-Mead) search.
'''
#Evaluate the first guess using the current value of the hyper-parameters
initialguess, constraints, rhobeg, rhoend = self.get_hyperparams()
#opt_hyperparams = fmin_cobyla(self.neg_marginal_likelihood, initialguess, constraints, maxfun=maxiter, rhobeg=rhobeg, rhoend=rhoend)
opt_hyperparams, _, niterations, _, _ = fmin(self.neg_marginal_likelihood, initialguess, maxfun=maxiter,
full_output=True, ftol=1, xtol=rhoend)
opt_hyperparams = self.set_hyperparams(opt_hyperparams)
logging.debug("Optimal hyper-parameters: ")
for param in opt_hyperparams:
logging.debug(str(param))
return self.E_t
# Loader and Runner helper functions -------------------------------------------------------------------------------
def load_combiner(config_file, ibcc_class=None):
dh = DataHandler()
dh.loadData(config_file)
    if ibcc_class is None:
heatmapcombiner = IBCC(dh=dh)
else:
heatmapcombiner = ibcc_class(dh=dh)
return heatmapcombiner, dh
def load_and_run_ibcc(configFile, ibcc_class=None, optimise_hyperparams=False):
heatmapcombiner, dh = load_combiner(configFile, ibcc_class)
#combine labels
heatmapcombiner.verbose = True
pT = heatmapcombiner.combine_classifications(dh.crowdlabels, dh.goldlabels, optimise_hyperparams=optimise_hyperparams,
table_format=dh.table_format)
if dh.output_file is not None:
dh.save_targets(pT)
dh.save_pi(heatmapcombiner.alpha, heatmapcombiner.nclasses, heatmapcombiner.nscores)
dh.save_hyperparams(heatmapcombiner.alpha, heatmapcombiner.nu)
pT = dh.map_predictions_to_original_IDs(pT)
return pT, heatmapcombiner
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
if len(sys.argv)>1:
configFile = sys.argv[1]
else:
configFile = './config/my_project.py'
load_and_run_ibcc(configFile)
| mit | -2,089,740,508,263,431,200 | 49.076285 | 157 | 0.576467 | false |
pingdynasty/OwlProgram | Tools/Heavy/uploader.py | 1 | 24501 | #!/usr/bin/env python
# Copyright (c) 2015-2017 Enzien Audio, Ltd. ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import argparse
import base64
import datetime
import getpass
import json
import os
import requests
import shutil
import stat
import sys
import tempfile
import time
import urlparse
import zipfile
class Colours:
purple = "\033[95m"
cyan = "\033[96m"
dark_cyan = "\033[36m"
blue = "\033[94m"
green = "\033[92m"
yellow = "\033[93m"
red = "\033[91m"
bold = "\033[1m"
underline = "\033[4m"
end = "\033[0m"
class ErrorCodes(object):
# NOTE(mhroth): this class could inherit from Enum, but we choose not to
# as to not require an additional dependency
# http://www.tldp.org/LDP/abs/html/exitcodes.html
# http://stackoverflow.com/questions/1101957/are-there-any-standard-exit-status-codes-in-linux
CODE_OK = 0 # success!
CODE_MAIN_NOT_FOUND = 3 # _main.pd not found
CODE_HEAVY_COMPILE_ERRORS = 4 # heavy returned compiler errors
CODE_UPLOAD_ASSET_TOO_LARGE = 5 # the size of the uploadable asset is too large
CODE_RELEASE_NOT_AVAILABLE = 6 # the requested release is not available
CODE_CONNECTION_ERROR = 7 # HTTPS connection could not be made to the server
CODE_CONNECTION_TIMEOUT = 8 # HTTPS connection has timed out
CODE_CONNECTION_400_500 = 9 # a 400 or 500 error has occured
CODE_INVALID_TOKEN = 10 # the user token could not be parsed
CODE_NEW_PATCH_FAIL = 11 # a new patch could not be made
CODE_EXCEPTION = 125 # a generic execption has occurred
class UploaderException(Exception):
def __init__(self, code, message=None, e=None):
self.code = code
self.message = message
self.e = e
# the maxmimum file upload size of 1MB
__HV_MAX_UPLOAD_SIZE = 1 * 1024*1024
__HV_UPLOADER_SERVICE_TOKEN = \
"eyJhbGciOiAiSFMyNTYiLCAidHlwIjogIkpXVCJ9." \
"eyJzdGFydERhdGUiOiAiMjAxNi0xMi0xNVQyMzoyNToxMC4wOTU2MjIiLCAic2VydmljZSI6ICJoZWF2eV91cGxvYWRlciJ9." \
"w2o1_RttJUAiq6WyN0J7MhDsaSseISzgDAQ9aP9Di6M="
__SUPPORTED_GENERATOR_SET = {
"c-src",
"web-local", "web-js",
"fabric-src", "fabric-macos-x64", "fabric-win-x86", "fabric-win-x64", "fabric-linux-x64", "fabric-android-armv7a",
"unity-src", "unity-macos-x64", "unity-win-x86", "unity-win-x64", "unity-linux-x64", "unity-android-armv7a",
"wwise-src", "wwise-macos-x64", "wwise-win-x86", "wwise-win-x64", "wwise-linux-x64", "wwise-ios-armv7a"
"vst2-src", "vst2-macos-x64", "vst2-win-x86", "vst2-win-x64", "vst2-linux-x64"
}
def __zip_dir(in_dir, zip_path, file_filter=None):
""" Recursively zip an entire directory with an optional file filter
"""
zf = zipfile.ZipFile(zip_path, mode="w", compression=zipfile.ZIP_DEFLATED)
for subdir, dirs, files in os.walk(in_dir):
for f in files:
if (file_filter is None) or (f.lower().split(".")[-1] in file_filter):
zf.write(
filename=os.path.join(subdir,f),
arcname=os.path.relpath(os.path.join(subdir,f), start=in_dir))
return zip_path
def __unzip(zip_path, target_dir):
""" Unzip a file to a given directory. All destination files are overwritten.
"""
zipfile.ZipFile(zip_path).extractall(target_dir)
def __get_file_url_stub_for_generator(json_api, g):
""" Returns the file link for a specific generator.
Returns None if no link could be found.
"""
for i in json_api["included"]:
if (i["type"] == "file") and (g == i["data"]["buildId"]):
return i["links"]["self"]
return None # by default, return None
def upload(input_dir, output_dirs=None, name=None, owner=None, generators=None, b=False, y=False, release=None, release_override=False, domain=None, verbose=False, token=None, clear_token=False, service_token=None, force_new_patch=False):
""" Upload a directory to the Heavy Cloud Service.
Parameters
----------
input_dir : str
Directory containing _main.pd file.
output_dirs : list, optional
List of directories where the output should be placed. Usually the output directory list has only one element.
If no argument is given, the input directory will be used.
name : str, optional
The name of the patch.
If no argument is given, the name "heavy" is used.
owner : str, optional
The name of the owner of the patch. Usually this is an organisation.
If no argument is given, the submitting user name is used.
generators : list, optional
A list of generators e.g. 'c', 'unity', or 'vst2-x86'
b : bool, optional
If True, puts the results of each generator into its own directory.
False by default in which case all files are put into the same directory.
y : bool, optional
If True, extract only generated C files, static files are deleted. False by default.
release : str, optional
The name of the release to use for compiling.
release_override : bool, optional
Disable the validity check for a requested release. Forces sending a
release request to the server.
verbose : bool, optional
False by default.
token : str, optional
The token used to identify the user to Heavy Cloud Service.
By default the stored token will be used.
clear_token : bool, optional
Clears and ignores any existing stored tokens. Requests a new one from the command line.
service_token : str, optional
Pass an optional service token to be used instead of the default heavy_uploader.
force_new_patch : bool, optional
Indicate that a new patch should be created with the given name, if it does not yet exist.
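
    Examples
    --------
    A minimal sketch (the patch name and paths are illustrative)::

        exit_code, reply_json = upload(
            input_dir="~/pd/hello_world",
            output_dirs=["./build"],
            name="hello_world",
            generators=["c-src"])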
"""
# https://github.com/numpy/numpy/blob/master/doc/HOWTO_DOCUMENT.rst.txt
try:
# set default values
name = name or "heavy"
domain = domain or "https://enzienaudio.com"
exit_code = ErrorCodes.CODE_OK
reply_json = {}
temp_dir = None
post_data = {}
# token should be stored in ~/.heavy/token
token_path = os.path.expanduser(os.path.join("~/", ".heavy", "token"))
if token is None:
if os.path.exists(token_path):
if clear_token:
os.remove(token_path)
else:
with open(token_path, "r") as f:
token = f.read()
if token is None:
print "Please provide a user token from enzienaudio.com. " \
"Create or copy one from https://enzienaudio.com/h/<username>/settings."
token = getpass.getpass("Enter user token: ")
# write token to file
if not os.path.exists(os.path.dirname(token_path)):
# ensure that the .heavy directory exists
os.makedirs(os.path.dirname(token_path))
with open(token_path, "w") as f:
f.write(token)
os.chmod(token_path, stat.S_IRUSR | stat.S_IWUSR) # force rw------- permissions on the file
tick = time.time()
# check the validity of the token
try:
            # decode the JWT payload
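            # the token is a JWT of the form header.payload.signature, each part
            # base64url-encoded; only the payload is inspected here, the signature
            # is not verified client-side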
payload = json.loads(base64.urlsafe_b64decode(token.split(".")[1]))
payload["startDate"] = datetime.datetime.strptime(payload["startDate"], "%Y-%m-%dT%H:%M:%S.%f")
# ensure that the token is valid
now = datetime.datetime.utcnow()
assert payload["startDate"] <= now
if owner is None:
# if an owner is not supplied, default to the user name in the token
owner = payload["name"]
except Exception as e:
print "The user token is invalid. Generate a new one at https://enzienaudio.com/h/<username>/settings/."
exit_code = ErrorCodes.CODE_INVALID_TOKEN
raise e
# if there is a user-supplied service token, do a basic validity check
if service_token:
try:
                # check the validity of the service token
payload = json.loads(base64.urlsafe_b64decode(token.split(".")[1]))
payload["startDate"] = datetime.datetime.strptime(payload["startDate"], "%Y-%m-%dT%H:%M:%S.%f")
# ensure that the token is valid
now = datetime.datetime.utcnow()
assert payload["startDate"] <= now
assert "service" in payload, "'service' field required in service token payload."
except Exception as e:
print "The supplied service token is invalid. A default token will be used."
service_token = __HV_UPLOADER_SERVICE_TOKEN
else:
service_token = __HV_UPLOADER_SERVICE_TOKEN
# parse the optional release argument
if release:
if not release_override:
# check the validity of the current release
releases_json = requests.get(urlparse.urljoin(domain, "/a/releases/")).json()
if release in releases_json:
today = datetime.datetime.now()
valid_until = datetime.datetime.strptime(releases_json[release]["validUntil"], "%Y-%m-%d")
if today > valid_until:
print "{0}Warning:{1} The release \"{2}\" expired on {3}. It may be removed at any time!".format(
Colours.yellow, Colours.end,
release,
releases_json[release]["validUntil"])
elif (valid_until - today) <= datetime.timedelta(weeks=4):
print "{0}Warning:{1} The release \"{2}\" will expire soon on {3}.".format(
Colours.yellow, Colours.end,
release,
releases_json[release]["validUntil"])
else:
print "{0}Error:{1} The release \"{2}\" is not available. Available releases are:".format(
Colours.red, Colours.end,
release)
for k,v in releases_json.items():
print "* {0} ({1})".format(
k,
v["releaseDate"])
raise UploaderException(ErrorCodes.CODE_RELEASE_NOT_AVAILABLE)
post_data["release"] = release
# make a temporary directory
temp_dir = tempfile.mkdtemp(prefix="lroyal-")
# zip up the pd directory into the temporary directory
if not os.path.exists(os.path.join(input_dir, "_main.pd")):
raise UploaderException(
ErrorCodes.CODE_MAIN_NOT_FOUND,
"Root Pd directory does not contain a file named _main.pd.")
zip_path = __zip_dir(
input_dir,
os.path.join(temp_dir, "archive.zip"),
file_filter={"pd"})
if os.stat(zip_path).st_size > __HV_MAX_UPLOAD_SIZE:
raise UploaderException(
ErrorCodes.CODE_UPLOAD_ASSET_TOO_LARGE,
"The target directory, zipped, is {0} bytes. The maximum upload size of 1MB.".format(
os.stat(zip_path).st_size))
# the outputs to generate
generators = list({s.lower() for s in set(generators or [])} & __SUPPORTED_GENERATOR_SET)
# check if the patch exists already. Ask to create it if it doesn't exist
r = requests.get(
urlparse.urljoin(domain, "/a/patches/{0}/{1}/".format(owner, name)),
headers={
"Accept": "application/json",
"Authorization": "Bearer " + token,
"X-Heavy-Service-Token": service_token
})
r.raise_for_status()
reply_json = r.json()
if "errors" in reply_json:
if reply_json["errors"][0]["status"] == "404":
# the patch does not exist
if force_new_patch:
create_new_patch = True
else:
create_new_patch = raw_input("A patch called \"{0}\" does not exist for owner \"{1}\". Create it? (y/n):".format(name, owner))
create_new_patch = (create_new_patch == "y")
if create_new_patch:
r = requests.post(
urlparse.urljoin(domain, "/a/patches/"),
data={"owner_name":owner, "name":name},
headers={
"Accept": "application/json",
"Authorization": "Bearer " + token,
"X-Heavy-Service-Token": service_token
})
r.raise_for_status()
reply_json = r.json()
if "errors" in reply_json:
raise UploaderException(
ErrorCodes.CODE_NEW_PATCH_FAIL,
reply_json["errors"][0]["detail"])
else:
pass # no errors? everything is cool! Proceed.
else:
                    raise UploaderException(
ErrorCodes.CODE_NEW_PATCH_FAIL,
"A patch called \"{0}\" does not exist for owner \"{1}\"".format(owner, name))
else:
raise UploaderException(
ErrorCodes.CODE_NEW_PATCH_FAIL,
reply_json["errors"][0]["detail"])
else:
pass # the patch exists, move on
# upload the job, get the response back
r = requests.post(
urlparse.urljoin(domain, "/a/patches/{0}/{1}/jobs/".format(owner, name)),
data=post_data,
headers={
"Accept": "application/json",
"Authorization": "Bearer " + token,
"X-Heavy-Service-Token": service_token
},
files={"file": (os.path.basename(zip_path), open(zip_path, "rb"), "application/zip")})
r.raise_for_status()
# decode the JSON API response (See below for an example response)
reply_json = r.json()
if verbose:
print json.dumps(reply_json, sort_keys=True, indent=2, separators=(",", ": "))
# print any warnings
for i,x in enumerate(reply_json.get("warnings",[])):
print "{3}) {0}Warning:{1} {2}".format(
Colours.yellow, Colours.end, x["detail"], i+1)
# check for errors
if len(reply_json.get("errors",[])) > 0:
for i,x in enumerate(reply_json["errors"]):
print "{3}) {0}Error:{1} {2}".format(
Colours.red, Colours.end, x["detail"], i+1)
raise UploaderException(ErrorCodes.CODE_HEAVY_COMPILE_ERRORS)
print "Job URL:", urlparse.urljoin(domain, reply_json["data"]["links"]["html"])
print "Heavy release:", reply_json["data"]["attributes"]["release"]
if len(generators) > 0:
print "Downloaded files placed in:"
# retrieve all requested files
for i,g in enumerate(generators):
file_url = urlparse.urljoin(
domain,
"/".join([
reply_json["data"]["links"]["html"],
g.replace("-", "/"),
"archive.zip"
])
)
if file_url and (len(output_dirs) > i or b):
r = requests.get(
file_url,
headers={
"Authorization": "Bearer " + token,
"X-Heavy-Service-Token": service_token
},
timeout=None # some builds can take a very long time
)
r.raise_for_status()
# write the reply to a temporary file
c_zip_path = os.path.join(temp_dir, "archive.{0}.zip".format(g))
with open(c_zip_path, "wb") as f:
f.write(r.content)
# unzip the files to where they belong
if b:
target_dir = os.path.join(os.path.abspath(os.path.expanduser(output_dirs[0])), g)
else:
target_dir = os.path.abspath(os.path.expanduser(output_dirs[i]))
if not os.path.exists(target_dir):
os.makedirs(target_dir) # ensure that the output directory exists
__unzip(c_zip_path, target_dir)
if g == "c-src" and y:
keep_files = ("_{0}.h".format(name), "_{0}.hpp".format(name), "_{0}.cpp".format(name))
for f in os.listdir(target_dir):
if not f.endswith(keep_files):
                                os.remove(os.path.join(target_dir, f))
print " * {0}: {1}".format(g, target_dir)
else:
print " * {0}Warning:{1} {2} files could not be retrieved.".format(
Colours.yellow, Colours.end,
g)
print "Total request time: {0}ms".format(int(1000.0*(time.time()-tick)))
except UploaderException as e:
exit_code = e.code
if e.message:
print "{0}Error:{1} {2}".format(Colours.red, Colours.end, e.message)
except requests.ConnectionError as e:
print "{0}Error:{1} Could not connect to server. Is the server down? Is the internet down?\n{2}".format(Colours.red, Colours.end, e)
exit_code = ErrorCodes.CODE_CONNECTION_ERROR
except requests.Timeout as e:
print "{0}Error:{1} Connection to server timed out. The server might be overloaded. Try again later?\n{2}".format(Colours.red, Colours.end, e)
exit_code = ErrorCodes.CODE_CONNECTION_TIMEOUT
except requests.HTTPError as e:
if e.response.status_code == requests.codes.unauthorized:
print "{0}Error:{1} Unknown username or password.".format(Colours.red, Colours.end)
else:
print "{0}Error:{1} An HTTP error has occurred with URL {2}\n{3}".format(Colours.red, Colours.end, e.request.path_url, e)
exit_code = ErrorCodes.CODE_CONNECTION_400_500
except Exception as e:
# a generic catch for any other exception
exit_code = exit_code if exit_code != ErrorCodes.CODE_OK else ErrorCodes.CODE_EXCEPTION
print "{0}Error:{1} ({2}) {3}".format(Colours.red, Colours.end, e.__class__, e)
print "Getting a weird error? Get the latest version with 'pip install hv-uploader -U', or check for issues at https://github.com/enzienaudio/heavy/issues."
finally:
if temp_dir:
shutil.rmtree(temp_dir) # delete the temporary directory no matter what
return exit_code, reply_json
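# A minimal sketch of calling upload() directly; all values below are
# placeholders, and the remaining keyword parameters are assumed to have
# defaults (otherwise pass them explicitly, as main() below does):
#
#   exit_code, reply = upload(
#       input_dir="/path/to/pd",     # directory containing _main.pd
#       output_dirs=["./out"],
#       name="heavy",
#       owner="my_org",
#       generators=["c-src"],
#       token="<api token>")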
def main():
parser = argparse.ArgumentParser(
description="Compiles a Pure Data file.")
parser.add_argument(
"input_dir",
help="A directory containing _main.pd. All .pd files in the directory structure will be uploaded.")
parser.add_argument(
"-n", "--name",
default="heavy",
help="Patch name. If it doesn't exist on the Heavy site, the uploader will fail.")
parser.add_argument(
"--owner",
help="The name of the owner of patch. Usually this is of an organisation.")
parser.add_argument(
"-g", "--gen",
nargs="+",
help="List of generator outputs. Currently supported generators are '" + "', '".join(sorted(__SUPPORTED_GENERATOR_SET)) + "'.")
parser.add_argument(
"-b",
help="All files will be placed in the output directory, placed in their own subdirectory corresponding to the generator name.",
action="count")
parser.add_argument(
"-y",
help="Extract only the generated C files. Static files are deleted. "
"Only effective for the 'c' generator.",
action="count")
parser.add_argument(
"-o", "--out",
nargs="+",
default=["./"], # by default
help="List of destination directories for retrieved files. Order should be the same as for --gen.")
parser.add_argument(
"-r", "--release",
help="Optionally request a specific release of Heavy to use while compiling.")
parser.add_argument(
"-rr",
help="Send a request for a specific release to the server without checking for validity first.",
action="count")
parser.add_argument(
"-v", "--verbose",
help="Show debugging information.",
action="count")
parser.add_argument(
"-t", "--token",
help="Use the specified token.")
parser.add_argument(
"--clear_token",
help="Clears the exsiting token and asks for a new one from the command line.",
action="count")
parser.add_argument(
"--service_token",
help="Use a custom service token.")
parser.add_argument(
"-d", "--domain",
default="https://enzienaudio.com",
help="Domain. Default is https://enzienaudio.com.")
parser.add_argument(
"--force_new_patch",
help="Create a new patch if the given name doesn't already exist.",
action="count")
args = parser.parse_args()
    exit_code, response_obj = upload(
input_dir=args.input_dir,
output_dirs=args.out,
name=args.name,
owner=args.owner,
generators=args.gen,
b=args.b,
y=args.y,
release=args.release,
release_override=args.rr,
domain=args.domain,
verbose=args.verbose,
token=args.token,
clear_token=args.clear_token,
service_token=args.service_token,
force_new_patch=args.force_new_patch)
# exit and return the exit code
sys.exit(exit_code)
if __name__ == "__main__":
main()
"""
An example of the server response:
{
"data": {
"attributes": {
"compileTime": 0.266899,
"index": 188,
"release": "r2016.11",
"submittedAt": "2016-12-23T12:49:04.500000",
"warnings": []
},
"id": "mhroth/test_osc/188",
"links": {
"html": "/h/mhroth/test_osc/188",
"self": "/a/jobs/mhroth/test_osc/188"
},
"relationships": {
"files": {
"data": [
{
"id": "mhroth/test_osc/188/c/src",
"type": "file"
}
]
},
"patch": {
"links": {
"html": "/h/mhroth/test_osc",
"self": "/a/patches/mhroth/test_osc"
}
},
"submittedBy": {
"links": {
"html": "/h/mhroth",
"self": "/a/users/mhroth"
}
}
},
"type": "job"
},
"included": [
{
"data": {
"buildId": "c-src",
"compileTime": 0.266899,
"date": "2016-12-23T12:49:04.500000",
"mime": "application/zip",
"size": 51484
},
"id": "mhroth/test_osc/188/c/src",
"links": {
"self": "/h/mhroth/test_osc/188/c/src/archive.zip"
},
"type": "file"
}
]
}
"""
| gpl-2.0 | -762,648,957,300,477,000 | 39.835 | 238 | 0.5621 | false |
erikabarros/naguil | backend/appengine/routes/formacaos/edit.py | 1 | 1215 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from config.template_middleware import TemplateResponse
from gaebusiness.business import CommandExecutionException
from gaepermission.decorator import login_not_required
from tekton import router
from gaecookie.decorator import no_csrf
from formacao_app import formacao_facade
from routes import formacaos
from tekton.gae.middleware.redirect import RedirectResponse
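# Handlers: index() renders the edit form for an existing 'formacao' record;
# save() applies the update, re-rendering the form with errors on failure.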
@login_not_required
@no_csrf
def index(formacao_id):
formacao = formacao_facade.get_formacao_cmd(formacao_id)()
formacao_form = formacao_facade.formacao_form()
context = {'save_path': router.to_path(save, formacao_id), 'formacao': formacao_form.fill_with_model(formacao)}
return TemplateResponse(context, 'formacaos/formacao_form.html')
@login_not_required
def save(formacao_id, **formacao_properties):
cmd = formacao_facade.update_formacao_cmd(formacao_id, **formacao_properties)
try:
cmd()
except CommandExecutionException:
context = {'errors': cmd.errors, 'formacao': formacao_properties}
return TemplateResponse(context, 'formacaos/formacao_form.html')
return RedirectResponse(router.to_path(formacaos))
| mit | 1,807,663,158,633,435,000 | 39.5 | 115 | 0.766255 | false |
graalvm/mx | mx_ide_eclipse.py | 1 | 63800 | #
# ----------------------------------------------------------------------------------------------------
#
# Copyright (c) 2007, 2020, Oracle and/or its affiliates. All rights reserved.
# DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
#
# This code is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 2 only, as
# published by the Free Software Foundation.
#
# This code is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# version 2 for more details (a copy is included in the LICENSE file that
# accompanied this code).
#
# You should have received a copy of the GNU General Public License version
# 2 along with this work; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
# or visit www.oracle.com if you need additional information or have any
# questions.
#
# ----------------------------------------------------------------------------------------------------
#
from __future__ import print_function
import sys
import os, time, zipfile, tempfile
# TODO use defusedexpat?
import xml.parsers.expat, xml.sax.saxutils, xml.dom.minidom
import re
import difflib
from collections import namedtuple
from argparse import ArgumentParser, FileType
from os.path import join, basename, dirname, exists, isdir, abspath
import mx
import mx_ideconfig
import mx_javamodules
# Temporary imports and (re)definitions while porting mx from Python 2 to Python 3
if sys.version_info[0] < 3:
from StringIO import StringIO
else:
from io import StringIO
@mx.command('mx', 'eclipseformat')
def eclipseformat(args):
"""run the Eclipse Code Formatter on the Java sources
The exit code 1 denotes that at least one file was modified."""
parser = ArgumentParser(prog='mx eclipseformat')
parser.add_argument('-e', '--eclipse-exe', help='location of the Eclipse executable')
parser.add_argument('-C', '--no-backup', action='store_false', dest='backup', help='do not save backup of modified files')
parser.add_argument('--projects', action='store', help='comma separated projects to process (omit to process all projects)')
parser.add_argument('--primary', action='store_true', help='limit checks to primary suite')
parser.add_argument('--patchfile', type=FileType("w"), help='file to which a patch denoting the applied formatting changes is written')
parser.add_argument('--restore', action='store_true', help='restore original files after the formatting job (does not create a backup).')
parser.add_argument('--filelist', type=FileType("r"), help='only format the files listed in the given file')
args = parser.parse_args(args)
if args.eclipse_exe is None:
args.eclipse_exe = os.environ.get('ECLIPSE_EXE')
if args.eclipse_exe is None:
mx.abort('Could not find Eclipse executable. Use -e option or ensure ECLIPSE_EXE environment variable is set.')
if args.restore:
args.backup = False
# Maybe an Eclipse installation dir was specified - look for the executable in it
if isdir(args.eclipse_exe):
args.eclipse_exe = join(args.eclipse_exe, mx.exe_suffix('eclipse'))
mx.warn("The eclipse-exe was a directory, now using " + args.eclipse_exe)
if not os.path.isfile(args.eclipse_exe):
mx.abort('File does not exist: ' + args.eclipse_exe)
if not os.access(args.eclipse_exe, os.X_OK):
mx.abort('Not an executable file: ' + args.eclipse_exe)
filelist = None
if args.filelist:
filelist = [abspath(line.strip()) for line in args.filelist.readlines()]
args.filelist.close()
wsroot = eclipseinit([], buildProcessorJars=False, doFsckProjects=False)
# build list of projects to be processed
if args.projects is not None:
projectsToProcess = [mx.project(name) for name in args.projects.split(',')]
elif args.primary:
projectsToProcess = mx.projects(limit_to_primary=True)
else:
projectsToProcess = mx.projects(opt_limit_to_suite=True)
class Batch:
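        """
        Represents a set of Eclipse formatter settings. Sources whose projects
        have equal settings are grouped under one Batch so they can be
        formatted together in a single invocation of the Eclipse formatter.
        """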
def __init__(self, settingsDir):
self.path = join(settingsDir, 'org.eclipse.jdt.core.prefs')
with open(join(settingsDir, 'org.eclipse.jdt.ui.prefs')) as fp:
jdtUiPrefs = fp.read()
self.removeTrailingWhitespace = 'sp_cleanup.remove_trailing_whitespaces_all=true' in jdtUiPrefs
if self.removeTrailingWhitespace:
assert 'sp_cleanup.remove_trailing_whitespaces=true' in jdtUiPrefs and 'sp_cleanup.remove_trailing_whitespaces_ignore_empty=false' in jdtUiPrefs
self.cachedHash = None
def __hash__(self):
if not self.cachedHash:
self.cachedHash = (self.read_core_prefs_file(), self.removeTrailingWhitespace).__hash__()
return self.cachedHash
def __eq__(self, other):
if not isinstance(other, Batch):
return False
if self.removeTrailingWhitespace != other.removeTrailingWhitespace:
return False
if self.path == other.path:
return True
return self.read_core_prefs_file() == other.read_core_prefs_file()
def read_core_prefs_file(self):
with open(self.path) as fp:
content = fp.read()
# processAnnotations does not matter for eclipseformat, ignore its value as otherwise we would create extra batches and slow down eclipseformat
content = content.replace('org.eclipse.jdt.core.compiler.processAnnotations=disabled\n', '').replace('org.eclipse.jdt.core.compiler.processAnnotations=enabled\n', '')
return content
class FileInfo:
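        """
        Snapshot of a source file's content and timestamps, used to detect,
        report and optionally revert changes made by the formatter.
        """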
def __init__(self, path):
self.path = path
with open(path) as fp:
self.content = fp.read()
self.times = (os.path.getatime(path), mx.getmtime(path))
def update(self, removeTrailingWhitespace, restore):
with open(self.path) as fp:
content = fp.read()
file_modified = False # whether the file was modified by formatting
file_updated = False # whether the file is really different on disk after the update
if self.content != content:
# Only apply *after* formatting to match the order in which the IDE does it
if removeTrailingWhitespace:
content, n = re.subn(r'[ \t]+$', '', content, flags=re.MULTILINE)
if n != 0 and self.content == content:
# undo on-disk changes made by the Eclipse formatter
with open(self.path, 'w') as fp:
fp.write(content)
if self.content != content:
rpath = os.path.relpath(self.path, mx.primary_suite().dir)
self.diff = difflib.unified_diff(self.content.splitlines(1), content.splitlines(1), fromfile=join('a', rpath), tofile=join('b', rpath))
if restore:
with open(self.path, 'w') as fp:
fp.write(self.content)
else:
file_updated = True
self.content = content
file_modified = True
if not file_updated and (os.path.getatime(self.path), mx.getmtime(self.path)) != self.times:
# reset access and modification time of file
os.utime(self.path, self.times)
return file_modified
modified = list()
batches = dict() # all sources with the same formatting settings are formatted together
for p in projectsToProcess:
if not p.isJavaProject():
continue
sourceDirs = p.source_dirs()
batch = Batch(join(p.dir, '.settings'))
if not exists(batch.path):
if mx._opts.verbose:
mx.log('[no Eclipse Code Formatter preferences at {0} - skipping]'.format(batch.path))
continue
javafiles = []
for sourceDir in sourceDirs:
for root, _, files in os.walk(sourceDir):
for f in [join(root, name) for name in files if name.endswith('.java')]:
if filelist is None or f in filelist:
javafiles.append(FileInfo(f))
if len(javafiles) == 0:
mx.logv('[no Java sources in {0} - skipping]'.format(p.name))
continue
res = batches.setdefault(batch, javafiles)
if res is not javafiles:
res.extend(javafiles)
mx.log("we have: " + str(len(batches)) + " batches")
batch_num = 0
for batch, javafiles in batches.items():
batch_num += 1
mx.log("Processing batch {0} ({1} files)...".format(batch_num, len(javafiles)))
jdk = mx.get_jdk()
with tempfile.NamedTemporaryFile(mode='w') as tmp_eclipseini:
with open(join(dirname(args.eclipse_exe), join('..', 'Eclipse', 'eclipse.ini') if mx.is_darwin() else 'eclipse.ini'), 'r') as src:
locking_added = False
for line in src.readlines():
tmp_eclipseini.write(line)
if line.strip() == '-vmargs':
tmp_eclipseini.write('-Dosgi.locking=none\n')
locking_added = True
if not locking_added:
tmp_eclipseini.write('-vmargs\n-Dosgi.locking=none\n')
tmp_eclipseini.flush()
for chunk in mx._chunk_files_for_command_line(javafiles, pathFunction=lambda f: f.path):
capture = mx.OutputCapture()
rc = mx.run([args.eclipse_exe,
'--launcher.ini', tmp_eclipseini.name,
'-nosplash',
'-application',
'-consolelog',
'-data', wsroot,
'-vm', jdk.java,
'org.eclipse.jdt.core.JavaCodeFormatter',
'-config', batch.path]
+ [f.path for f in chunk], out=capture, err=capture, nonZeroIsFatal=False)
if rc != 0:
mx.log(capture.data)
mx.abort("Error while running formatter")
for fi in chunk:
if fi.update(batch.removeTrailingWhitespace, args.restore):
modified.append(fi)
mx.log('{0} files were modified'.format(len(modified)))
if len(modified) != 0:
arcbase = mx.primary_suite().dir
if args.backup:
backup = os.path.abspath('eclipseformat.backup.zip')
zf = zipfile.ZipFile(backup, 'w', zipfile.ZIP_DEFLATED)
for fi in modified:
diffs = ''.join(fi.diff)
if args.patchfile:
args.patchfile.write(diffs)
name = os.path.relpath(fi.path, arcbase)
mx.log(' - {0}'.format(name))
mx.log('Changes:')
mx.log(diffs)
if args.backup:
arcname = name.replace(os.sep, '/')
zf.writestr(arcname, fi.content)
if args.backup:
zf.close()
mx.log('Wrote backup of {0} modified files to {1}'.format(len(modified), backup))
if args.patchfile:
mx.log('Wrote patches to {0}'.format(args.patchfile.name))
args.patchfile.close()
return 1
return 0
def _source_locator_memento(deps, jdk=None):
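    """
    Creates an Eclipse source lookup director memento referencing the sources
    of `deps` (libraries, JDK libraries and Java projects) plus a JRE System
    Library for the highest Java compliance seen among the projects.

    Returns the memento as an XMLDoc along with a list of strings describing
    the included source containers.
    """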
slm = mx.XMLDoc()
slm.open('sourceLookupDirector')
slm.open('sourceContainers', {'duplicates' : 'false'})
javaCompliance = None
sources = []
for dep in deps:
if dep.isLibrary():
if hasattr(dep, 'eclipse.container'):
memento = mx.XMLDoc().element('classpathContainer', {'path' : getattr(dep, 'eclipse.container')}).xml(standalone='no')
slm.element('classpathContainer', {'memento' : memento, 'typeId':'org.eclipse.jdt.launching.sourceContainer.classpathContainer'})
sources.append(getattr(dep, 'eclipse.container') +' [classpathContainer]')
elif dep.get_source_path(resolve=True):
memento = mx.XMLDoc().element('archive', {'detectRoot' : 'true', 'path' : dep.get_source_path(resolve=True)}).xml(standalone='no')
slm.element('container', {'memento' : memento, 'typeId':'org.eclipse.debug.core.containerType.externalArchive'})
sources.append(dep.get_source_path(resolve=True) + ' [externalArchive]')
elif dep.isJdkLibrary():
if jdk is None:
jdk = mx.get_jdk(tag='default')
path = dep.get_source_path(jdk)
if path:
if os.path.isdir(path):
memento = mx.XMLDoc().element('directory', {'nest' : 'false', 'path' : path}).xml(standalone='no')
slm.element('container', {'memento' : memento, 'typeId':'org.eclipse.debug.core.containerType.directory'})
sources.append(path + ' [directory]')
else:
memento = mx.XMLDoc().element('archive', {'detectRoot' : 'true', 'path' : path}).xml(standalone='no')
slm.element('container', {'memento' : memento, 'typeId':'org.eclipse.debug.core.containerType.externalArchive'})
sources.append(path + ' [externalArchive]')
elif dep.isProject():
if not dep.isJavaProject():
continue
memento = mx.XMLDoc().element('javaProject', {'name' : dep.name}).xml(standalone='no')
slm.element('container', {'memento' : memento, 'typeId':'org.eclipse.jdt.launching.sourceContainer.javaProject'})
sources.append(dep.name + ' [javaProject]')
if javaCompliance is None or dep.javaCompliance > javaCompliance:
javaCompliance = dep.javaCompliance
if javaCompliance:
jdkContainer = 'org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/' + _to_EclipseJRESystemLibrary(javaCompliance)
memento = mx.XMLDoc().element('classpathContainer', {'path' : jdkContainer}).xml(standalone='no')
slm.element('classpathContainer', {'memento' : memento, 'typeId':'org.eclipse.jdt.launching.sourceContainer.classpathContainer'})
sources.append(jdkContainer + ' [classpathContainer]')
else:
memento = mx.XMLDoc().element('classpathContainer', {'path' : 'org.eclipse.jdt.launching.JRE_CONTAINER'}).xml(standalone='no')
slm.element('classpathContainer', {'memento' : memento, 'typeId':'org.eclipse.jdt.launching.sourceContainer.classpathContainer'})
sources.append('org.eclipse.jdt.launching.JRE_CONTAINER [classpathContainer]')
slm.close('sourceContainers')
slm.close('sourceLookupDirector')
return slm, sources
### ~~~~~~~~~~~~~ IDE / Eclipse / Netbeans / IntelliJ
def make_eclipse_attach(suite, hostname, port, name=None, deps=None, jdk=None):
"""
Creates an Eclipse launch configuration file for attaching to a Java process.
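
    Returns a (changed, launchFile) tuple, or (None, None) if the suite
    contains no Java projects. Example (host and port are illustrative)::

        make_eclipse_attach(suite, 'localhost', '8000', deps=mx.dependencies())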
"""
if deps is None:
deps = []
javaProjects = [p for p in suite.projects if p.isJavaProject()]
if len(javaProjects) == 0:
return None, None
slm, sources = _source_locator_memento(deps, jdk=jdk)
# Without an entry for the "Project:" field in an attach configuration, Eclipse Neon has problems connecting
# to a waiting VM and leaves it hanging. Putting any valid project entry in the field seems to solve it.
firstProjectName = javaProjects[0].name
launch = mx.XMLDoc()
launch.open('launchConfiguration', {'type' : 'org.eclipse.jdt.launching.remoteJavaApplication'})
launch.element('stringAttribute', {'key' : 'org.eclipse.debug.core.source_locator_id', 'value' : 'org.eclipse.jdt.launching.sourceLocator.JavaSourceLookupDirector'})
launch.element('stringAttribute', {'key' : 'org.eclipse.debug.core.source_locator_memento', 'value' : '%s'})
launch.element('booleanAttribute', {'key' : 'org.eclipse.jdt.launching.ALLOW_TERMINATE', 'value' : 'true'})
launch.open('mapAttribute', {'key' : 'org.eclipse.jdt.launching.CONNECT_MAP'})
launch.element('mapEntry', {'key' : 'hostname', 'value' : hostname})
launch.element('mapEntry', {'key' : 'port', 'value' : port})
launch.close('mapAttribute')
launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.PROJECT_ATTR', 'value' : firstProjectName})
launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.VM_CONNECTOR_ID', 'value' : 'org.eclipse.jdt.launching.socketAttachConnector'})
launch.close('launchConfiguration')
launch = launch.xml(newl='\n', standalone='no') % slm.xml(escape=True, standalone='no')
if name is None:
if len(mx.suites()) == 1:
suitePrefix = ''
else:
suitePrefix = suite.name + '-'
name = suitePrefix + 'attach-' + hostname + '-' + port
eclipseLaunches = mx.ensure_dir_exists(join(suite.mxDir, 'eclipse-launches'))
launchFile = join(eclipseLaunches, name + '.launch')
sourcesFile = join(eclipseLaunches, name + '.sources')
mx.update_file(sourcesFile, '\n'.join(sources))
return mx.update_file(launchFile, launch), launchFile
def make_eclipse_launch(suite, javaArgs, jre, name=None, deps=None):
"""
Creates an Eclipse launch configuration file for running/debugging a Java command.
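
    `javaArgs` is parsed like a `java` command line; e.g.
    ['-cp', 'foo.jar', 'com.example.Main', 'arg'] is split into VM arguments
    ('-cp foo.jar'), the main class and the application arguments.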
"""
if deps is None:
deps = []
mainClass = None
vmArgs = []
appArgs = []
cp = None
argsCopy = list(reversed(javaArgs))
while len(argsCopy) != 0:
a = argsCopy.pop()
if a == '-jar':
mainClass = '-jar'
appArgs = list(reversed(argsCopy))
break
if a in mx._VM_OPTS_SPACE_SEPARATED_ARG:
assert len(argsCopy) != 0
cp = argsCopy.pop()
vmArgs.append(a)
vmArgs.append(cp)
elif a.startswith('-'):
vmArgs.append(a)
else:
mainClass = a
appArgs = list(reversed(argsCopy))
break
if mainClass is None:
mx.log('Cannot create Eclipse launch configuration without main class or jar file: java ' + ' '.join(javaArgs))
return False
if name is None:
if mainClass == '-jar':
name = basename(appArgs[0])
if len(appArgs) > 1 and not appArgs[1].startswith('-'):
name = name + '_' + appArgs[1]
else:
name = mainClass
name = time.strftime('%Y-%m-%d-%H%M%S_' + name)
if cp is not None:
for e in cp.split(os.pathsep):
for s in mx.suites():
deps += [p for p in s.projects if e == p.output_dir()]
deps += [l for l in s.libs if e == l.get_path(False)]
slm, sources = _source_locator_memento(deps)
launch = mx.XMLDoc()
launch.open('launchConfiguration', {'type' : 'org.eclipse.jdt.launching.localJavaApplication'})
launch.element('stringAttribute', {'key' : 'org.eclipse.debug.core.source_locator_id', 'value' : 'org.eclipse.jdt.launching.sourceLocator.JavaSourceLookupDirector'})
launch.element('stringAttribute', {'key' : 'org.eclipse.debug.core.source_locator_memento', 'value' : '%s'})
launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.JRE_CONTAINER', 'value' : 'org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/' + jre})
launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.MAIN_TYPE', 'value' : mainClass})
launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.PROGRAM_ARGUMENTS', 'value' : ' '.join(appArgs)})
launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.PROJECT_ATTR', 'value' : ''})
launch.element('stringAttribute', {'key' : 'org.eclipse.jdt.launching.VM_ARGUMENTS', 'value' : ' '.join(vmArgs)})
launch.close('launchConfiguration')
launch = launch.xml(newl='\n', standalone='no') % slm.xml(escape=True, standalone='no')
eclipseLaunches = mx.ensure_dir_exists(join(suite.mxDir, 'eclipse-launches'))
launchFile = join(eclipseLaunches, name + '.launch')
sourcesFile = join(eclipseLaunches, name + '.sources')
mx.update_file(sourcesFile, '\n'.join(sources))
return mx.update_file(launchFile, launch)
@mx.command('mx', 'eclipseinit')
def eclipseinit_cli(args):
"""(re)generate Eclipse project configurations and working sets"""
parser = ArgumentParser(prog='mx eclipseinit')
parser.add_argument('--no-build', action='store_false', dest='buildProcessorJars', help='Do not build annotation processor jars.')
parser.add_argument('--no-python-projects', action='store_false', dest='pythonProjects', help='Do not generate PyDev projects for the mx python projects.')
parser.add_argument('-C', '--log-to-console', action='store_true', dest='logToConsole', help='Send builder output to eclipse console.')
parser.add_argument('-f', '--force', action='store_true', dest='force', default=False, help='Ignore timestamps when updating files.')
parser.add_argument('-A', '--absolute-paths', action='store_true', dest='absolutePaths', default=False, help='Use absolute paths in project files.')
args = parser.parse_args(args)
eclipseinit(None, args.buildProcessorJars, logToConsole=args.logToConsole, force=args.force, absolutePaths=args.absolutePaths, pythonProjects=args.pythonProjects)
mx.log('----------------------------------------------')
workspace_dir = os.path.dirname(os.path.abspath(mx.primary_suite().vc_dir))
mx.log('Eclipse project generation successfully completed for:')
mx.log(' ' + (os.linesep + " ").join(sorted([suite.dir for suite in mx.suites(True)])))
mx.log('')
mx.log('The recommended next steps are:')
mx.log(' 1) Open Eclipse with workspace path: {0}'.format(workspace_dir))
mx.log(' 2) Open project import wizard using: File -> Import -> Existing Projects into Workspace -> Next.')
mx.log(' 3) For "select root directory" enter path {0}'.format(workspace_dir))
mx.log(' 4) Make sure "Search for nested projects" is checked and press "Finish".')
mx.log('')
mx.log(' hint) If you select "Close newly imported projects upon completion" then the import is more efficient. ')
mx.log(' Projects needed for development can be opened conveniently using the generated Suite working sets from the context menu.')
mx.log('')
mx.log('Note that setting MX_BUILD_EXPLODED=true can improve Eclipse build times. See "Exploded builds" in the mx README.md.')
mx.log('----------------------------------------------')
if _EclipseJRESystemLibraries:
executionEnvironments = [n for n in _EclipseJRESystemLibraries if n.startswith('JavaSE-')]
installedJREs = [n for n in _EclipseJRESystemLibraries if not n.startswith('JavaSE-')]
if executionEnvironments:
mx.log('Ensure that these Execution Environments have a Compatible JRE in Eclipse (Preferences -> Java -> Installed JREs -> Execution Environments):')
for name in executionEnvironments:
mx.log(' ' + name)
if installedJREs:
mx.log('Ensure that there are Installed JREs with these exact names in Eclipse (Preferences -> Java -> Installed JREs):')
for name in installedJREs:
mx.log(' ' + name)
mx.log('You can set the "JRE name" field for a JDK when initially adding it or later with the "Edit..." button.')
mx.log('See https://help.eclipse.org/photon/topic/org.eclipse.jdt.doc.user/tasks/task-add_new_jre.htm on how to add')
mx.log('a new JDK to Eclipse. Be sure to select "Standard VM" (even on macOS) for the JRE type.')
mx.log('----------------------------------------------')
def eclipseinit(args, buildProcessorJars=True, refreshOnly=False, logToConsole=False, doFsckProjects=True, force=False, absolutePaths=False, pythonProjects=False):
"""(re)generate Eclipse project configurations and working sets"""
for s in mx.suites(True) + [mx._mx_suite]:
_eclipseinit_suite(s, buildProcessorJars, refreshOnly, logToConsole, force, absolutePaths, pythonProjects)
wsroot = generate_eclipse_workingsets()
if doFsckProjects and not refreshOnly:
mx_ideconfig.fsckprojects([])
return wsroot
EclipseLinkedResource = namedtuple('LinkedResource', ['name', 'type', 'location'])
def _eclipse_linked_resource(name, res_type, location):
return EclipseLinkedResource(name, str(res_type), location)
def get_eclipse_project_rel_locationURI(path, eclipseProjectDir):
"""
Gets the URI for a resource relative to an Eclipse project directory (i.e.,
the directory containing the `.project` file for the project). The URI
returned is based on the builtin PROJECT_LOC Eclipse variable.
See http://stackoverflow.com/a/7585095
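
    For example, a `path` two directory levels above `eclipseProjectDir` maps
    to a URI of the form 'PARENT-2-PROJECT_LOC/<remaining/path>'.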
"""
relpath = os.path.relpath(path, eclipseProjectDir)
names = relpath.split(os.sep)
parents = len([n for n in names if n == '..'])
sep = '/' # Yes, even on Windows...
if parents:
projectLoc = 'PARENT-{}-PROJECT_LOC'.format(parents)
else:
projectLoc = 'PROJECT_LOC'
return sep.join([projectLoc] + [n for n in names if n != '..'])
def _get_eclipse_output_path(project_loc, p, linkedResources=None):
"""
Gets the Eclipse path attribute value for the output of project `p` whose
Eclipse configuration is in the directory `project_loc`.
"""
outputDirRel = os.path.relpath(p.output_dir(), project_loc)
if outputDirRel.startswith('..'):
name = basename(outputDirRel)
if linkedResources is not None:
linkedResources.append(_eclipse_linked_resource(name, IRESOURCE_FOLDER, p.output_dir()))
return name
else:
return outputDirRel
#: Highest Execution Environment defined by most recent Eclipse release.
#: https://wiki.eclipse.org/Execution_Environments
#: https://git.eclipse.org/c/jdt/eclipse.jdt.debug.git/plain/org.eclipse.jdt.launching/plugin.properties
_max_Eclipse_JavaExecutionEnvironment = 15 # pylint: disable=invalid-name
_EclipseJRESystemLibraries = set()
def _to_EclipseJRESystemLibrary(compliance):
"""
Converts a Java compliance value to a JRE System Library that
can be put on a project's Build Path.
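
    For example, a compliance of 11 yields 'JavaSE-11', while a compliance
    above the highest known Execution Environment (e.g. 17) yields 'jdk-17'.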
"""
if not isinstance(compliance, mx.JavaCompliance):
compliance = mx.JavaCompliance(compliance)
if compliance.value > _max_Eclipse_JavaExecutionEnvironment:
res = 'jdk-' + str(compliance)
else:
res = 'JavaSE-' + str(compliance)
_EclipseJRESystemLibraries.add(res)
return res
RelevantResource = namedtuple('RelevantResource', ['path', 'type'])
# http://grepcode.com/file/repository.grepcode.com/java/eclipse.org/4.4.2/org.eclipse.core/resources/3.9.1/org/eclipse/core/resources/IResource.java#76
IRESOURCE_FILE = 1
IRESOURCE_FOLDER = 2
def _add_eclipse_linked_resources(xml_doc, project_loc, linked_resources, absolutePaths=False):
"""
Adds a ``linkedResources`` element to `xml_doc` for the resources described by `linked_resources`.
:param project_loc: directory containing ``.project`` file containing the content of `xml_doc`
"""
if linked_resources:
xml_doc.open('linkedResources')
for lr in linked_resources:
xml_doc.open('link')
xml_doc.element('name', data=lr.name)
xml_doc.element('type', data=lr.type)
xml_doc.element('locationURI', data=get_eclipse_project_rel_locationURI(lr.location, project_loc) if not absolutePaths else lr.location)
xml_doc.close('link')
xml_doc.close('linkedResources')
def _eclipse_project_rel(project_loc, path, linked_resources, res_type=IRESOURCE_FOLDER):
"""
Converts `path` to be relative to `project_loc`, adding a linked
resource to `linked_resources` if `path` is not under `project_loc`.
:param str res_type: IRESOURCE_FOLDER if path denotes a directory, IRESOURCE_FILE for a regular file
"""
if not path.startswith(project_loc):
name = basename(path)
linked_resources.append(_eclipse_linked_resource(name, res_type, path))
return name
else:
return os.path.relpath(path, project_loc)
def _eclipseinit_project(p, files=None, libFiles=None, absolutePaths=False):
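    """
    Generates the Eclipse configuration files (.classpath, .project,
    .factorypath, .checkstyle and .settings) for the Java project `p`,
    appending the written files to `files` and referenced library jars
    to `libFiles` if those lists are given.
    """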
# PROJECT_LOC Eclipse variable
project_loc = mx.ensure_dir_exists(p.dir)
linkedResources = []
out = mx.XMLDoc()
out.open('classpath')
def _add_src_classpathentry(path, attributes=None):
out.open('classpathentry', {'kind' : 'src', 'path' : _eclipse_project_rel(project_loc, path, linkedResources)})
if attributes:
out.open('attributes')
for name, value in attributes.items():
out.element('attribute', {'name' : name, 'value' : value})
out.close('attributes')
out.close('classpathentry')
for src in p.srcDirs:
_add_src_classpathentry(mx.ensure_dir_exists(join(p.dir, src)))
processors = p.annotation_processors()
if processors:
gen_dir = mx.ensure_dir_exists(p.source_gen_dir())
# ignore warnings produced by third-party annotation processors
has_external_processors = any((ap for ap in p.declaredAnnotationProcessors if ap.isLibrary()))
attributes = {'ignore_optional_problems': 'true'} if has_external_processors else None
_add_src_classpathentry(gen_dir, attributes)
if files:
files.append(gen_dir)
if exists(join(p.dir, 'plugin.xml')): # eclipse plugin project
out.element('classpathentry', {'kind' : 'con', 'path' : 'org.eclipse.pde.core.requiredPlugins'})
projectDeps = []
jdk = mx.get_jdk(p.javaCompliance)
def preVisitDep(dep, edge):
if dep.isLibrary() and hasattr(dep, 'eclipse.container'):
container = getattr(dep, 'eclipse.container')
out.element('classpathentry', {'exported' : 'true', 'kind' : 'con', 'path' : container})
# Ignore the dependencies of this library
return False
return True
def processLibraryDep(dep):
assert not hasattr(dep, 'eclipse.container'), dep.name + ' should have been handled in preVisitDep'
path = dep.get_path(resolve=True)
# Relative paths for "lib" class path entries have various semantics depending on the Eclipse
# version being used (e.g. see https://bugs.eclipse.org/bugs/show_bug.cgi?id=274737) so it's
# safest to simply use absolute paths.
# It's important to use dep.suite as the location for when one suite references
# a library in another suite.
path = mx._make_absolute(path, dep.suite.dir)
attributes = {'exported' : 'true', 'kind' : 'lib', 'path' : path}
sourcePath = dep.get_source_path(resolve=True)
if sourcePath is not None:
attributes['sourcepath'] = sourcePath
out.element('classpathentry', attributes)
if libFiles:
libFiles.append(path)
def processJdkLibraryDep(dep):
path = dep.classpath_repr(jdk, resolve=True)
if path:
attributes = {'exported' : 'true', 'kind' : 'lib', 'path' : path}
sourcePath = dep.get_source_path(jdk)
if sourcePath is not None:
attributes['sourcepath'] = sourcePath
out.element('classpathentry', attributes)
if libFiles:
libFiles.append(path)
def processDep(dep, edge):
if dep is p:
return
if dep.isLibrary() or dep.isMavenProject():
processLibraryDep(dep)
elif dep.isJavaProject():
high_bound = dep.javaCompliance._high_bound()
if not high_bound or high_bound >= p.javaCompliance.value:
projectDeps.append(dep)
else:
# Ignore a dep whose highest Java level is less than p's level
pass
elif dep.isNativeProject():
projectDeps.append(dep)
elif dep.isJdkLibrary():
processJdkLibraryDep(dep)
elif dep.isJARDistribution() and isinstance(dep.suite, mx.BinarySuite):
out.element('classpathentry', {'exported' : 'true', 'kind' : 'lib', 'path' : dep.path, 'sourcepath' : dep.sourcesPath})
elif dep.isJreLibrary() or dep.isDistribution():
pass
elif dep.isProject():
mx.logv('ignoring project ' + dep.name + ' for eclipseinit')
else:
mx.abort('unexpected dependency: ' + str(dep))
p.walk_deps(preVisit=preVisitDep, visit=processDep)
# When targeting JDK 8 or earlier, dependencies need to precede the JDK on the Eclipse build path.
# There may be classes in dependencies that are also in the JDK. We want to compile against the
# former. This is the same -Xbootclasspath:/p trick done in JavacLikeCompiler.prepare.
putJREFirstOnBuildPath = p.javaCompliance.value >= 9
allProjectPackages = set()
for dep in projectDeps:
if not dep.isNativeProject():
allProjectPackages.update(dep.defined_java_packages())
if not putJREFirstOnBuildPath:
out.element('classpathentry', {'combineaccessrules' : 'false', 'exported' : 'true', 'kind' : 'src', 'path' : '/' + dep.name})
# Every Java program depends on a JRE
jreSystemLibrary = _to_EclipseJRESystemLibrary(jdk.javaCompliance)
out.open('classpathentry', {'kind' : 'con', 'path' : 'org.eclipse.jdt.launching.JRE_CONTAINER/org.eclipse.jdt.internal.debug.ui.launcher.StandardVMType/' + jreSystemLibrary})
if jdk.javaCompliance >= '9':
out.open('attributes')
out.element('attribute', {'name' : 'module', 'value' : 'true'})
moduleDeps = p.get_concealed_imported_packages(jdk=jdk)
if len(moduleDeps) != 0:
# Ignore modules (such as jdk.internal.vm.compiler) that define packages
# that are also defined by project deps as the latter will have the most
# recent API.
exports = sorted([(module, pkgs) for module, pkgs in moduleDeps.items() if allProjectPackages.isdisjoint(pkgs)])
if exports:
addExportsValue = []
exported_modules = []
for module, pkgs in exports:
addExportsValue.extend([module + '/' + pkg + '=ALL-UNNAMED' for pkg in pkgs])
exported_modules.append(module)
out.element('attribute', {'name' : 'add-exports', 'value' : ':'.join(addExportsValue)})
roots = jdk.get_root_modules()
observable_modules = jdk.get_modules()
default_module_graph = mx_javamodules.get_transitive_closure(roots, observable_modules)
module_graph = mx_javamodules.get_transitive_closure(roots + exported_modules, observable_modules)
if default_module_graph != module_graph:
# https://github.com/eclipse/eclipse.jdt.core/blob/00dd337bcfe08d8b2d60529b0f7874b88e621c06/org.eclipse.jdt.core/model/org/eclipse/jdt/internal/core/JavaProject.java#L704-L715
out.element('attribute', {'name' : 'limit-modules', 'value' : ','.join([m.name for m in module_graph])})
out.close('attributes')
out.close('classpathentry')
if putJREFirstOnBuildPath:
for dep in projectDeps:
if not dep.isNativeProject():
out.element('classpathentry', {'combineaccessrules' : 'false', 'exported' : 'true', 'kind' : 'src', 'path' : '/' + dep.name})
out.element('classpathentry', {'kind' : 'output', 'path' : _get_eclipse_output_path(project_loc, p, linkedResources)})
out.close('classpath')
classpathFile = join(project_loc, '.classpath')
mx.update_file(classpathFile, out.xml(indent='\t', newl='\n'))
if files:
files.append(classpathFile)
csConfig, _, cs_project = p.get_checkstyle_config()
if csConfig:
out = mx.XMLDoc()
dotCheckstyle = join(project_loc, ".checkstyle")
cs_path = _eclipse_project_rel(project_loc, csConfig, linkedResources, IRESOURCE_FILE)
out.open('fileset-config', {'file-format-version' : '1.2.0', 'simple-config' : 'false'})
out.open('local-check-config', {'name' : 'Checks', 'location' : '/' + cs_project.name + '/' + cs_path, 'type' : 'project', 'description' : ''})
out.element('additional-data', {'name' : 'protect-config-file', 'value' : 'false'})
out.close('local-check-config')
out.open('fileset', {'name' : 'all', 'enabled' : 'true', 'check-config-name' : 'Checks', 'local' : 'true'})
out.element('file-match-pattern', {'match-pattern' : r'.*\.java$', 'include-pattern' : 'true'})
out.element('file-match-pattern', {'match-pattern' : p.source_gen_dir_name() + os.sep + '.*', 'include-pattern' : 'false'})
out.element('file-match-pattern', {'match-pattern' : '/package-info.java$', 'include-pattern' : 'false'})
out.close('fileset')
exclude = join(p.dir, '.checkstyle.exclude')
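        # NOTE: this block is short-circuited by 'if False', so any
        # .checkstyle.exclude file is currently ignored.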
if False and exists(exclude):
out.open('filter', {'name' : 'FilesFromPackage', 'enabled' : 'true'})
with open(exclude) as f:
for line in f:
if not line.startswith('#'):
line = line.strip()
out.element('filter-data', {'value' : line})
out.close('filter')
out.close('fileset-config')
mx.update_file(dotCheckstyle, out.xml(indent=' ', newl='\n'))
if files:
files.append(dotCheckstyle)
else:
# clean up existing .checkstyle file
dotCheckstyle = join(project_loc, ".checkstyle")
if exists(dotCheckstyle):
os.unlink(dotCheckstyle)
out = mx.XMLDoc()
out.open('projectDescription')
out.element('name', data=p.name)
out.element('comment', data='')
out.open('projects')
for dep in projectDeps:
if not dep.isNativeProject():
out.element('project', data=dep.name)
out.close('projects')
out.open('buildSpec')
out.open('buildCommand')
out.element('name', data='org.eclipse.jdt.core.javabuilder')
out.element('arguments', data='')
out.close('buildCommand')
if csConfig:
out.open('buildCommand')
out.element('name', data='net.sf.eclipsecs.core.CheckstyleBuilder')
out.element('arguments', data='')
out.close('buildCommand')
if exists(join(p.dir, 'plugin.xml')): # eclipse plugin project
for buildCommand in ['org.eclipse.pde.ManifestBuilder', 'org.eclipse.pde.SchemaBuilder']:
out.open('buildCommand')
out.element('name', data=buildCommand)
out.element('arguments', data='')
out.close('buildCommand')
out.close('buildSpec')
out.open('natures')
out.element('nature', data='org.eclipse.jdt.core.javanature')
if exists(join(p.dir, 'plugin.xml')): # eclipse plugin project
out.element('nature', data='org.eclipse.pde.PluginNature')
out.close('natures')
_add_eclipse_linked_resources(out, project_loc, linkedResources, absolutePaths)
out.close('projectDescription')
projectFile = join(project_loc, '.project')
mx.update_file(projectFile, out.xml(indent='\t', newl='\n'))
if files:
files.append(projectFile)
# copy a possibly modified file to the project's .settings directory
_copy_eclipse_settings(project_loc, p, files)
if processors:
out = mx.XMLDoc()
out.open('factorypath')
out.element('factorypathentry', {'kind' : 'PLUGIN', 'id' : 'org.eclipse.jst.ws.annotations.core', 'enabled' : 'true', 'runInBatchMode' : 'false'})
processorsPath = mx.classpath_entries(names=processors)
for e in processorsPath:
if e.isDistribution() and not isinstance(e.suite, mx.BinarySuite):
out.element('factorypathentry', {'kind' : 'WKSPJAR', 'id' : '/{0}/{1}'.format(e.name, basename(e.path)), 'enabled' : 'true', 'runInBatchMode' : 'false'})
elif e.isJdkLibrary() or e.isJreLibrary():
path = e.classpath_repr(jdk, resolve=True)
if path:
out.element('factorypathentry', {'kind' : 'EXTJAR', 'id' : path, 'enabled' : 'true', 'runInBatchMode' : 'false'})
else:
out.element('factorypathentry', {'kind' : 'EXTJAR', 'id' : e.classpath_repr(resolve=True), 'enabled' : 'true', 'runInBatchMode' : 'false'})
if p.javaCompliance >= '9':
concealedAPDeps = {}
for dep in mx.classpath_entries(names=processors, preferProjects=True):
if dep.isJavaProject():
concealed = dep.get_concealed_imported_packages(jdk)
if concealed:
for module, pkgs in concealed.items():
concealedAPDeps.setdefault(module, []).extend(pkgs)
if concealedAPDeps:
exports = []
for module, pkgs in concealedAPDeps.items():
for pkg in pkgs:
exports.append('--add-exports=' + module + '/' + pkg + '=ALL-UNNAMED')
mx.warn('Annotation processor(s) for ' + p.name + ' uses non-exported module packages, requiring ' +
'the following to be added to eclipse.ini:\n' +
'\n'.join(exports))
out.close('factorypath')
mx.update_file(join(project_loc, '.factorypath'), out.xml(indent='\t', newl='\n'))
if files:
files.append(join(project_loc, '.factorypath'))
def _capture_eclipse_settings(logToConsole, absolutePaths):
# Capture interesting settings which drive the output of the projects.
# Changes to these values should cause regeneration of the project files.
settings = 'logToConsole=%s\n' % logToConsole
settings = settings + 'absolutePaths=%s\n' % absolutePaths
for name, value in mx_ideconfig._get_ide_envvars().items():
settings = settings + '%s=%s\n' % (name, value)
return settings
def _eclipseinit_suite(s, buildProcessorJars=True, refreshOnly=False, logToConsole=False, force=False, absolutePaths=False, pythonProjects=False):
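    """
    Generates Eclipse configurations for all projects and JAR distributions of
    the suite `s`, zipping the results so that up-to-date configurations can
    be detected and skipped on subsequent runs.
    """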
# a binary suite archive is immutable and no project sources, only the -sources.jar
# TODO We may need the project (for source debugging) but it needs different treatment
if isinstance(s, mx.BinarySuite):
return
suite_config_dir = mx.ensure_dir_exists(s.get_mx_output_dir())
configZip = mx.TimeStampFile(join(suite_config_dir, 'eclipse-config.zip'))
configLibsZip = join(suite_config_dir, 'eclipse-config-libs.zip')
if refreshOnly and not configZip.exists():
return
settingsFile = join(suite_config_dir, 'eclipse-project-settings')
mx.update_file(settingsFile, _capture_eclipse_settings(logToConsole, absolutePaths))
if not force and mx_ideconfig._check_ide_timestamp(s, configZip, 'eclipse', settingsFile):
mx.logv('[Eclipse configurations for {} are up to date - skipping]'.format(s.name))
return
files = []
libFiles = []
if buildProcessorJars:
files += mx._processorjars_suite(s)
for p in s.projects:
code = mx._function_code(p._eclipseinit)
if 'absolutePaths' in code.co_varnames[:code.co_argcount]:
p._eclipseinit(files, libFiles, absolutePaths=absolutePaths)
else:
# Support legacy signature
p._eclipseinit(files, libFiles)
jdk = mx.get_jdk(tag='default')
_, launchFile = make_eclipse_attach(s, 'localhost', '8000', deps=mx.dependencies(), jdk=jdk)
if launchFile:
files.append(launchFile)
# Create an Eclipse project for each distribution that will create/update the archive
# for the distribution whenever any (transitively) dependent project of the
# distribution is updated.
for dist in s.dists:
if not dist.isJARDistribution():
continue
project_loc = dist.get_ide_project_dir()
if not project_loc:
continue
mx.ensure_dir_exists(project_loc)
relevantResources = []
relevantResourceDeps = set(dist.archived_deps())
for d in sorted(relevantResourceDeps):
# Eclipse does not seem to trigger a build for a distribution if the references
# to the constituent projects are of type IRESOURCE_PROJECT.
if d.isJavaProject():
for srcDir in d.srcDirs:
relevantResources.append(RelevantResource('/' + d.name + '/' + srcDir, IRESOURCE_FOLDER))
relevantResources.append(RelevantResource('/' + d.name + '/' + _get_eclipse_output_path(project_loc, d), IRESOURCE_FOLDER))
# make sure there is at least one entry otherwise all resources will be implicitly relevant
if not relevantResources:
relevantResources.append(RelevantResource(get_eclipse_project_rel_locationURI(dist.path, project_loc), IRESOURCE_FOLDER))
use_async_distributions = mx.env_var_to_bool('MX_IDE_ECLIPSE_ASYNC_DISTRIBUTIONS')
# if a distribution is used as annotation processor we need to refresh the project
# in order to make eclipse reload the annotation processor jar on changes.
out = mx.XMLDoc()
out.open('projectDescription')
out.element('name', data=dist.name)
out.element('comment', data='Updates ' + dist.path + ' if a project dependency of ' + dist.name + ' is updated')
out.open('projects')
for d in sorted(relevantResourceDeps):
out.element('project', data=d.name)
out.close('projects')
out.open('buildSpec')
dist.dir = project_loc
builders = _genEclipseBuilder(project_loc, out, dist, 'Create' + dist.name + 'Dist', '-v archive @' + dist.name,
relevantResources=relevantResources,
logToFile=True, refresh=True, isAsync=use_async_distributions,
logToConsole=logToConsole, appendToLogFile=False,
refreshFile='/{0}/{1}'.format(dist.name, basename(dist.path)))
files = files + builders
out.close('buildSpec')
out.open('natures')
out.element('nature', data='org.eclipse.jdt.core.javanature')
out.close('natures')
if dist.definedAnnotationProcessors:
linked_resources = [_eclipse_linked_resource(basename(dist.path), str(IRESOURCE_FILE), dist.path)]
_add_eclipse_linked_resources(out, project_loc, linked_resources, absolutePaths=absolutePaths)
out.close('projectDescription')
projectFile = join(project_loc, '.project')
mx.update_file(projectFile, out.xml(indent='\t', newl='\n'))
files.append(projectFile)
if pythonProjects:
project_loc = s.dir if s is mx._mx_suite else s.mxDir
linked_resources = []
source_path = _eclipse_project_rel(project_loc, s.name if s is mx._mx_suite else s.mxDir, linked_resources)
projectXml = mx.XMLDoc()
projectXml.open('projectDescription')
projectXml.element('name', data=s.name if s is mx._mx_suite else 'mx.' + s.name)
projectXml.element('comment')
projectXml.open('projects')
if s is not mx._mx_suite:
projectXml.element('project', data=mx._mx_suite.name)
processed_suites = set([s.name])
def _mx_projects_suite(visited_suite, suite_import):
if suite_import.name in processed_suites:
return
processed_suites.add(suite_import.name)
dep_suite = mx.suite(suite_import.name)
projectXml.element('project', data='mx.' + suite_import.name)
dep_suite.visit_imports(_mx_projects_suite)
s.visit_imports(_mx_projects_suite)
projectXml.close('projects')
projectXml.open('buildSpec')
projectXml.open('buildCommand')
projectXml.element('name', data='org.python.pydev.PyDevBuilder')
projectXml.element('arguments')
projectXml.close('buildCommand')
projectXml.close('buildSpec')
projectXml.open('natures')
projectXml.element('nature', data='org.python.pydev.pythonNature')
projectXml.close('natures')
_add_eclipse_linked_resources(projectXml, project_loc, linked_resources, absolutePaths=absolutePaths)
projectXml.open('filteredResources')
# Ignore all *.pyc files
projectXml.open('filter')
projectXml.element('id', data='1')
projectXml.element('name')
projectXml.element('type', data='22')
projectXml.open('matcher')
projectXml.element('id', data='org.eclipse.ui.ide.multiFilter')
projectXml.element('arguments', data='1.0-name-matches-false-false-*.pyc')
projectXml.close('matcher')
projectXml.close('filter')
        # Ignore all __pycache__ directories
projectXml.open('filter')
projectXml.element('id', data='1')
projectXml.element('name')
projectXml.element('type', data='26')
projectXml.open('matcher')
projectXml.element('id', data='org.eclipse.ui.ide.multiFilter')
projectXml.element('arguments', data='1.0-name-matches-false-false-__pycache__')
projectXml.close('matcher')
projectXml.close('filter')
projectXml.close('filteredResources')
projectXml.close('projectDescription')
projectFile = join(project_loc, '.project')
mx.update_file(projectFile, projectXml.xml(indent=' ', newl='\n'))
files.append(projectFile)
pydevProjectXml = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<?eclipse-pydev version="1.0"?>
<pydev_project>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 3.7</pydev_property>
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
<path>/{}</path>
</pydev_pathproperty>
</pydev_project>
""".format(source_path)
pydevProjectFile = join(project_loc, '.pydevproject')
mx.update_file(pydevProjectFile, pydevProjectXml)
files.append(pydevProjectFile)
mx_ideconfig._zip_files(files + [settingsFile], s.dir, configZip.path)
mx_ideconfig._zip_files(libFiles, s.dir, configLibsZip)
def _genEclipseBuilder(eclipseConfigRoot, dotProjectDoc, p, name, mxCommand, refresh=True, refreshFile=None, relevantResources=None, isAsync=False, logToConsole=False, logToFile=False, appendToLogFile=True, xmlIndent='\t', xmlStandalone=None):
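    """
    Generates an Eclipse external tool builder launch configuration that runs
    `mxCommand` for `p` and registers it as a build command in `dotProjectDoc`.

    Returns the list of files written (i.e., the generated .launch file).
    """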
externalToolDir = join(eclipseConfigRoot, '.externalToolBuilders')
launchOut = mx.XMLDoc()
consoleOn = 'true' if logToConsole else 'false'
launchOut.open('launchConfiguration', {'type' : 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType'})
launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.core.capture_output', 'value': consoleOn})
launchOut.open('mapAttribute', {'key' : 'org.eclipse.debug.core.environmentVariables'})
for key, value in mx_ideconfig._get_ide_envvars().items():
launchOut.element('mapEntry', {'key' : key, 'value' : value})
launchOut.close('mapAttribute')
if refresh:
if refreshFile is None:
refreshScope = '${project}'
else:
refreshScope = '${working_set:<?xml version="1.0" encoding="UTF-8"?><resources><item path="' + refreshFile + '" type="' + str(IRESOURCE_FILE) + '"/></resources>}'
launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.core.ATTR_REFRESH_RECURSIVE', 'value': 'false'})
launchOut.element('stringAttribute', {'key' : 'org.eclipse.debug.core.ATTR_REFRESH_SCOPE', 'value': refreshScope})
if relevantResources:
# http://grepcode.com/file/repository.grepcode.com/java/eclipse.org/4.4.2/org.eclipse.debug/core/3.9.1/org/eclipse/debug/core/RefreshUtil.java#169
resources = '${working_set:<?xml version="1.0" encoding="UTF-8"?><resources>'
for relevantResource in relevantResources:
resources += '<item path="' + relevantResource.path + '" type="' + str(relevantResource.type) + '"/>'
resources += '</resources>}'
launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_BUILD_SCOPE', 'value': resources})
launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.ui.ATTR_CONSOLE_OUTPUT_ON', 'value': consoleOn})
launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND', 'value': 'true' if isAsync else 'false'})
if logToFile:
logFile = join(externalToolDir, name + '.log')
launchOut.element('stringAttribute', {'key' : 'org.eclipse.debug.ui.ATTR_CAPTURE_IN_FILE', 'value': logFile})
launchOut.element('booleanAttribute', {'key' : 'org.eclipse.debug.ui.ATTR_APPEND_TO_FILE', 'value': 'true' if appendToLogFile else 'false'})
# expect to find the OS command to invoke mx in the same directory
baseDir = dirname(os.path.abspath(__file__))
cmd = 'mx'
if mx.is_windows():
cmd = 'mx.cmd'
cmdPath = join(baseDir, cmd)
if not os.path.exists(cmdPath):
# backwards compatibility for when the commands lived in parent of mxtool
if cmd == 'mx':
cmd = 'mx.sh'
cmdPath = join(dirname(baseDir), cmd)
if not os.path.exists(cmdPath):
mx.abort('cannot locate ' + cmd)
launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_LOCATION', 'value': cmdPath})
launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_RUN_BUILD_KINDS', 'value': 'auto,full,incremental'})
launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS', 'value': mxCommand})
launchOut.element('booleanAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED', 'value': 'true'})
launchOut.element('stringAttribute', {'key' : 'org.eclipse.ui.externaltools.ATTR_WORKING_DIRECTORY', 'value': p.suite.dir})
launchOut.close('launchConfiguration')
mx.ensure_dir_exists(externalToolDir)
launchFile = join(externalToolDir, name + '.launch')
mx.update_file(launchFile, launchOut.xml(indent=xmlIndent, standalone=xmlStandalone, newl='\n'))
dotProjectDoc.open('buildCommand')
dotProjectDoc.element('name', data='org.eclipse.ui.externaltools.ExternalToolBuilder')
dotProjectDoc.element('triggers', data='auto,full,incremental,')
dotProjectDoc.open('arguments')
dotProjectDoc.open('dictionary')
dotProjectDoc.element('key', data='LaunchConfigHandle')
dotProjectDoc.element('value', data='<project>/.externalToolBuilders/' + name + '.launch')
dotProjectDoc.close('dictionary')
dotProjectDoc.open('dictionary')
dotProjectDoc.element('key', data='incclean')
dotProjectDoc.element('value', data='true')
dotProjectDoc.close('dictionary')
dotProjectDoc.close('arguments')
dotProjectDoc.close('buildCommand')
return [launchFile]
def generate_eclipse_workingsets():
"""
    Populates the workspace's working set configuration with working sets generated from the project data of the primary suite.
    If the workspace already contains working set definitions, the existing ones are retained and extended.
    If mx/env does not contain a WORKSPACE definition pointing to the workspace root directory, a parent search from the primary suite directory is performed.
    If no workspace root directory can be identified, the primary suite directory is used and the user has to place the workingsets.xml file there by hand.
"""
# identify the location where to look for workingsets.xml
wsfilename = 'workingsets.xml'
wsloc = '.metadata/.plugins/org.eclipse.ui.workbench'
if 'WORKSPACE' in os.environ:
expected_wsroot = os.environ['WORKSPACE']
else:
expected_wsroot = mx.primary_suite().dir
wsroot = _find_eclipse_wsroot(expected_wsroot)
if wsroot is None:
# failed to find it
wsroot = expected_wsroot
wsdir = join(wsroot, wsloc)
if not exists(wsdir):
wsdir = wsroot
mx.logv('Could not find Eclipse metadata directory. Please place ' + wsfilename + ' in ' + wsloc + ' manually.')
wspath = join(wsdir, wsfilename)
def _add_to_working_set(key, value):
if key not in workingSets:
workingSets[key] = [value]
else:
workingSets[key].append(value)
# gather working set info from project data
workingSets = dict()
for p in mx.projects():
if not p.isJavaProject():
continue
_add_to_working_set('Suite ' + p.suite.name, p.name)
if p.workingSets is None:
continue
for w in p.workingSets.split(","):
_add_to_working_set(w, p.name)
for dist in mx.distributions():
if not dist.isJARDistribution():
continue
projectDir = dist.get_ide_project_dir()
if not projectDir:
continue
_add_to_working_set('Suite ' + dist.suite.name, dist.name)
    # the mx metadata directories are included in the appropriate working sets
_add_to_working_set('MX', 'mxtool')
for suite in mx.suites(True):
_add_to_working_set('MX', basename(suite.mxDir))
_add_to_working_set('Suite ' + suite.name, basename(suite.mxDir))
if exists(wspath):
wsdoc = _copy_workingset_xml(wspath, workingSets)
else:
wsdoc = _make_workingset_xml(workingSets)
if mx.update_file(wspath, wsdoc.xml(newl='\n')):
mx.log('Please restart Eclipse instances for this workspace to see some of the effects.')
return wsroot
def _find_eclipse_wsroot(wsdir):
md = join(wsdir, '.metadata')
if exists(md):
return wsdir
split = os.path.split(wsdir)
if split[0] == wsdir: # root directory
return None
else:
return _find_eclipse_wsroot(split[0])
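# Illustrative behaviour of the parent search above (hypothetical paths):
# _find_eclipse_wsroot('/home/user/ws/suite') returns '/home/user/ws' when
# '/home/user/ws/.metadata' exists, and None once the filesystem root is
# reached without finding a '.metadata' directory.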
def _make_workingset_xml(workingSets):
wsdoc = mx.XMLDoc()
wsdoc.open('workingSetManager')
for w in sorted(workingSets.keys()):
_workingset_open(wsdoc, w)
for p in workingSets[w]:
_workingset_element(wsdoc, p)
wsdoc.close('workingSet')
wsdoc.close('workingSetManager')
return wsdoc
def _copy_workingset_xml(wspath, workingSets):
target = mx.XMLDoc()
target.open('workingSetManager')
parser = xml.parsers.expat.ParserCreate()
class ParserState(object):
def __init__(self):
self.current_ws_name = 'none yet'
self.current_ws = None
self.seen_ws = list()
self.seen_projects = list()
self.aggregate_ws = False
self.nested_ws = False
ps = ParserState()
# parsing logic
def _ws_start(name, attributes):
if name == 'workingSet':
if 'name' in attributes:
ps.current_ws_name = attributes['name']
if 'aggregate' in attributes and attributes['aggregate'] == 'true':
ps.aggregate_ws = True
ps.current_ws = None
elif ps.current_ws_name in workingSets:
ps.current_ws = workingSets[ps.current_ws_name]
ps.seen_ws.append(ps.current_ws_name)
ps.seen_projects = list()
else:
ps.current_ws = None
target.open(name, attributes)
parser.StartElementHandler = _ws_item
def _ws_end(name):
closeAndResetHandler = False
if name == 'workingSet':
if ps.aggregate_ws:
if ps.nested_ws:
ps.nested_ws = False
else:
ps.aggregate_ws = False
closeAndResetHandler = True
else:
                if ps.current_ws is not None:
                    for p in ps.current_ws:
                        if p not in ps.seen_projects:
_workingset_element(target, p)
closeAndResetHandler = True
if closeAndResetHandler:
target.close('workingSet')
parser.StartElementHandler = _ws_start
elif name == 'workingSetManager':
# process all working sets that are new to the file
for w in sorted(workingSets.keys()):
                if w not in ps.seen_ws:
_workingset_open(target, w)
for p in workingSets[w]:
_workingset_element(target, p)
target.close('workingSet')
def _ws_item(name, attributes):
if name == 'item':
if ps.current_ws is None:
target.element(name, attributes)
            elif 'elementID' not in attributes and 'factoryID' in attributes and 'path' in attributes and 'type' in attributes:
target.element(name, attributes)
p_name = attributes['path'][1:] # strip off the leading '/'
ps.seen_projects.append(p_name)
else:
p_name = attributes['elementID'][1:] # strip off the leading '='
_workingset_element(target, p_name)
ps.seen_projects.append(p_name)
elif name == 'workingSet':
ps.nested_ws = True
target.element(name, attributes)
# process document
parser.StartElementHandler = _ws_start
parser.EndElementHandler = _ws_end
with open(wspath, 'rb') as wsfile:
parser.ParseFile(wsfile)
target.close('workingSetManager')
return target
def _workingset_open(wsdoc, ws):
wsdoc.open('workingSet', {'editPageID': 'org.eclipse.jdt.ui.JavaWorkingSetPage', 'factoryID': 'org.eclipse.ui.internal.WorkingSetFactory', 'id': 'wsid_' + ws, 'label': ws, 'name': ws})
def _workingset_element(wsdoc, p):
wsdoc.element('item', {'elementID': '=' + p, 'factoryID': 'org.eclipse.jdt.ui.PersistableJavaElementFactory'})
### ~~~~~~~~~~~~~ _private, eclipse
def _copy_eclipse_settings(project_loc, p, files=None):
processors = p.annotation_processors()
settingsDir = mx.ensure_dir_exists(join(project_loc, ".settings"))
for name, sources in p.eclipse_settings_sources().items():
out = StringIO()
print('# GENERATED -- DO NOT EDIT', file=out)
for source in sources:
print('# Source:', source, file=out)
with open(source) as f:
print(f.read(), file=out)
if p.javaCompliance:
jc = p.javaCompliance if p.javaCompliance.value < _max_Eclipse_JavaExecutionEnvironment else mx.JavaCompliance(_max_Eclipse_JavaExecutionEnvironment)
content = out.getvalue().replace('${javaCompliance}', str(jc))
else:
content = out.getvalue()
if processors:
content = content.replace('org.eclipse.jdt.core.compiler.processAnnotations=disabled', 'org.eclipse.jdt.core.compiler.processAnnotations=enabled')
mx.update_file(join(settingsDir, name), content)
if files:
files.append(join(settingsDir, name))
| gpl-2.0 | -6,348,510,353,586,996,000 | 47.333333 | 243 | 0.628135 | false |
francisco-dlp/hyperspy | hyperspy/tests/drawing/test_plot_markers.py | 1 | 20671 | # Copyright 2007-2016 The HyperSpy developers
#
# This file is part of HyperSpy.
#
# HyperSpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HyperSpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HyperSpy. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import pytest
from hyperspy.misc.test_utils import update_close_figure, sanitize_dict
from hyperspy.signals import Signal2D, Signal1D, BaseSignal
from hyperspy.utils import markers, stack
from hyperspy.drawing.marker import dict2marker
from hyperspy.datasets.example_signals import EDS_TEM_Spectrum
default_tol = 2.0
baseline_dir = 'plot_markers'
style_pytest_mpl = 'default'
class TestMarkers:
def test_get_data(self):
s = Signal2D(np.zeros([3, 2, 2]))
m = markers.line_segment(x1=list(range(3)),
x2=list(range(3)),
y1=1.3,
y2=1.5)
m.axes_manager = s.axes_manager
assert m.get_data_position('x1') == 0
assert m.get_data_position('y1') == 1.3
s.axes_manager[0].index = 2
assert m.get_data_position('x1') == 2
assert m.get_data_position('y1') == 1.3
def test_iterate_strings(self):
s = Signal2D(np.zeros([3, 2, 2]))
m = markers.text(x=list(range(3)),
y=list(range(3)),
text=['one', 'two', 'three'])
m.axes_manager = s.axes_manager
assert m.get_data_position('text') == 'one'
s.axes_manager[0].index = 2
assert m.get_data_position('text') == 'three'
def test_get_one_string(self):
s = Signal2D(np.zeros([3, 2, 2]))
m = markers.text(x=list(range(3)),
y=list(range(3)),
text='one')
m.axes_manager = s.axes_manager
assert m.get_data_position('text') == 'one'
s.axes_manager[0].index = 2
assert m.get_data_position('text') == 'one'
def test_get_data_array(self):
s = Signal2D(np.zeros([2, 2, 2, 2]))
m = markers.line_segment(x1=[[1.1, 1.2], [1.3, 1.4]], x2=1.1, y1=1.3,
y2=1.5)
m.axes_manager = s.axes_manager
assert m.get_data_position('x1') == 1.1
s.axes_manager[0].index = 1
assert m.get_data_position('x1') == 1.2
s.axes_manager[1].index = 1
assert m.get_data_position('x1') == 1.4
def test_set_get_data(self):
m = markers.point(x=0, y=1.3)
assert m.data['x1'] == 0
assert m.data['y1'] == 1.3
m.add_data(y1=0.3)
assert m.data['x1'] == 0
assert m.data['y1'] == 0.3
m.set_data(y1=1.3)
assert m.data['x1'][()][()] is None
assert m.data['y1'] == 1.3
assert m.data['x1'].dtype == np.dtype('O')
m.add_data(y1=[1, 2])
assert m.data['y1'][()].shape == (2,)
def test_markers_properties(self):
m = markers.text(x=1, y=2, text='a')
m.set_marker_properties(fontsize=30, color='red')
assert m.marker_properties == {'color': 'red', 'fontsize': 30}
def test_auto_update(self):
m = markers.text(y=1, x=2, text='a')
assert m.auto_update is False
m = markers.text(y=[1, 2], x=2, text='a')
assert m.auto_update is True
m.add_data(y1=1)
assert m.auto_update is False
m.add_data(y1=[1, 2])
assert m.auto_update is True
def test_get_data_shape_point(self):
m0 = markers.point(5, 5)
m1 = markers.point((5, 10), (5, 10))
m2 = markers.point(((12, 2, 9), (1, 2, 3)), ((2, 5, 1), (3, 9, 2)))
m3 = markers.vertical_line(((12, 2), (2, 5), (9, 2)))
m4 = markers.point(5, 5)
m4.data['x1'][()] = np.array(None, dtype=np.object)
m4.data['y1'][()] = np.array(None, dtype=np.object)
m5 = markers.vertical_line(9)
m6 = markers.rectangle(1, 5, 6, 8)
m7 = markers.rectangle((1, 2), (5, 6), (6, 7), (8, 9))
m8 = markers.point(
np.arange(256).reshape(2, 2, 2, 2, 2, 2, 2, 2),
np.arange(256).reshape(2, 2, 2, 2, 2, 2, 2, 2))
assert m0._get_data_shape() == ()
assert m1._get_data_shape() == (2,)
assert m2._get_data_shape() == (2, 3)
assert m3._get_data_shape() == (3, 2)
with pytest.raises(ValueError):
assert m4._get_data_shape() == ()
assert m5._get_data_shape() == ()
assert m6._get_data_shape() == ()
assert m7._get_data_shape() == (2,)
assert m8._get_data_shape() == (2, 2, 2, 2, 2, 2, 2, 2)
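        # (Inference from the assertions above, not a documented contract:
        # _get_data_shape() reports the navigation shape of the marker data;
        # scalars give (), nested sequences give their outer shape, and
        # unset data raises ValueError.)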
def test_add_marker_not_plot(self):
        # This will do nothing, since plot_marker=False and permanent=False,
        # so this call will only emit a logger warning
s = Signal1D(np.arange(10))
m = markers.point(x=5, y=5)
s.add_marker(m, plot_marker=False)
def test_add_marker_signal1d_navigation_dim(self):
s = Signal1D(np.zeros((3, 50, 50)))
m0 = markers.point(5, 5)
m1 = markers.point((5, 10), (10, 15))
m2 = markers.point(np.zeros((3, 50)), np.zeros((3, 50)))
s.add_marker(m0)
with pytest.raises(ValueError):
s.add_marker(m1)
s.add_marker(m2)
def test_add_marker_signal2d_navigation_dim_vertical_line(self):
s = Signal2D(np.arange(2*3*8*9).reshape(2, 3, 8, 9))
marker_pos_list = [[1, 3, 5], [2, 4, 6]]
m = markers.vertical_line(marker_pos_list)
s.add_marker(m)
s.axes_manager.indices = (0, 1)
for iy, temp_marker_list in enumerate(marker_pos_list):
for ix, value in enumerate(temp_marker_list):
s.axes_manager.indices = (ix, iy)
vertical_line = s._plot.signal_plot.figure.axes[0].lines[1]
assert value == vertical_line.get_data()[0]
def test_add_marker_signal2d_navigation_dim(self):
s = Signal2D(np.zeros((3, 50, 50)))
m0 = markers.point(5, 5)
m1 = markers.point((5, 10), (10, 15))
m2 = markers.point(np.zeros((3, )), np.zeros((3, )))
s.add_marker(m0)
with pytest.raises(ValueError):
s.add_marker(m1)
s.add_marker(m2)
def test_add_markers_as_list(self):
s = Signal1D(np.arange(10))
marker_list = []
for i in range(12):
marker_list.append(markers.point(4, 8))
s.add_marker(marker_list)
class Test_permanent_markers:
def test_add_permanent_marker(self):
s = Signal1D(np.arange(10))
m = markers.point(x=5, y=5)
s.add_marker(m, permanent=True)
assert list(s.metadata.Markers)[0][1] == m
def test_add_permanent_marker_not_plot(self):
s = Signal1D(np.arange(10))
m = markers.point(x=5, y=5)
s.add_marker(m, permanent=True, plot_marker=False)
assert list(s.metadata.Markers)[0][1] == m
def test_remove_permanent_marker_name(self):
s = Signal1D(np.arange(10))
m = markers.point(x=5, y=5)
m.name = 'test'
s.add_marker(m, permanent=True)
assert list(s.metadata.Markers)[0][1] == m
del s.metadata.Markers.test
assert len(list(s.metadata.Markers)) == 0
def test_permanent_marker_names(self):
s = Signal1D(np.arange(10))
m0 = markers.point(x=5, y=5)
m1 = markers.point(x=5, y=5)
m0.name = 'test'
m1.name = 'test'
s.add_marker(m0, permanent=True)
s.add_marker(m1, permanent=True)
assert s.metadata.Markers.test == m0
assert m0.name == 'test'
assert s.metadata.Markers.test1 == m1
assert m1.name == 'test1'
def test_add_permanent_marker_twice(self):
s = Signal1D(np.arange(10))
m = markers.point(x=5, y=5)
s.add_marker(m, permanent=True)
with pytest.raises(ValueError):
s.add_marker(m, permanent=True)
def test_add_permanent_marker_twice_different_signal(self):
s0 = Signal1D(np.arange(10))
s1 = Signal1D(np.arange(10))
m = markers.point(x=5, y=5)
s0.add_marker(m, permanent=True)
with pytest.raises(ValueError):
s1.add_marker(m, permanent=True)
def test_add_several_permanent_markers(self):
s = Signal1D(np.arange(10))
m_point = markers.point(x=5, y=5)
m_line = markers.line_segment(x1=5, x2=10, y1=5, y2=10)
m_vline = markers.vertical_line(x=5)
m_vline_segment = markers.vertical_line_segment(x=4, y1=3, y2=6)
m_hline = markers.horizontal_line(y=5)
m_hline_segment = markers.horizontal_line_segment(x1=1, x2=9, y=5)
m_rect = markers.rectangle(x1=1, x2=3, y1=5, y2=10)
m_text = markers.text(x=1, y=5, text="test")
s.add_marker(m_point, permanent=True)
s.add_marker(m_line, permanent=True)
s.add_marker(m_vline, permanent=True)
s.add_marker(m_vline_segment, permanent=True)
s.add_marker(m_hline, permanent=True)
s.add_marker(m_hline_segment, permanent=True)
s.add_marker(m_rect, permanent=True)
s.add_marker(m_text, permanent=True)
assert len(list(s.metadata.Markers)) == 8
with pytest.raises(ValueError):
s.add_marker(m_rect, permanent=True)
def test_add_markers_as_list(self):
s = Signal1D(np.arange(10))
marker_list = []
for i in range(10):
marker_list.append(markers.point(1, 2))
s.add_marker(marker_list, permanent=True)
assert len(s.metadata.Markers) == 10
def test_add_markers_as_list_add_same_twice(self):
s = Signal1D(np.arange(10))
marker_list = []
for i in range(10):
marker_list.append(markers.point(1, 2))
s.add_marker(marker_list, permanent=True)
with pytest.raises(ValueError):
s.add_marker(marker_list, permanent=True)
def test_add_markers_as_list_add_different_twice(self):
s = Signal1D(np.arange(10))
marker_list0 = []
for i in range(10):
marker_list0.append(markers.point(1, 2))
s.add_marker(marker_list0, permanent=True)
assert len(s.metadata.Markers) == 10
marker_list1 = []
for i in range(10):
marker_list1.append(markers.point(4, 8))
s.add_marker(marker_list1, permanent=True)
assert len(s.metadata.Markers) == 20
def test_add_permanent_marker_signal2d(self):
s = Signal2D(np.arange(100).reshape(10, 10))
m = markers.point(x=5, y=5)
s.add_marker(m, permanent=True)
assert list(s.metadata.Markers)[0][1] == m
def test_deepcopy_permanent_marker(self):
x, y, color, name = 2, 9, 'blue', 'test_point'
s = Signal2D(np.arange(100).reshape(10, 10))
m = markers.point(x=x, y=y, color=color)
m.name = name
s.add_marker(m, permanent=True)
s1 = s.deepcopy()
m1 = s1.metadata.Markers.get_item(name)
assert m.get_data_position('x1') == m1.get_data_position('x1')
assert m.get_data_position('y1') == m1.get_data_position('y1')
assert m.name == m1.name
assert m.marker_properties['color'] == m1.marker_properties['color']
def test_dict2marker(self):
m_point0 = markers.point(x=5, y=5)
m_point1 = markers.point(x=(5, 10), y=(1, 5))
m_line = markers.line_segment(x1=5, x2=10, y1=5, y2=10)
m_vline = markers.vertical_line(x=5)
m_vline_segment = markers.vertical_line_segment(x=4, y1=3, y2=6)
m_hline = markers.horizontal_line(y=5)
m_hline_segment = markers.horizontal_line_segment(x1=1, x2=9, y=5)
m_rect = markers.rectangle(x1=1, x2=3, y1=5, y2=10)
m_text = markers.text(x=1, y=5, text="test")
m_point0_new = dict2marker(m_point0._to_dictionary(), m_point0.name)
m_point1_new = dict2marker(m_point1._to_dictionary(), m_point1.name)
m_line_new = dict2marker(m_line._to_dictionary(), m_line.name)
m_vline_new = dict2marker(m_vline._to_dictionary(), m_vline.name)
m_vline_segment_new = dict2marker(
m_vline_segment._to_dictionary(), m_vline_segment.name)
m_hline_new = dict2marker(m_hline._to_dictionary(), m_hline.name)
m_hline_segment_new = dict2marker(
m_hline_segment._to_dictionary(), m_hline_segment.name)
m_rect_new = dict2marker(m_rect._to_dictionary(), m_rect.name)
m_text_new = dict2marker(m_text._to_dictionary(), m_text.name)
m_point0_dict = sanitize_dict(m_point0._to_dictionary())
m_point1_dict = sanitize_dict(m_point1._to_dictionary())
m_line_dict = sanitize_dict(m_line._to_dictionary())
m_vline_dict = sanitize_dict(m_vline._to_dictionary())
m_vline_segment_dict = sanitize_dict(m_vline_segment._to_dictionary())
m_hline_dict = sanitize_dict(m_hline._to_dictionary())
m_hline_segment_dict = sanitize_dict(m_hline_segment._to_dictionary())
m_rect_dict = sanitize_dict(m_rect._to_dictionary())
m_text_dict = sanitize_dict(m_text._to_dictionary())
m_point0_new_dict = sanitize_dict(m_point0_new._to_dictionary())
m_point1_new_dict = sanitize_dict(m_point1_new._to_dictionary())
m_line_new_dict = sanitize_dict(m_line_new._to_dictionary())
m_vline_new_dict = sanitize_dict(m_vline_new._to_dictionary())
m_vline_segment_new_dict = sanitize_dict(
m_vline_segment_new._to_dictionary())
m_hline_new_dict = sanitize_dict(m_hline_new._to_dictionary())
m_hline_segment_new_dict = sanitize_dict(
m_hline_segment_new._to_dictionary())
m_rect_new_dict = sanitize_dict(m_rect_new._to_dictionary())
m_text_new_dict = sanitize_dict(m_text_new._to_dictionary())
assert m_point0_dict == m_point0_new_dict
assert m_point1_dict == m_point1_new_dict
assert m_line_dict == m_line_new_dict
assert m_vline_dict == m_vline_new_dict
assert m_vline_segment_dict == m_vline_segment_new_dict
assert m_hline_dict == m_hline_new_dict
assert m_hline_segment_dict == m_hline_segment_new_dict
assert m_rect_dict == m_rect_new_dict
assert m_text_dict == m_text_new_dict
def _test_plot_rectange_markers():
# Create test image 100x100 pixels:
im = Signal2D(np.arange(100).reshape([10, 10]))
# Add four line markers:
m1 = markers.line_segment(
x1=2, y1=2, x2=7, y2=2, color='red', linewidth=3)
m2 = markers.line_segment(
x1=2, y1=2, x2=2, y2=7, color='red', linewidth=3)
m3 = markers.line_segment(
x1=2, y1=7, x2=7, y2=7, color='red', linewidth=3)
m4 = markers.line_segment(
x1=7, y1=2, x2=7, y2=7, color='red', linewidth=3)
# Add rectangle marker at same position:
m = markers.rectangle(x1=2, x2=7, y1=2, y2=7,
linewidth=4, color='blue', ls='dotted')
# Plot image and add markers to img:
im.plot()
im.add_marker(m)
im.add_marker(m1)
im.add_marker(m2)
im.add_marker(m3)
im.add_marker(m4)
return im
@pytest.mark.mpl_image_compare(
baseline_dir=baseline_dir, tolerance=default_tol, style=style_pytest_mpl)
def test_plot_rectange_markers():
im = _test_plot_rectange_markers()
return im._plot.signal_plot.figure
@update_close_figure
def test_plot_rectange_markers_close():
return _test_plot_rectange_markers() # return for @update_close_figure
def _test_plot_point_markers():
width = 100
data = np.arange(width * width).reshape((width, width))
s = Signal2D(data)
x, y = 10 * np.arange(4), 15 * np.arange(4)
color = ['yellow', 'green', 'red', 'blue']
for xi, yi, c in zip(x, y, color):
m = markers.point(x=xi, y=yi, color=c)
s.add_marker(m)
return s
@pytest.mark.mpl_image_compare(
baseline_dir=baseline_dir, tolerance=default_tol, style=style_pytest_mpl)
def test_plot_point_markers():
s = _test_plot_point_markers()
return s._plot.signal_plot.figure
@update_close_figure
def test_plot_point_markers_close():
return _test_plot_point_markers()
def _test_plot_text_markers():
s = Signal1D(np.arange(100).reshape([10, 10]))
s.plot(navigator='spectrum')
for i in range(s.axes_manager.shape[0]):
m = markers.text(y=s.sum(-1).data[i] + 5, x=i, text='abcdefghij'[i])
s.add_marker(m, plot_on_signal=False)
x = s.axes_manager.shape[-1] / 2 # middle of signal plot
m = markers.text(x=x, y=s.inav[x].data + 2, text=[i for i in 'abcdefghij'])
s.add_marker(m)
return s
@pytest.mark.mpl_image_compare(
baseline_dir=baseline_dir, tolerance=default_tol, style=style_pytest_mpl)
def test_plot_text_markers_nav():
s = _test_plot_text_markers()
return s._plot.navigator_plot.figure
@pytest.mark.mpl_image_compare(
baseline_dir=baseline_dir, tolerance=default_tol, style=style_pytest_mpl)
def test_plot_text_markers_sig():
s = _test_plot_text_markers()
return s._plot.signal_plot.figure
@update_close_figure
def test_plot_text_markers_close():
return _test_plot_text_markers()
def _test_plot_line_markers():
im = Signal2D(np.arange(100 * 100).reshape((100, 100)))
m0 = markers.vertical_line_segment(x=20, y1=30, y2=70, linewidth=4,
color='red', linestyle='dotted')
im.add_marker(m0)
m1 = markers.horizontal_line_segment(x1=30, x2=20, y=80, linewidth=8,
color='blue', linestyle='-')
im.add_marker(m1)
m2 = markers.vertical_line(50, linewidth=12, color='green')
im.add_marker(m2)
m3 = markers.horizontal_line(50, linewidth=10, color='yellow')
im.add_marker(m3)
return im
@pytest.mark.mpl_image_compare(
baseline_dir=baseline_dir, tolerance=default_tol, style=style_pytest_mpl)
def test_plot_line_markers():
im = _test_plot_line_markers()
return im._plot.signal_plot.figure
@update_close_figure
def test_plot_line_markers_close():
return _test_plot_line_markers()
@pytest.mark.mpl_image_compare(
baseline_dir=baseline_dir, tolerance=default_tol, style=style_pytest_mpl)
def test_plot_eds_lines():
a = EDS_TEM_Spectrum()
s = stack([a, a * 5])
s.plot(True)
s.axes_manager.navigation_axes[0].index = 1
return s._plot.signal_plot.figure
def test_iterate_markers():
from skimage.feature import peak_local_max
import scipy.misc
ims = BaseSignal(scipy.misc.face()).as_signal2D([1, 2])
index = np.array([peak_local_max(im.data, min_distance=100,
num_peaks=4) for im in ims])
# Add multiple markers
for i in range(4):
xs = index[:, i, 1]
ys = index[:, i, 0]
m = markers.point(x=xs, y=ys, color='red')
ims.add_marker(m, plot_marker=True, permanent=True)
m = markers.text(x=10 + xs, y=10 + ys, text=str(i), color='k')
ims.add_marker(m, plot_marker=True, permanent=True)
xs = index[:, :, 1]
ys = index[:, :, 0]
m = markers.rectangle(np.min(xs, 1),
np.min(ys, 1),
np.max(xs, 1),
np.max(ys, 1),
color='green')
ims.add_marker(m, plot_marker=True, permanent=True)
for im in ims:
m_original = ims.metadata.Markers
m_iterated = im.metadata.Markers
for key in m_original.keys():
mo = m_original[key]
mi = m_iterated[key]
assert mo.__class__.__name__ == mi.__class__.__name__
assert mo.name == mi.name
assert mo.get_data_position('x1') == mi.get_data_position('x1')
assert mo.get_data_position('y1') == mi.get_data_position('y1')
assert mo.get_data_position('text') == mi.get_data_position('text')
assert mo.marker_properties['color'] == \
mi.marker_properties['color']
@update_close_figure
def test_plot_eds_markers_close():
s = EDS_TEM_Spectrum()
s.plot(True)
return s
def test_plot_eds_markers_no_energy():
s = EDS_TEM_Spectrum()
del s.metadata.Acquisition_instrument.TEM.beam_energy
s.plot(True)
| gpl-3.0 | -3,551,092,346,901,716,000 | 37.421933 | 79 | 0.58928 | false |
ClaudioNahmad/Servicio-Social | Parametros/CosmoMC/prerrequisitos/plc-2.0/.waf-1.8.8-eab538dea2e33915d3770ff3f393b18b/waflib/Tools/cxx.py | 1 | 1212 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! https://waf.io/book/index.html#_obtaining_the_waf_file
from waflib import TaskGen,Task,Utils
from waflib.Tools import c_preproc
from waflib.Tools.ccroot import link_task,stlink_task
@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
def cxx_hook(self,node):
return self.create_compiled_task('cxx',node)
if not'.c'in TaskGen.task_gen.mappings:
TaskGen.task_gen.mappings['.c']=TaskGen.task_gen.mappings['.cpp']
class cxx(Task.Task):
run_str='${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CPPFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()}'
vars=['CXXDEPS']
ext_in=['.h']
scan=c_preproc.scan
class cxxprogram(link_task):
run_str='${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB}'
vars=['LINKDEPS']
ext_out=['.bin']
inst_to='${BINDIR}'
class cxxshlib(cxxprogram):
inst_to='${LIBDIR}'
class cxxstlib(stlink_task):
pass
| gpl-3.0 | 5,481,147,303,242,751,000 | 45.615385 | 298 | 0.695545 | false |
Oxygem/canaryd | canaryd/__main__.py | 1 | 3515 | # canaryd
# File: canaryd/__main__.py
# Desc: entry point for canaryd
import logging
import signal
from time import time
from canaryd_packages import click
from canaryd.daemon import run_daemon
from canaryd.log import logger, setup_logging, setup_logging_from_settings
from canaryd.plugin import (
get_and_prepare_working_plugins,
get_plugin_states,
)
from canaryd.remote import backoff, ping, shutdown, sync_states
from canaryd.settings import ensure_config_directory, get_settings
from canaryd.version import __version__
class GracefulExitRequested(Exception):
pass
def handle_graceful_quit(signum, frame):
raise GracefulExitRequested('yawn')
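# Note: the handler above raises GracefulExitRequested so that the daemon
# loop unwinds through a normal exception path; main() catches it below and
# calls shutdown() exactly once before exiting.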
@click.command(context_settings={'help_option_names': ['-h', '--help']})
@click.option('-v', '--verbose', is_flag=True)
@click.option('-d', '--debug', is_flag=True)
@click.version_option(
version=__version__,
prog_name='canaryd',
message='%(prog)s: v%(version)s',
)
def main(verbose, debug):
'''
Run the canaryd daemon.
'''
log_level = setup_logging(verbose, debug)
logger.info('Starting canaryd v{0}'.format(__version__))
logger.info('Log level set to: {0}'.format(
logging.getLevelName(log_level),
))
# Ensure the config directory exists
ensure_config_directory()
# Load the settings, using our config file if provided
settings = get_settings()
# Setup any log file/syslog
setup_logging_from_settings(settings)
if not settings.api_key or not settings.server_id:
logger.critical('Missing api_key and/or server_id in config file')
return
# Initial ping for API presence
logger.info('Ping API...')
backoff(
ping, settings,
error_message='Could not ping',
max_wait=settings.collect_interval_s,
)
# Load the plugin list
plugins = get_and_prepare_working_plugins(settings)
# Get the initial state
logger.info('Getting initial state...')
start_time = time()
states = get_plugin_states(plugins, settings)
    # Filter out the non-working plugins and wrap each as a (command, data)
    # tuple. We don't track errors on the initial sync because canaryd often
    # starts early on a server, meaning some things aren't up yet. The next
    # state collection will collect and sync these.
working_states = []
for plugin, status_data in states:
status, data = status_data
if status is not True:
continue
working_states.append((plugin, ('SYNC', data)))
# Sync this state and get settings
logger.info('Syncing initial state...')
remote_settings = backoff(
sync_states, working_states, settings,
error_message='Could not sync state',
max_wait=settings.collect_interval_s,
)
# Update settings w/remote ones
settings.update(remote_settings)
# Run the loop
logger.info('Starting daemon loop...')
# Make previous states dict
previous_states = dict(
(plugin, status_data[1])
for plugin, status_data in working_states
)
# Now that we've settings - setup graceful (clean shutdown) exit handling
signal.signal(signal.SIGTERM, handle_graceful_quit)
signal.signal(signal.SIGINT, handle_graceful_quit)
try:
run_daemon(previous_states, settings, start_time=start_time)
except GracefulExitRequested:
shutdown(settings) # we're exiting, so only one shot at this
try:
main()
except Exception:
# TODO: public Sentry logging
raise
| mit | -8,296,029,549,149,108,000 | 26.460938 | 80 | 0.675676 | false |
DavideCanton/Python3 | quadtree/gui.py | 1 | 1615 | __author__ = 'davide'
from random import randint
from quadtree import QuadTree, Rect
import pygame
from pygame.constants import *
from pygame.color import THECOLORS
from pygame.draw import rect, circle, line
W = 800
H = 600
R = 2
N = 100
def col(name):
"""
@type name: str
@return the color as a tuple
"""
return THECOLORS[name]
def draw(surf, qt):
"""
@param surf: the surface
@type surf: pygame.Surface
@param qt: quadtree
@type qt: QuadTree
"""
for node in qt:
rb = node.bounds
rect_ = pygame.Rect(rb.x, rb.y, rb.w, rb.h)
if node.val:
circle(surf, col("red"), node.val[0], R)
rect(surf, col("black"), rect_, 1)
def main():
pygame.init()
screen = pygame.display.set_mode((W, H))
clock = pygame.time.Clock()
data = [(randint(0, W), randint(0, H)) for _ in range(N)]
qt = QuadTree([], W, H)
i = 0
going = True
while True:
for event in pygame.event.get():
if event.type == QUIT:
going = False
elif event.type == KEYDOWN and event.key == K_ESCAPE:
going = False
elif (event.type == KEYDOWN
and event.key == K_F4
and event.mod & KMOD_ALT):
going = False
if not going:
break
if i < len(data):
qt.add_node(data[i])
qt.assert_correct()
screen.fill(col("white"))
draw(screen, qt)
pygame.display.flip()
clock.tick(10)
        i += 1
    pygame.quit()
if __name__ == "__main__":
main() | gpl-3.0 | -9,208,982,842,127,232,000 | 19.987013 | 65 | 0.52322 | false |
joshuamckenty/yolo-octo-wookie | nova/cloudpipe/pipelib.py | 1 | 3846 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
CloudPipe - Build a user-data payload zip file, and launch
an instance with it.
"""
import logging
import os
import tempfile
import base64
from zipfile import ZipFile, ZIP_DEFLATED
from nova import exception
from nova import flags
from nova.auth import users
from nova import utils
from nova.endpoint import api
FLAGS = flags.FLAGS
flags.DEFINE_string('boot_script_template',
utils.abspath('cloudpipe/bootscript.sh'),
'Template for script to run on cloudpipe instance boot')
class CloudPipe(object):
def __init__(self, cloud_controller):
self.controller = cloud_controller
self.manager = users.UserManager.instance()
def launch_vpn_instance(self, project_id):
logging.debug( "Launching VPN for %s" % (project_id))
project = self.manager.get_project(project_id)
# Make a payload.zip
tmpfolder = tempfile.mkdtemp()
filename = "payload.zip"
zippath = os.path.join(tmpfolder, filename)
z = ZipFile(zippath, "w", ZIP_DEFLATED)
z.write(FLAGS.boot_script_template,'autorun.sh')
z.close()
key_name = self.setup_keypair(project.project_manager_id, project_id)
zippy = open(zippath, "r")
context = api.APIRequestContext(handler=None, user=project.project_manager, project=project)
reservation = self.controller.run_instances(context,
            # run_instances expects encoded user data; it is decoded in the get_metadata call.
            # autorun.sh also decodes the zip file, hence the double encoding.
user_data=zippy.read().encode("base64").encode("base64"),
max_count=1,
min_count=1,
instance_type='m1.tiny',
image_id=FLAGS.vpn_image_id,
key_name=key_name,
security_groups=["vpn-secgroup"])
zippy.close()
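        # Schematically, the user-data round trip is (Python 2 semantics,
        # illustrative only):
        #   payload = zip_bytes.encode("base64").encode("base64")  # sent above
        #   zip_bytes == payload.decode("base64").decode("base64")
        # where one decode happens in the metadata call and the other in
        # autorun.sh, per the inline comments above.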
def setup_keypair(self, user_id, project_id):
key_name = '%s%s' % (project_id, FLAGS.vpn_key_suffix)
try:
private_key, fingerprint = self.manager.generate_key_pair(user_id, key_name)
try:
key_dir = os.path.join(FLAGS.keys_path, user_id)
if not os.path.exists(key_dir):
os.makedirs(key_dir)
with open(os.path.join(key_dir, '%s.pem' % key_name),'w') as f:
f.write(private_key)
except:
pass
except exception.Duplicate:
pass
return key_name
# def setup_secgroups(self, username):
# conn = self.euca.connection_for(username)
# try:
# secgroup = conn.create_security_group("vpn-secgroup", "vpn-secgroup")
# secgroup.authorize(ip_protocol = "udp", from_port = "1194", to_port = "1194", cidr_ip = "0.0.0.0/0")
# secgroup.authorize(ip_protocol = "tcp", from_port = "80", to_port = "80", cidr_ip = "0.0.0.0/0")
# secgroup.authorize(ip_protocol = "tcp", from_port = "22", to_port = "22", cidr_ip = "0.0.0.0/0")
# except:
# pass
| apache-2.0 | 5,067,773,451,058,629,000 | 37.46 | 114 | 0.629485 | false |
imec-myhdl/pycontrol-gui | BlockEditor/supsisim/pysim.py | 1 | 1404 | #!/usr/bin/python
# aim for python 2/3 compatibility
from __future__ import (division, print_function, absolute_import,
unicode_literals)
from Qt import QtGui, QtWidgets, QtCore # see https://github.com/mottosso/Qt.py
import sys
import os
#try:
# sip.setapi('QString', 1)
#except ValueError:
# sip.setapi('QString', 2)
import threading
#import autopep8
from supsisim.pyEdit import SupsiSimMainWindow
from supsisim.library import Library
class supsisimul(threading.Thread):
    def __init__(self, filename='untitled', runflag=False):
        threading.Thread.__init__(self)
        if filename != 'untitled':
self.fname = QtCore.QFileInfo(filename)
self.mypath = str(self.fname.absolutePath())
self.fname = str(self.fname.baseName())
else:
self.fname = 'untitled'
self.mypath = os.getcwd()
self.runflag = runflag
def run(self):
app = QtWidgets.QApplication(sys.argv)
library = Library()
library.setGeometry(20, 100, 400, 768)
library.show()
main = SupsiSimMainWindow(library, self.fname, self.mypath, self.runflag)
main.setGeometry(500,100,1024,768)
main.show()
ret = app.exec_()
app.deleteLater()
def supsim(fn='untitled'):
th = supsisimul(fn)
th.start()
| lgpl-2.1 | 6,907,061,759,329,544,000 | 26.529412 | 81 | 0.608974 | false |
ddaan/django-arctic | tests/test_layout_mixin.py | 1 | 4936 | import pytest
from collections import OrderedDict
from arctic.mixins import LayoutMixin
from articles.forms import ArticleForm
from tests.conftest import get_form
from tests.factories import ArticleFactory
@pytest.fixture
def layout():
class Layout(LayoutMixin):
layout = None
def __init__(self):
self.object = ArticleFactory()
self.form = ArticleForm(instance=self.object)
self.get_form = get_form(self.form)
return Layout()
pytestmark = pytest.mark.django_db
def test_layout_example_1(layout):
layout.layout = ['title|8']
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] == '8'
def test_layout_example_2(layout):
layout.layout = [['title|3', 'title', 'title']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0][0]['name'] == 'title'
assert layout[0]['rows'][0][0]['column'] == '3'
assert layout[0]['rows'][0][1]['name'] == 'title'
assert layout[0]['rows'][0][1]['column'] == '4'
assert layout[0]['rows'][0][2]['name'] == 'title'
assert layout[0]['rows'][0][2]['column'] == '5'
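# Note: the numbers above imply a 12-column grid: 'title|3' pins the first
# field to 3 columns, and the remaining 9 are split 4/5 between the two
# unsized fields (an inference from these assertions, not a documented rule).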
def test_layout_example_3a(layout):
layout.layout = [['title|3', 'title', 'title', 'category', 'category']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0][0]['name'] == 'title'
assert layout[0]['rows'][0][0]['column'] == '3'
assert layout[0]['rows'][0][1]['name'] == 'title'
assert layout[0]['rows'][0][1]['column'] == '2'
assert layout[0]['rows'][0][2]['name'] == 'title'
assert layout[0]['rows'][0][2]['column'] == '2'
assert layout[0]['rows'][0][3]['name'] == 'category'
assert layout[0]['rows'][0][3]['column'] == '2'
assert layout[0]['rows'][0][4]['name'] == 'category'
assert layout[0]['rows'][0][4]['column'] == '3'
def test_layout_example_3b(layout):
layout.layout = ['title|3', 'title', 'title', ['category', 'category']]
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] is None
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is False
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] == '3'
assert layout[0]['rows'][1]['name'] == 'title'
assert layout[0]['rows'][1]['column'] is None
assert layout[0]['rows'][2]['name'] == 'title'
assert layout[0]['rows'][2]['column'] is None
assert layout[0]['rows'][3][0]['name'] == 'category'
assert layout[0]['rows'][3][0]['column'] == '6'
assert layout[0]['rows'][3][1]['name'] == 'category'
assert layout[0]['rows'][3][1]['column'] == '6'
def test_layout_example_4(layout):
layout.layout = OrderedDict([('-fieldset',
['title',
'title',
['category', 'updated_at|4']]),
('fieldset2|test description',
[['title|7', 'category']]),
('fieldset3',
['published'])])
layout = layout.get_layout()
assert layout[0]['fieldset']['title'] == 'fieldset'
assert layout[0]['fieldset']['description'] is None
assert layout[0]['fieldset']['collapsible'] is True
assert layout[0]['rows'][0]['name'] == 'title'
assert layout[0]['rows'][0]['column'] is None
assert layout[0]['rows'][1]['name'] == 'title'
assert layout[0]['rows'][1]['column'] is None
assert layout[0]['rows'][2][0]['name'] == 'category'
assert layout[0]['rows'][2][0]['column'] == '8'
assert layout[0]['rows'][2][1]['name'] == 'updated_at'
assert layout[0]['rows'][2][1]['column'] == '4'
assert layout[1]['fieldset']['title'] == 'fieldset2'
assert layout[1]['fieldset']['description'] == 'test description'
assert layout[1]['fieldset']['collapsible'] is False
assert layout[1]['rows'][0][0]['name'] == 'title'
assert layout[1]['rows'][0][0]['column'] == '7'
assert layout[1]['rows'][0][1]['name'] == 'category'
assert layout[1]['rows'][0][1]['column'] == '5'
assert layout[2]['fieldset']['title'] == 'fieldset3'
assert layout[2]['fieldset']['description'] is None
assert layout[2]['fieldset']['collapsible'] is False
assert layout[2]['rows'][0]['name'] == 'published'
assert layout[2]['rows'][0]['column'] is None
| mit | 3,694,713,306,793,887,000 | 38.174603 | 75 | 0.569692 | false |
bhermanmit/openmc | setup.py | 1 | 1926 | #!/usr/bin/env python
import glob
import numpy as np
try:
from setuptools import setup
have_setuptools = True
except ImportError:
from distutils.core import setup
have_setuptools = False
try:
from Cython.Build import cythonize
have_cython = True
except ImportError:
have_cython = False
kwargs = {'name': 'openmc',
'version': '0.8.0',
'packages': ['openmc', 'openmc.data', 'openmc.mgxs', 'openmc.model',
'openmc.stats'],
'scripts': glob.glob('scripts/openmc-*'),
# Metadata
'author': 'Will Boyd',
'author_email': '[email protected]',
'description': 'OpenMC Python API',
'url': 'https://github.com/mit-crpg/openmc',
'classifiers': [
'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Programming Language :: Python',
'Topic :: Scientific/Engineering'
]}
if have_setuptools:
kwargs.update({
# Required dependencies
'install_requires': ['six', 'numpy>=1.9', 'h5py', 'matplotlib'],
# Optional dependencies
'extras_require': {
'decay': ['uncertainties'],
'pandas': ['pandas>=0.17.0'],
'sparse' : ['scipy'],
'vtk': ['vtk', 'silomesh'],
'validate': ['lxml']
},
# Data files
'package_data': {
'openmc.data': ['mass.mas12', 'fission_Q_data_endfb71.h5']
},
})
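# With setuptools available, the optional extras declared above can be pulled
# in the usual way, e.g. ``pip install .[pandas,sparse]`` (illustrative
# command; the extra names come from extras_require).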
# If Cython is present, add resonance reconstruction capability
if have_cython:
kwargs.update({
'ext_modules': cythonize('openmc/data/reconstruct.pyx'),
'include_dirs': [np.get_include()]
})
setup(**kwargs)
| mit | -8,548,611,093,835,224,000 | 28.181818 | 78 | 0.545691 | false |
rdo-management/tuskar-ui | tuskar_ui/utils/utils.py | 1 | 4279 | # -*- coding: utf8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import csv
from itertools import izip
import re
from django.utils.translation import ugettext_lazy as _
CAMEL_RE = re.compile(r'([A-Z][a-z]+|[A-Z]+(?=[A-Z\s]|$))')
def de_camel_case(text):
"""Convert CamelCase names to human-readable format."""
return ' '.join(w.strip() for w in CAMEL_RE.split(text) if w.strip())
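# For example (illustrative, based on CAMEL_RE above):
#   de_camel_case('CamelCase')        -> 'Camel Case'
#   de_camel_case('HTTPResponseCode') -> 'HTTP Response Code'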
def list_to_dict(object_list, key_attribute='id'):
"""Converts an object list to a dict
:param object_list: list of objects to be put into a dict
:type object_list: list
:param key_attribute: object attribute used as index by dict
:type key_attribute: str
:return: dict containing the objects in the list
:rtype: dict
"""
return dict((getattr(o, key_attribute), o) for o in object_list)
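# For example, given hypothetical objects with o1.id == 1 and o2.id == 2:
#   list_to_dict([o1, o2])                        -> {1: o1, 2: o2}
#   list_to_dict([o1, o2], key_attribute='name')  -> indexed by .name instead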
def length(iterator):
"""A length function for iterators
Returns the number of items in the specified iterator. Note that this
function consumes the iterator in the process.
"""
return sum(1 for _item in iterator)
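# e.g. length(x for x in range(5)) == 5; the generator is consumed, so a
# second length() call on the same generator would return 0.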
def check_image_type(image, type):
"""Check if image 'type' property matches passed-in type.
If image has no 'type' property' return True, as we cannot
be sure what type of image it is.
"""
return (image.properties.get('type', type) == type)
def filter_items(items, **kwargs):
"""Filters the list of items and returns the filtered list.
Example usage:
>>> class Item(object):
... def __init__(self, index):
... self.index = index
... def __repr__(self):
... return '<Item index=%d>' % self.index
>>> items = [Item(i) for i in range(7)]
>>> list(filter_items(items, index=1))
[<Item index=1>]
>>> list(filter_items(items, index__in=(1, 2, 3)))
[<Item index=1>, <Item index=2>, <Item index=3>]
>>> list(filter_items(items, index__not_in=(1, 2, 3)))
[<Item index=0>, <Item index=4>, <Item index=5>, <Item index=6>]
"""
for item in items:
for name, value in kwargs.items():
if name.endswith('__in'):
if getattr(item, name[:-len('__in')]) not in value:
break
elif name.endswith('__not_in'):
if getattr(item, name[:-len('__not_in')]) in value:
break
else:
if getattr(item, name) != value:
break
else:
yield item
def safe_int_cast(value):
try:
return int(value)
except (TypeError, ValueError):
return 0
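# e.g. safe_int_cast('42') == 42, safe_int_cast(None) == 0, and
# safe_int_cast('4.2') == 0 (int('4.2') raises ValueError).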
def parse_csv_file(csv_file):
"""Parses given CSV file.
If there is no error, it returns list of dicts. When something went wrong,
list is empty, but warning contains appropriate information about
possible problems.
"""
parsed_data = []
for row in csv.reader(csv_file):
try:
driver = row[0].strip()
except IndexError:
raise ValueError(_("Unable to parse the CSV file."))
if driver in ('pxe_ssh', 'pxe_ipmitool'):
node_keys = (
'mac_addresses', 'cpu_arch', 'cpus', 'memory_mb', 'local_gb')
if driver == 'pxe_ssh':
driver_keys = (
'driver', 'ssh_address', 'ssh_username',
'ssh_key_contents'
)
elif driver == 'pxe_ipmitool':
driver_keys = (
'driver', 'ipmi_address', 'ipmi_username',
'ipmi_password'
)
node = dict(izip(driver_keys+node_keys, row))
parsed_data.append(node)
else:
raise ValueError(_("Unknown driver: %s.") % driver)
return parsed_data
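# Example of an accepted row (hypothetical values), matching the key tuples
# above for the 'pxe_ipmitool' driver:
#   pxe_ipmitool,10.0.0.5,admin,secret,52:54:00:aa:bb:cc,x86_64,4,8192,100
# which parses to:
#   {'driver': 'pxe_ipmitool', 'ipmi_address': '10.0.0.5',
#    'ipmi_username': 'admin', 'ipmi_password': 'secret',
#    'mac_addresses': '52:54:00:aa:bb:cc', 'cpu_arch': 'x86_64',
#    'cpus': '4', 'memory_mb': '8192', 'local_gb': '100'}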
| apache-2.0 | -1,079,681,464,923,149,700 | 29.564286 | 78 | 0.580743 | false |
Edraak/edraak-platform | lms/djangoapps/edraak_university/tests/tests.py | 1 | 23553 | """
All sort of tests for the University ID app.
"""
import datetime
import pytz
from django.db.models import Q
from django.test import TestCase, override_settings
from django.core.urlresolvers import NoReverseMatch, reverse
from django.conf import settings
from django.contrib.auth.models import AnonymousUser
from bs4 import BeautifulSoup
from mock import Mock, patch
import ddt
from edraak_tests.tests.helpers import ModuleStoreLoggedInTestCase
from openedx.core.djangoapps.course_groups.cohorts import (
add_user_to_cohort,
get_cohort,
set_course_cohorted,
DEFAULT_COHORT_NAME,
)
from openedx.core.djangoapps.course_groups.tests.helpers import CohortFactory
from openedx.core.djangoapps.course_groups.models import CourseUserGroup
from pkg_resources import iter_entry_points # pylint: disable=no-name-in-module
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from xmodule.tabs import CourseTabList
from student.models import CourseEnrollment
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from student.models import UserProfile
from edraak_university.tab import UniversityIDTab
from edraak_university import helpers
from edraak_university.forms import UniversityIDStudentForm
from edraak_university.models import UniversityID, UniversityIDSettings, DEFAULT_COHORT_NAMES
from edraak_university.tests.factories import UniversityIDFactory
class SettingsTest(TestCase):
"""
Sanity checks to ensure correct model configuration.
"""
def test_url_configs(self):
try:
url = reverse('edraak_university:id', kwargs={'course_id': 'edx/demo/2017'})
self.assertTrue(url.startswith('/university/'), 'Should setup the URL correctly')
except NoReverseMatch as exception:
self.fail('The urls are not configured for this module {}'.format(exception))
def test_enabled_in_test(self):
feature = settings.FEATURES.get('EDRAAK_UNIVERSITY_APP')
self.assertTrue(feature, 'The app should be enabled in tests')
self.assertIn('edraak_university', settings.INSTALLED_APPS)
def test_disabled_export_modifications(self):
feature = settings.FEATURES.get('EDRAAK_UNIVERSITY_CSV_EXPORT')
self.assertFalse(feature, 'The export IDs should be disabled by default')
def test_default_cohort_name(self):
"""
Just in case edX decided to do something crazy with this, again!
For more info, check the old way of getting the default cohort:
- https://github.com/Edraak/edx-platform/pull/288/files#diff-77d729b2747c0a082c632262ceadb69bR7
"""
self.assertIn(DEFAULT_COHORT_NAME, DEFAULT_COHORT_NAMES)
def test_tab_installation(self):
course_tabs = {
entry_point.name: entry_point.load()
for entry_point in iter_entry_points(group='openedx.course_tab')
}
self.assertIn('university_id', course_tabs,
'Course tab is not installed, run `$ pip install -r requirements/edx/local.txt` '
'to solve the problem')
tab_class_name = course_tabs['university_id'].type
self.assertEquals(tab_class_name, 'university_id', 'Should have the required tab, with a correct tab.type')
class UniversityIDSettingsModelTest(ModuleStoreTestCase):
def test_unicode(self):
course = CourseFactory.create(org='ORG', number='CS', run='2020')
obj = UniversityIDSettings(course_key=course.id)
self.assertEquals(unicode(obj), 'ORG/CS/2020')
@ddt.ddt
class UniversityTabIDTest(ModuleStoreTestCase):
"""
Unit and integration tests for the tab.
"""
    # This patch disables `SafeCookieData.parse` so that client.login() can be
    # used without strict checking and errors. Other than this, it is not
    # needed in our tests.
CUSTOM_MIDDLEWARE_CLASSES = [
'django.contrib.sessions.middleware.SessionMiddleware' if cls.endswith('SafeSessionMiddleware') else cls
for cls in settings.MIDDLEWARE_CLASSES
]
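    # i.e. swap the platform's SafeSessionMiddleware for plain Django
    # 'django.contrib.sessions.middleware.SessionMiddleware', leaving the
    # rest of MIDDLEWARE_CLASSES unchanged.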
def test_xmodule_field(self):
course = CourseFactory.create()
self.assertTrue(hasattr(course, 'enable_university_id'),
'The course should have an xmodule enable_university_id field in its field definitions')
self.assertFalse(course.enable_university_id, 'The feature should be disabled on courses by default')
@ddt.data(True, False)
def test_is_enabled_not_logged_in(self, enable_university_id):
course = CourseFactory.create()
course.enable_university_id = enable_university_id
tab = UniversityIDTab(tab_dict={})
self.assertFalse(tab.is_enabled(course, user=None), 'Should be disabled for all non-logged-in users')
@ddt.data(True, False)
def test_is_enabled_not_enrolled(self, enable_university_id):
user = UserFactory.create()
course = CourseFactory.create()
course.enable_university_id = enable_university_id
tab = UniversityIDTab(tab_dict={})
self.assertFalse(tab.is_enabled(course, user), 'Should be disabled for all non-enrolled')
@ddt.unpack
@ddt.data(
{'urlconf': 'lms.urls', 'should_enable': False, 'msg': 'Should be disable when user=None in LMS'},
{'urlconf': 'cms.urls', 'should_enable': True, 'msg': 'Should be enabled when user=None in CMS'},
)
def test_enable_if_no_user_cms(self, urlconf, should_enable, msg):
"""
Ensures that the tab is enabled on CMS when no user is provided.
"""
with override_settings(ROOT_URLCONF=urlconf):
with patch.dict(settings.FEATURES, EDRAAK_UNIVERSITY_APP=True):
self.assertEquals(settings.ROOT_URLCONF, urlconf) # Sanity check
course = Mock(enable_university_id=True)
tab = UniversityIDTab(tab_dict={})
self.assertEquals(tab.is_enabled(course, user=None), should_enable, msg=msg)
@ddt.unpack
@ddt.data(
{'course_enable_university_id': False, 'should_tab_be_enabled': False},
{'course_enable_university_id': True, 'should_tab_be_enabled': True},
)
def test_is_enabled_enrolled(self, course_enable_university_id, should_tab_be_enabled):
user = UserFactory.create()
course = CourseFactory.create()
course.enable_university_id = course_enable_university_id
with patch.object(CourseEnrollment, 'is_enrolled', return_value=True):
tab = UniversityIDTab(tab_dict={})
self.assertEqual(tab.is_enabled(course, user), should_tab_be_enabled,
'Should only be enabled when `enable_university_id` is, even for enrolled users')
with patch.dict(settings.FEATURES, EDRAAK_UNIVERSITY_APP=False):
self.assertFalse(tab.is_enabled(course, user),
msg='Setting `EDRAAK_UNIVERSITY_APP=False` should disable the tab regardless.')
def test_is_added_in_courseware_tabs(self):
tabs_list = CourseTabList()
tabs_list.from_json([])
        # The tab should be added anyway; unfortunately the platform doesn't
        # support dynamic tab loading so far. Check the `CourseTabList` class
        # for more details about this problem.
course = CourseFactory.create()
course.enable_university_id = False
tabs_list.initialize_default(course)
course_tab_types = [tab.type for tab in course.tabs]
self.assertIn('university_id', course_tab_types)
self.assertLess(course_tab_types.index('progress'), course_tab_types.index('university_id'),
'Should appear after the progress tab')
def quick_login(self, username, password):
"""
Quick login, without having to go through the whole edX login process.
"""
self.client.login(username=username, password=password)
response = self.client.get(reverse('dashboard'))
self.assertContains(response, 'Skip to main', msg_prefix='Should be logged in')
@override_settings(MIDDLEWARE_CLASSES=CUSTOM_MIDDLEWARE_CLASSES)
def make_course_request(self, enable_university_id):
"""
        Requests a course page with a logged-in and enrolled user.
"""
course = CourseFactory.create(enable_university_id=enable_university_id)
password = 'It is me!'
user = UserFactory.create(password=password)
user.save()
enrollment = CourseEnrollmentFactory.create(course_id=course.id, user=user)
enrollment.save()
self.quick_login(user.username, password)
return self.client.get(reverse('progress', args=[unicode(course.id)]))
def test_if_tab_shown_in_response(self):
res = self.make_course_request(enable_university_id=True)
self.assertContains(res, 'University ID',
msg_prefix='Advanced settings is enabled, therefore should show the tab')
self.assertContains(res, '/university/id/', msg_prefix='The link should appear')
def test_if_tab_is_hidden_in_response(self):
res = self.make_course_request(enable_university_id=False)
self.assertNotContains(res, 'University ID',
msg_prefix='Advanced settings is disabled, therefore should NOT show the tab')
self.assertNotContains(res, '/university/id/', msg_prefix='The link should not appear')
class IsStudentFormDisabledHelperTest(ModuleStoreTestCase):
TODAY = datetime.datetime.now(pytz.UTC)
LAST_WEEK = TODAY - datetime.timedelta(days=7)
NEXT_WEEK = TODAY + datetime.timedelta(days=7)
def setUp(self):
super(IsStudentFormDisabledHelperTest, self).setUp()
self.course = CourseFactory.create()
self.user = UserFactory.create()
self.enrollment = CourseEnrollmentFactory.create(user=self.user, course_id=self.course.id)
        # Instructor hasn't edited the information yet
self.uid = UniversityID(can_edit=True, user=self.user, course_key=self.course.id)
self.uid.save()
def assertReturnValue(self, expected, msg=None):
actual = helpers.is_student_form_disabled(self.user, self.course.id)
self.assertEquals(actual, expected, msg)
def test_happy_scenario(self):
self.assertReturnValue(False)
def test_with_can_edit_equals_false(self):
"""
        The form should be disabled when the instructor has edited the student data.
"""
self.uid.can_edit = False
self.uid.save()
self.assertReturnValue(True, 'Should be disabled when the instructor updates the information')
def test_with_future_registration_end_date(self):
"""
        The form should NOT be disabled when the registration end date is in the future.
"""
uid_settings = UniversityIDSettings(registration_end_date=self.NEXT_WEEK, course_key=self.course.id)
uid_settings.save()
self.assertReturnValue(False, 'Future registration end date should NOT disable the form')
def test_with_past_registration_end_date(self):
"""
        The form should be disabled when the registration end date is not null and has already passed.
"""
uid_settings = UniversityIDSettings(registration_end_date=self.LAST_WEEK, course_key=self.course.id)
uid_settings.save()
self.assertReturnValue(True, 'Past registration end date should disable the form')
def test_with_null_registration_end_date(self):
"""
        The form should NOT be disabled when the registration end date is null.
"""
uid_settings = UniversityIDSettings(registration_end_date=None, course_key=self.course.id)
uid_settings.save()
self.assertReturnValue(False, 'NULL registration end date should NOT disable the form')
def test_not_enrolled_user(self):
enrollment = CourseEnrollment.objects.get(user=self.user, course_id=self.course.id)
enrollment.delete()
self.assertReturnValue(True, 'Should be disabled for un-enrolled users')
@ddt.ddt
class HelpersTest(ModuleStoreLoggedInTestCase):
"""
Tests for the UniversityID helper functions.
"""
LOGIN_STAFF = False
ENROLL_USER = True
def test_is_feature_enabled_helper(self):
with patch.dict(settings.FEATURES, EDRAAK_UNIVERSITY_APP=True):
self.assertTrue(helpers.is_feature_enabled(), 'Should respect the feature')
with patch.dict(settings.FEATURES, EDRAAK_UNIVERSITY_APP=False):
self.assertFalse(helpers.is_feature_enabled(), 'Should respect the feature')
with patch.object(settings, 'FEATURES', {}):
self.assertFalse(helpers.is_feature_enabled(), 'Should default to False when the feature is missing')
@patch.dict(settings.FEATURES, {'EDRAAK_UNIVERSITY_APP': True, 'EDRAAK_UNIVERSITY_CSV_EXPORT': False})
@ddt.data(False, True)
def test_is_csv_export_enabled_on_course_helper_disabled(self, course_enable_university_id):
course = Mock(enable_university_id=course_enable_university_id)
is_export_enabled = helpers.is_csv_export_enabled_on_course(course)
self.assertFalse(is_export_enabled, 'Export should be disabled when the feature flag is')
@patch.dict(settings.FEATURES, {'EDRAAK_UNIVERSITY_APP': True, 'EDRAAK_UNIVERSITY_CSV_EXPORT': True})
def test_is_csv_export_enabled_on_course_helper_enabled(self):
course_disabled = Mock(enable_university_id=False)
self.assertFalse(helpers.is_csv_export_enabled_on_course(course_disabled),
msg='Export should be disabled when the feature flag is')
course_enabled = Mock(enable_university_id=True)
self.assertTrue(helpers.is_csv_export_enabled_on_course(course_enabled),
msg='Export should be enabled when all three features are enabled')
def test_get_university_id_helper(self):
course = CourseFactory.create()
user = UserFactory.create()
self.assertIsNone(helpers.get_university_id(AnonymousUser(), unicode(course.id)))
self.assertIsNone(helpers.get_university_id(user, unicode(course.id)))
UniversityID.objects.create(
course_key=unicode(course.id),
user=user,
)
self.assertIsNotNone(helpers.get_university_id(user, unicode(course.id)), 'Should return an ID')
self.assertEquals(helpers.get_university_id(user, unicode(course.id)).user_id, user.id,
'Should return the correct user_id')
@patch('edraak_university.helpers.has_access', return_value=False)
def test_university_id_is_required_helper_for_user(self, _):
"""
Tests for both has_valid_university_id and university_id_is_required.
"""
self.assertFalse(helpers.university_id_is_required(Mock(), Mock(enable_university_id=False)),
'The feature is disabled, so the ID should not be required anyway')
with patch.dict(settings.FEATURES, {'EDRAAK_UNIVERSITY_APP': False}):
self.assertFalse(helpers.university_id_is_required(Mock(), Mock(enable_university_id=True)),
'The platform-specific feature flag is disabled, so the ID should not be required')
with patch('edraak_university.helpers.get_university_id', return_value=None):
self.assertTrue(helpers.university_id_is_required(Mock(), Mock(enable_university_id=True)),
'The user have no ID and the feature is enabled, so the ID is required.')
@patch('edraak_university.helpers.has_access', return_value=True)
def test_university_id_is_required_helper_for_staff(self, _):
"""
Tests for university_id_is_required for staff users.
"""
with patch('edraak_university.helpers.get_university_id', return_value=None):
self.assertFalse(
helpers.university_id_is_required(Mock(), Mock(enable_university_id=True)),
'The STAFF user have no ID, so the ID is NOT required.',
)
@ddt.ddt
class UniversityIDFormTest(ModuleStoreTestCase):
"""
Unit tests for the UniversityIDStudentForm class.
"""
def setUp(self):
super(UniversityIDFormTest, self).setUp()
self.course = CourseFactory.create()
set_course_cohorted(course_key=self.course.id, cohorted=True)
# Initialize the default group!
default_cohort = get_cohort(user=self.user, course_key=self.course.id)
self.assertEquals(default_cohort.name, DEFAULT_COHORT_NAME) # Sanity-check
self.cohort, _created = CourseUserGroup.create(
name='Cohort_A',
course_id=self.course.id,
)
def get_form(self, overrides=None):
"""
        Get a populated form.
"""
params = {
# Initially clean params
'full_name': 'Mahmoud Salam',
'university_id': '2010-12-05',
'cohort': self.cohort.id,
}
if overrides:
# May add validation errors for testing purposes
params.update(overrides)
form = UniversityIDStudentForm(self.course.id, data=params)
return form
def test_initial_data_are_valid(self):
form = self.get_form()
default_cohort = CourseUserGroup.objects.get(~Q(name=self.cohort.name), course_id=self.course.id)
self.assertEquals(default_cohort.name, DEFAULT_COHORT_NAME)
custom_cohort = CourseUserGroup.objects.get(course_id=self.course.id, name=self.cohort.name)
self.assertEquals(custom_cohort.group_type, CourseUserGroup.COHORT)
# Sanity check
cohorts = UniversityID.get_cohorts_for_course(self.course.id)
self.assertListEqual(list(cohorts), [custom_cohort])
self.assertEquals(form.errors, {})
self.assertTrue(form.is_valid())
@ddt.unpack
@ddt.data(
{'field_name': 'full_name', 'bad_value': '', 'issue': 'is empty'},
{'field_name': 'full_name', 'bad_value': 'a', 'issue': 'is too short'},
{'field_name': 'full_name', 'bad_value': 'a' * 60, 'issue': 'is too long'},
{'field_name': 'university_id', 'bad_value': '123', 'issue': 'is too short'},
{'field_name': 'university_id', 'bad_value': '2011 501', 'issue': 'has a space'},
{'field_name': 'university_id', 'bad_value': 'a' * 100, 'issue': 'is too long'},
{'field_name': 'university_id', 'bad_value': '2011/500', 'issue': 'has a special char'},
{'field_name': 'cohort', 'bad_value': '', 'issue': 'is empty'},
)
def test_field_validators(self, field_name, bad_value, issue):
invalid_params = {field_name: bad_value}
form = self.get_form(invalid_params)
self.assertFalse(form.is_valid(), 'Form is valid, but {field_name} {issue}'.format(
field_name=field_name,
issue=issue,
))
self.assertIn(field_name, form.errors)
self.assertEquals(len(form.errors[field_name]), 1)
def test_as_div(self):
form = self.get_form({
'full_name': '',
})
self.assertFalse(form.is_valid(), 'The full_name is empty, the form should not be valid')
# Emulate an HTML root element
wrapped = u'<body>{}</body>'.format(form.as_div())
soup = BeautifulSoup(wrapped, 'html.parser')
full_name_elem = next(iter(soup.body.children))
self.assertEquals(full_name_elem.name, 'div', 'Should contain <div> instead of <p> tags')
label_elem = full_name_elem.label
errors_elem = full_name_elem.ul
children = [elem for elem in full_name_elem.children]
self.assertLess(children.index(label_elem), children.index(errors_elem),
'<label> should display before the <ul class="errors"> tag.')
class UniversityIDModelTest(ModuleStoreTestCase):
"""
Tests for the UniversityID model class.
"""
def setUp(self):
super(UniversityIDModelTest, self).setUp()
self.course = CourseFactory.create(
org='a',
number='b',
run='c',
)
self.cohort = CohortFactory.create(
course_id=self.course.id,
)
set_course_cohorted(course_key=self.course.id, cohorted=True)
self.model = UniversityIDFactory.create(
user__username='username1',
user__email='[email protected]',
user__profile__name='Mike Wazowski',
course_key=self.course.id,
university_id='201711201',
)
self.profile = UserProfile.objects.get(user=self.model.user)
def test_default_cohort(self):
self.assertEquals(self.model.get_cohort().name, DEFAULT_COHORT_NAME)
self.assertEquals(0, self.cohort.users.count())
def test_with_custom_cohort(self):
add_user_to_cohort(self.cohort, self.model.user.email)
self.assertEquals(1, self.cohort.users.count())
self.assertEquals(self.model.get_cohort().id, self.cohort.id)
def test_set_cohort(self):
self.model.set_cohort(self.cohort)
self.assertEquals(self.model.get_cohort().id, self.cohort.id)
self.model.set_cohort(self.cohort) # Set cohort twice, it should be fine!
def test_remove_from_cohort(self):
self.model.set_cohort(self.cohort)
self.assertNotEqual(self.model.get_cohort().name, DEFAULT_COHORT_NAME)
self.model.remove_from_cohort()
self.assertEquals(self.model.get_cohort().name, DEFAULT_COHORT_NAME)
# Removing from a cohort multiple times should be fine!
self.model.remove_from_cohort()
def test_unicode(self):
self.assertEquals(unicode(self.model), 'username1 - a/b/c - 201711201')
def test_full_name(self):
self.assertEquals(self.model.get_full_name(), 'Mike Wazowski')
self.profile.delete()
self.assertIsNone(self.model.get_full_name())
def test_get_email(self):
self.assertEquals(self.model.get_email(), '[email protected]')
def test_get_marked_university_ids(self):
uni_ids = [
'20-{}'.format(i)
for i in ['01a', '10x', '03', '04M ', '04m', '04M\t', '10x ', '02t']
]
course_key = CourseFactory.create().id
for uni_id in uni_ids:
model = UniversityIDFactory.create(
course_key=course_key,
university_id=uni_id,
)
model.save()
marked = UniversityID.get_marked_university_ids(course_key=course_key)
self.assertEquals(len(marked), len(uni_ids))
# Should sort the UniversityIDs
# Should not mark unique IDs
self.assertListEqual(
list1=[
[u'20-01a', False],
[u'20-02t', False],
[u'20-03', False],
],
list2=[
[obj.university_id, obj.is_conflicted]
for obj in marked[:3]
],
)
# Should mark conflicted
self.assertListEqual(
list1=[
[u'20-04M\t', True],
[u'20-04M ', True],
[u'20-04m', True],
[u'20-10x', True],
[u'20-10x ', True],
],
list2=[
[obj.university_id, obj.is_conflicted]
for obj in marked[3:]
],
)
| agpl-3.0 | 3,524,811,865,322,005,500 | 40.176573 | 115 | 0.647646 | false |
nonZero/OpenCommunity | src/issues/views.py | 1 | 44889 | from django.contrib.auth.views import redirect_to_login
from django.db.models.aggregates import Max
from django.http.response import HttpResponse, HttpResponseBadRequest, \
HttpResponseForbidden
from django.shortcuts import get_object_or_404, render, redirect
from django.template import RequestContext
from django.template.loader import render_to_string
from django.views.generic import ListView
from django.views.generic.base import View
from django.views.generic.detail import DetailView, SingleObjectMixin
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from issues import models, forms
from issues.forms import CreateIssueForm, CreateProposalForm, EditProposalForm, \
UpdateIssueForm, EditProposalTaskForm, AddAttachmentForm, \
UpdateIssueAbstractForm, CreateProposalVoteArgumentForm
from issues.models import ProposalType, Issue, IssueStatus, ProposalVote, \
Proposal, ProposalVoteBoard, ProposalVoteValue, VoteResult, ProposalVoteArgument, ProposalVoteArgumentRanking
from meetings.models import Meeting
from oc_util.templatetags.opencommunity import minutes, board_voters_on_proposal
from ocd.base_views import AjaxFormView, json_response, CommitteeMixin
from ocd.validation import enhance_html
from ocd.base_managers import ConfidentialSearchQuerySet
from shultze_vote import send_issue_ranking
from acl.default_roles import DefaultGroups
from users.permissions import has_community_perm, has_committee_perm
from haystack.inputs import AutoQuery
import json
import mimetypes
from datetime import date
class IssueMixin(CommitteeMixin):
model = models.Issue
def get_queryset(self):
return self.model.objects.object_access_control(
user=self.request.user,
committee=self.committee).filter(committee=self.committee,
active=True)
class ProposalMixin(IssueMixin):
model = models.Proposal
def get_queryset(self):
return self.model.objects.object_access_control(
user=self.request.user,
committee=self.committee).filter(issue=self.issue,
active=True)
@property
def issue(self):
return get_object_or_404(models.Issue, committee=self.committee,
pk=self.kwargs['issue_id'])
def _can_complete_task(self):
o = self.get_object()
if self.request.user == o.assigned_to_user:
return True
return has_community_perm(self.request.user, self.committee.community,
'issues.edittask_proposal')
class IssueList(IssueMixin, ListView):
required_permission = 'viewopen_issue'
def get_queryset(self):
return super(IssueList, self).get_queryset().exclude(
status=IssueStatus.ARCHIVED).order_by('-created_at')
def get_context_data(self, **kwargs):
d = super(IssueList, self).get_context_data(**kwargs)
available_ids = set([x.id for x in self.get_queryset()])
if d['committee'].issue_ranking_enabled:
d['sorted_issues'] = super(IssueList, self).get_queryset().exclude(
status=IssueStatus.ARCHIVED).order_by('-order_by_votes')
if 'vote_ranking' in d['cperms'] and self.request.user.is_authenticated():
my_ranking = models.IssueRankingVote.objects.filter(
voted_by=self.request.user,
issue__committee_id=d['committee'].id) \
.order_by('rank')
d['my_vote'] = [i.issue for i in my_ranking if i.issue.active and \
i.issue.status != IssueStatus.ARCHIVED]
d['my_non_ranked'] = [i for i in self.get_queryset() \
if i not in d['my_vote']]
for obj in self.object_list:
obj.restricted_proposals = \
obj.proposals.object_access_control(
user=self.request.user, committee=self.committee)
for ai in obj.agenda_items.all():
ai.restricted_proposals = ai.proposals(
user=self.request.user, committee=self.committee)
ai.restricted_accepted_proposals = ai.accepted_proposals(
user=self.request.user, committee=self.committee)
return d
required_permission_for_post = 'vote_ranking'
def post(self, request, *args, **kwargs):
# TODO: check post permission for user and for each issue
send_issue_ranking(request)
return json_response({'res': 'ok', })
class IssueDetailView(IssueMixin, DetailView):
def get_required_permission(self):
o = self.get_object()
return 'viewclosed_issue' if o.is_published else 'viewopen_issue'
def get_context_data(self, **kwargs):
d = super(IssueDetailView, self).get_context_data(**kwargs)
m_id = self.request.GET.get('m_id', None)
d['form'] = forms.CreateIssueCommentForm()
d['proposal_form'] = forms.CreateProposalForm(committee=self.committee)
if m_id:
d['meeting'] = get_object_or_404(Meeting, id=m_id,
committee=self.committee)
a = d['meeting'].agenda.object_access_control(
user=self.request.user, committee=self.committee).all()
d['meeting_active_issues'] = [ai.issue for ai in a if
ai.issue.active]
else:
d['meeting'] = None
if self.request.GET.get('s', None) == '1':
d['all_issues'] = self.get_queryset().exclude(
status=IssueStatus.ARCHIVED).order_by('-created_at')
o = self.get_object()
if o.is_current and self.request.user in o.committee.upcoming_meeting_participants.all() and has_committee_perm(
self.request.user, self.committee, 'proposal_board_vote_self'):
d['can_board_vote_self'] = True
d['proposals'] = self.object.proposals.object_access_control(
user=self.request.user, committee=self.committee).open()
d['upcoming_issues'] = self.object.committee.upcoming_issues(
user=self.request.user, committee=self.committee)
d['agenda_items'] = self.object.agenda_items.all()
for ai in d['agenda_items']:
ai.accepted_proposals = ai.accepted_proposals(
user=self.request.user, committee=self.committee)
ai.rejected_proposals = ai.rejected_proposals(
user=self.request.user, committee=self.committee)
ai.proposals = ai.proposals(
user=self.request.user, committee=self.committee)
return d
required_permission_for_post = 'add_issuecomment'
def post(self, request, *args, **kwargs):
form = forms.CreateIssueCommentForm(request.POST)
if not form.is_valid():
return HttpResponseBadRequest()
i = self.get_object()
comment_id = request.POST.get('comment_id', None)
try:
c = i.comments.get(pk=int(comment_id))
c.content = enhance_html(form.cleaned_data['content'])
c.save()
return json_response({'comment_id': c.id})
except:
c = i.comments.create(content=enhance_html(form.cleaned_data['content']),
created_by=request.user)
return json_response({'comment_id': c.id})
# if comment_id == '':
# c = i.comments.create(content=enhance_html(form.cleaned_data['content']),
# created_by=request.user)
#
# self.object = i # this makes the next line work
# context = self.get_context_data(object=i, c=c)
# return render(request, 'issues/_comment.html', context)
# else:
# c = i.comments.get(pk=int(comment_id))
# c.content=enhance_html(form.cleaned_data['content'])
# return json_response({'comment_id': c.id})
class IssueCommentMixin(CommitteeMixin):
model = models.IssueComment
def get_required_permission(self):
o = self.get_object()
return 'editopen_issuecomment' if o.issue.is_upcoming else 'editclosed_issuecomment'
def get_queryset(self):
return models.IssueComment.objects.filter(issue__committee=self.committee)
class IssueCommentDeleteView(IssueCommentMixin, DeleteView):
def post(self, request, *args, **kwargs):
o = self.get_object()
o.active = 'undelete' in request.POST
o.save()
return HttpResponse(int(o.active))
class IssueCommentEditView(IssueCommentMixin, UpdateView):
form_class = forms.EditIssueCommentForm
def form_valid(self, form):
c = self.get_object()
c.update_content(form.instance.version, self.request.user,
form.cleaned_data['content'])
context = self.get_context_data(object=c.issue, c=c)
return render(self.request, 'issues/_comment.html', context)
def form_invalid(self, form):
return HttpResponse("")
def get_form_kwargs(self):
d = super(IssueCommentEditView, self).get_form_kwargs()
d['prefix'] = 'ic%d' % self.get_object().id
return d
class IssueCreateView(AjaxFormView, IssueMixin, CreateView):
form_class = CreateIssueForm
template_name = "issues/issue_create_form.html"
reload_on_success = True
def get_required_permission(self):
return 'editagenda_community' if self.upcoming else 'add_issue'
upcoming = False
def form_valid(self, form):
form.instance.committee = self.committee
form.instance.created_by = self.request.user
form.instance.status = IssueStatus.IN_UPCOMING_MEETING if \
self.upcoming else IssueStatus.OPEN
if self.upcoming:
max_upcoming = Issue.objects.filter(
committee=self.committee).aggregate(x=Max(
'order_in_upcoming_meeting'))['x']
form.instance.order_in_upcoming_meeting = max_upcoming + 1 \
if max_upcoming else 1
return super(IssueCreateView, self).form_valid(form)
def get_form_kwargs(self):
kwargs = super(IssueCreateView, self).get_form_kwargs()
kwargs.update({'committee': self.committee})
return kwargs
def get_success_url(self):
url = super(IssueCreateView, self).get_success_url()
if not self.upcoming:
url += '?s=1'
return url
class IssueEditView(AjaxFormView, IssueMixin, UpdateView):
required_permission = 'editopen_issue'
form_class = UpdateIssueForm
reload_on_success = True
def form_valid(self, form):
if self.reload_on_success:
return super(IssueEditView, self).form_valid(form)
else:
self.object = form.save()
return render(self.request, 'issues/_issue_title.html',
self.get_context_data())
def get_form_kwargs(self):
kwargs = super(IssueEditView, self).get_form_kwargs()
kwargs.update({'committee': self.committee})
return kwargs
class IssueEditAbstractView(AjaxFormView, IssueMixin, UpdateView):
required_permission = 'editopen_issue'
form_class = UpdateIssueAbstractForm
def form_valid(self, form):
self.object = form.save()
return render(self.request, 'issues/_issue-abstract.html',
self.get_context_data())
class IssueCompleteView(IssueMixin, SingleObjectMixin, View):
required_permission = 'add_meeting'
def post(self, request, *args, **kwargs):
o = self.get_object()
# TODO: verify that issue is in active meeting
if request.POST.get('complete') == '1':
o.completed = True
elif request.POST.get('undo_complete') == '1':
o.completed = False
if o.status == IssueStatus.ARCHIVED:
o.status = o.statuses.OPEN
elif request.POST.get('archive') == '1':
# TODO: check if issue can be closed
o.completed = True
o.status = IssueStatus.ARCHIVED
o.save()
return HttpResponse("-")
class IssueSetLengthView(IssueMixin, SingleObjectMixin, View):
required_permission = 'editagenda_community'
def post(self, request, *args, **kwargs):
o = self.get_object()
s = request.POST.get('length', '').strip()
if s:
try:
t = int(s)
if not 0 <= t <= 360:
raise ValueError('Illegal Value')
except ValueError:
return HttpResponseBadRequest("Bad Request")
else:
t = None
o.length_in_minutes = t
o.save()
return HttpResponse(minutes(t) or "--:--")
class IssueDeleteView(AjaxFormView, IssueMixin, DeleteView):
def get_required_permission(self):
o = self.get_object()
if o.is_published:
return 'editclosed_issue'
return 'add_issue' if o.created_by == self.request.user else 'editopen_issue'
def get_success_url(self):
return "" if self.issue.active else "-"
def delete(self, request, *args, **kwargs):
o = self.get_object()
o.active = False
o.save()
o.active_proposals().update(active=False)
return HttpResponse("-")
class AttachmentCreateView(AjaxFormView, IssueMixin, CreateView):
model = models.IssueAttachment
form_class = AddAttachmentForm
required_permission = 'editopen_issue'
reload_on_success = True
@property
def issue(self):
return get_object_or_404(models.Issue, committee=self.committee, pk=self.kwargs['pk'])
def form_valid(self, form):
form.instance.created_by = self.request.user
form.instance.issue = self.issue
return super(AttachmentCreateView, self).form_valid(form)
class AttachmentDeleteView(AjaxFormView, CommitteeMixin, DeleteView):
model = models.IssueAttachment
required_permission = 'editopen_issue'
@property
def issue(self):
return get_object_or_404(models.Issue, pk=self.kwargs['issue_id'])
def delete(self, request, *args, **kwargs):
o = self.get_object()
o.file.delete(save=False)
o.delete()
return HttpResponse("")
class AttachmentDownloadView(CommitteeMixin, SingleObjectMixin, View):
model = models.IssueAttachment
def get_required_permission(self):
o = self.get_object().issue
return 'viewclosed_issue' if o.is_published else 'viewopen_issue'
def get(self, request, *args, **kwargs):
o = self.get_object()
filename = o.file.name.split('/')[-1]
mime_type = mimetypes.guess_type(filename, True)[0] or "text/plain"
response = HttpResponse(o.file, content_type=mime_type)
response['Content-Disposition'] = 'attachment; filename=%s' % filename.encode('utf-8')
return response
class ProposalCreateView(AjaxFormView, ProposalMixin, CreateView):
reload_on_success = True
def get_required_permission(self):
return 'editclosedproposal' if self.issue.status == IssueStatus.ARCHIVED else 'add_proposal'
form_class = CreateProposalForm
def get_context_data(self, **kwargs):
context = super(ProposalCreateView, self).get_context_data(**kwargs)
context['issue'] = self.issue
return context
def form_valid(self, form):
form.instance.created_by = self.request.user
form.instance.issue = self.issue
if self.reload_on_success:
return super(ProposalCreateView, self).form_valid(form)
else:
self.object = form.save()
return render(self.request, 'issues/_proposal.html',
self.get_context_data(proposal=self.object))
def get_success_url(self):
return self.issue.get_absolute_url()
def get_form_kwargs(self):
d = super(ProposalCreateView, self).get_form_kwargs()
d['prefix'] = 'proposal'
d['committee'] = self.committee
d['initial'] = {'issue': self.issue}
return d
class ProposalDetailView(ProposalMixin, DetailView):
def get_required_permission(self):
p = self.get_object()
return 'viewclosed_proposal' if p.decided_at_meeting else 'viewopen_proposal'
def get_required_permission_for_post(self):
p = self.get_object()
return 'acceptclosed_proposal' if p.decided_at_meeting else 'acceptopen_proposal'
def board_votes_dict(self):
total_votes = 0
votes_dict = {'sums': {}, 'total': total_votes, 'per_user': {}}
pro_count = 0
con_count = 0
neut_count = 0
# Board vote permission
board_attending = self.get_object().issue.committee.meeting_participants()
for u in board_attending:
# check u has perm for board vote
vote = ProposalVoteBoard.objects.filter(proposal=self.get_object(), user=u)
if vote.exists():
votes_dict['per_user'][u] = vote[0]
if vote[0].value == 1:
pro_count += 1
total_votes += 1
elif vote[0].value == -1:
con_count += 1
total_votes += 1
elif vote[0].value == 0:
neut_count += 1
else:
votes_dict['per_user'][u] = None
neut_count += 1
votes_dict['sums']['pro_count'] = pro_count
votes_dict['sums']['con_count'] = con_count
votes_dict['sums']['neut_count'] = neut_count
votes_dict['total'] = total_votes
return votes_dict
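    # Shape of the dict built above (illustration, not new behavior):
    #   {'sums': {'pro_count': .., 'con_count': .., 'neut_count': ..},
    #    'total': ..,            # only pro + con votes actually cast
    #    'per_user': {user: ProposalVoteBoard instance or None}}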
def _init_board_votes(self, board_attending):
p = self.get_object()
for b in board_attending:
ProposalVoteBoard.objects.create(proposal=p, user=b,
voted_by_chairman=True)
def get_context_data(self, **kwargs):
"""add meeting for the latest straw voting result
add 'previous_res' var if found previous registered results for this meeting
"""
context = super(ProposalDetailView, self).get_context_data(**kwargs)
m_id = self.request.GET.get('m_id', None)
o = self.get_object()
if m_id:
context['meeting_context'] = get_object_or_404(Meeting, id=m_id,
committee=self.committee)
participants = context['meeting_context'].participants.all()
else:
context['meeting_context'] = None
participants = o.issue.committee.upcoming_meeting_participants.all()
try:
group = self.request.user.memberships.get(committee=self.issue.committee).default_group_name
except:
group = ""
board_votes = ProposalVoteBoard.objects.filter(proposal=o)
board_attending = board_voters_on_proposal(o)
is_current = o.issue.is_current
context['res'] = o.get_straw_results()
results = VoteResult.objects.filter(proposal=o) \
.order_by('-meeting__held_at')
if o.issue.is_upcoming and \
self.committee.upcoming_meeting_is_published and \
self.committee.straw_vote_ended:
context['meeting'] = self.committee.draft_meeting()
else:
if results.count():
context['meeting'] = results[0].meeting
else:
context['meeting'] = None
if not board_votes.exists():
self._init_board_votes(board_attending)
show_to_member = group == DefaultGroups.MEMBER and o.decided_at_meeting
show_to_board = (group == DefaultGroups.BOARD or \
group == DefaultGroups.SECRETARY) and \
(is_current or o.decided_at_meeting)
show_to_chairman = group == DefaultGroups.CHAIRMAN and o.decided
show_board_vote_result = o.register_board_votes and \
board_votes.exclude(
value=ProposalVoteValue.NEUTRAL).count() and \
(show_to_member or show_to_board or show_to_chairman)
context['issue_frame'] = self.request.GET.get('s', None)
context['board_attending'] = board_attending
context['user_vote'] = o.board_vote_by_member(self.request.user.id)
context['show_board_vote_result'] = show_board_vote_result
context['chairman_can_vote'] = is_current and not o.decided
context['board_votes'] = self.board_votes_dict()
context['can_board_vote_self'] = is_current and not o.decided and has_committee_perm(self.request.user,
self.committee,
'proposal_board_vote_self')\
and self.request.user in board_attending
rel_proposals = self.object.issue.proposals
context['proposals'] = rel_proposals.object_access_control(
user=self.request.user, committee=self.committee)
return context
def post(self, request, *args, **kwargs):
""" Used to change a proposal status (accept/reject)
or a proposal's property completed/not completed
"""
p = self.get_object()
v = request.POST.get('accepted', None)
if v:
v = int(v)
if v not in [
p.statuses.ACCEPTED,
p.statuses.REJECTED,
p.statuses.IN_DISCUSSION
]:
return HttpResponseBadRequest("Bad value for accepted POST parameter")
p.status = v
p.save()
return redirect(p)
class ProposalEditView(AjaxFormView, ProposalMixin, UpdateView):
form_class = EditProposalForm
reload_on_success = True
def get_required_permission(self):
o = self.get_object()
return 'editclosed_proposal' if o.decided_at_meeting else 'edittask_proposal'
def get_form_kwargs(self):
d = super(ProposalEditView, self).get_form_kwargs()
d['prefix'] = 'proposal'
d['committee'] = self.committee
return d
class ProposalEditTaskView(ProposalMixin, UpdateView):
form_class = EditProposalTaskForm
def get_queryset(self):
return super(ProposalEditTaskView, self).get_queryset().filter(type=ProposalType.TASK)
def get_required_permission(self):
o = self.get_object()
return 'editclosed_proposal' if o.decided_at_meeting else 'editopen_proposal'
class ProposalCompletedTaskView(ProposalMixin, UpdateView):
""" update a task as completed / un-completed
"""
def post(self, request, *args, **kwargs):
if not self._can_complete_task():
            return HttpResponseForbidden("403 Forbidden")
p = self.get_object()
completed = request.POST.get('completed', None)
if completed:
p.task_completed = completed == '1'
p.save()
return redirect(p)
class ProposalDeleteView(AjaxFormView, ProposalMixin, DeleteView):
def get_required_permission(self):
o = self.get_object()
if o.decided_at_meeting:
return 'editclosed_issue'
return 'add_proposal' if o.created_by == self.request.user else 'editopen_proposal'
def get_success_url(self):
return "" if self.issue.active else "-"
def delete(self, request, *args, **kwargs):
o = self.get_object()
o.active = False
o.save()
return HttpResponse("-")
class VoteResultsView(CommitteeMixin, DetailView):
model = models.Proposal
def get(self, request, *args, **kwargs):
meeting = None
meeting_id = request.GET.get('meeting_id', None)
p = self.get_object()
if meeting_id:
meeting = get_object_or_404(Meeting, id=int(meeting_id))
res = p.get_straw_results(meeting.id)
else:
meeting = self.committee.draft_meeting()
res = p.get_straw_results()
panel = render_to_string('issues/_proposal_vote_results.html',
RequestContext(request, {
'meeting': meeting,
'res': res,
'proposal': p,
}))
return HttpResponse(panel)
class ProposalVoteMixin(CommitteeMixin):
VOTE_OK = 0
VOTE_VER_ERR = 1
VOTE_OVERRIDE_ERR = 2
def _do_vote(self, vote_class, proposal, user_id, value, is_board, voter_group):
if is_board:
# verify
if not voter_group or voter_group == DefaultGroups.MEMBER \
or proposal.decided:
return (None, self.VOTE_VER_ERR)
by_chairman = voter_group == DefaultGroups.CHAIRMAN
vote, created = vote_class.objects.get_or_create(proposal_id=proposal.id,
user_id=user_id)
if not created and by_chairman and not vote.voted_by_chairman:
# don't allow chairman vote override a board member existing vote!
return (vote, self.VOTE_OVERRIDE_ERR)
vote.value = value
if is_board:
vote.voted_by_chairman = by_chairman
vote.save()
return (vote, self.VOTE_OK)
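    # Return contract of _do_vote: (vote, status), where status is VOTE_OK on
    # success, VOTE_VER_ERR when the voter may not cast this board vote (the
    # vote is then None), or VOTE_OVERRIDE_ERR when a chairman vote would
    # overwrite a board member's own vote.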
def _vote_values_map(self, key):
vote_map = {
'pro': 1,
'con': -1,
'neut': 0,
'reset': -2,
}
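        # The map is used in both directions below: a string key from POST
        # data resolves to its numeric vote value, while a numeric value
        # (e.g. from an existing vote) resolves back to its string name.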
        if not isinstance(key, int):
try:
return vote_map[key]
except KeyError:
return None
else:
for k, val in vote_map.items():
if key == val:
return k
return None
class ProposalVoteView(ProposalVoteMixin, DetailView):
required_permission_for_post = 'vote'
model = models.Proposal
def post(self, request, *args, **kwargs):
is_board = request.POST.get('board', False)
user_id = request.POST.get('user', request.user.id)
voter_id = request.user.id
voter_group = 'board' if has_committee_perm(request.user, self.committee, 'proposal_board_vote') else ''
# voter_group = request.user.get_default_group(self.committee.community) \
# if request.user.is_authenticated() \
# else ''
val = request.POST['val']
if is_board:
# vote for board member by chairman or board member
vote_class = ProposalVoteBoard
else:
# straw vote by member
vote_class = ProposalVote
proposal = self.get_object()
pid = proposal.id
vote_panel_tpl = 'issues/_vote_panel.html' if val == 'reset' \
else 'issues/_vote_reset_panel.html'
res_panel_tpl = 'issues/_board_vote_res.html' if is_board \
else 'issues/_vote_reset_panel.html'
vote_response = {
'result': 'ok',
'html': render_to_string(res_panel_tpl,
{
'proposal': proposal,
'committee': self.committee,
'vote_status': val,
}),
}
value = ''
if val == 'reset':
vote = get_object_or_404(vote_class,
proposal_id=pid, user_id=user_id)
vote.delete()
related_arguments = ProposalVoteArgumentRanking.objects.filter(user=request.user,
argument__proposal_vote__proposal=proposal)
if related_arguments.count():
related_arguments.delete()
vote_response['html'] = render_to_string(vote_panel_tpl,
{
'proposal': proposal,
'committee': self.committee
})
return json_response(vote_response)
else:
value = self._vote_values_map(val)
            if value is None:
return HttpResponseBadRequest('vote value not valid')
vote, valid = self._do_vote(vote_class, proposal, user_id, value,
is_board, voter_group)
if valid == ProposalVoteMixin.VOTE_OK:
vote_response['html'] = render_to_string(res_panel_tpl,
{
'proposal': proposal,
'committee': self.committee,
'vote_status': val,
'user': self.request.user
})
if is_board and voter_group == DefaultGroups.CHAIRMAN:
vote_response['sum'] = render_to_string('issues/_member_vote_sum.html',
{
'proposal': proposal,
'committee': self.committee,
'board_attending': board_voters_on_proposal(proposal),
})
else:
vote_response['result'] = 'err'
if valid == ProposalVoteMixin.VOTE_OVERRIDE_ERR:
vote_response['override_fail'] = [{'uid': user_id,
'val': self._vote_values_map(vote.value),
}]
return json_response(vote_response)
def get(self, request, *args, **kwargs):
voter_id = request.user.id
if not request.user.is_authenticated():
return redirect_to_login(request.build_absolute_uri())
is_board = request.GET.get('board', False)
voter_group = DefaultGroups.MEMBER
val = request.GET['val']
vote_class = ProposalVote
proposal = self.get_object()
value = self._vote_values_map(val)
        if value is None:
return redirect(proposal)
vote, valid = self._do_vote(vote_class, proposal, voter_id, value,
is_board, voter_group)
return redirect(proposal)
class MultiProposalVoteView(ProposalVoteMixin, DetailView):
required_permission_for_post = 'chairman_vote'
model = models.Proposal
def post(self, request, *args, **kwargs):
voted_ids = json.loads(request.POST['users'])
proposal = self.get_object()
pid = proposal.id
voter_group = request.user.get_default_group(self.committee.community) \
if request.user.is_authenticated() \
else ''
val = request.POST['val']
value = self._vote_values_map(val)
        if value is None:
return HttpResponseBadRequest('vote value not valid')
vote_failed = []
for user_id in voted_ids:
vote, valid = self._do_vote(ProposalVoteBoard, proposal,
user_id, value, True, voter_group)
if valid == ProposalVoteMixin.VOTE_OVERRIDE_ERR:
vote_failed.append({'uid': user_id, 'val': self._vote_values_map(vote.value), })
return json_response({
'result': 'ok',
'html': render_to_string('issues/_vote_reset_panel.html',
{
'proposal': proposal,
'committee': self.committee,
}),
'override_fail': vote_failed,
'sum': render_to_string('issues/_member_vote_sum.html',
{
'proposal': proposal,
'committee': self.committee,
'board_attending': board_voters_on_proposal(proposal),
})
})
class RankingVoteMixin(ProposalVoteMixin):
VOTE_OK = 0
VOTE_VER_ERR = 1
def _do_vote(self, vote_class, argument, user_id, value):
try:
vote = vote_class.objects.get(argument_id=argument.id,
user_id=user_id)
if argument.proposal_vote.value != ProposalVote.objects.get(user__id=user_id,
proposal=argument.proposal_vote.proposal).value:
return vote, self.VOTE_VER_ERR
if vote.value == value:
vote.delete()
else:
vote.value = value
vote.save()
except vote_class.DoesNotExist:
vote = vote_class.objects.create(argument_id=argument.id,
user_id=user_id,
value=value)
except vote_class.MultipleObjectsReturned:
# Should not happen
raise
except ProposalVote.DoesNotExist:
# Should not happen
raise
return vote, self.VOTE_OK
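    # Note the toggle semantics above: re-casting the same up/down value on
    # an argument deletes the existing ranking vote instead of re-saving it.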
def _vote_values_map(self, key):
vote_map = {
'up': 1,
'down': -1,
}
        if not isinstance(key, int):
try:
return vote_map[key]
except KeyError:
return None
else:
for k, val in vote_map.items():
if key == val:
return k
return None
class ArgumentRankingVoteView(RankingVoteMixin, DetailView):
required_permission_for_post = 'vote'
model = models.ProposalVoteArgument
def post(self, request, *args, **kwargs):
user_id = request.POST.get('user', request.user.id)
val = request.POST['val']
vote_class = ProposalVoteArgumentRanking
argument = self.get_object()
vote_response = {
'result': 'ok',
}
value = self._vote_values_map(val)
        if value is None:
return HttpResponseBadRequest('vote value not valid')
vote, valid = self._do_vote(vote_class, argument, user_id, value)
if valid != RankingVoteMixin.VOTE_OK:
vote_response['result'] = 'err'
return HttpResponse(argument.argument_score)
def up_down_vote(request, committee_id, arg_id):
if request.method != "POST":
raise Exception("Must be POST")
argument = models.ProposalVoteArgument.objects.get(pk=arg_id)
val = request.POST['val']
value = 1 if val == 'up' else -1
try:
vote = models.ProposalVoteArgumentRanking.objects.get(argument=argument, user=request.user)
if vote.value == value:
vote.delete()
else:
vote.value = value
vote.save()
except ProposalVoteArgumentRanking.DoesNotExist:
obj = models.ProposalVoteArgumentRanking(argument=argument, user=request.user, value=value)
obj.save()
up_votes = ProposalVoteArgumentRanking.objects.filter(argument=argument, value=1).count()
down_votes = ProposalVoteArgumentRanking.objects.filter(argument=argument, value=-1).count()
total_votes = up_votes - down_votes
return HttpResponse(total_votes)
class ProposalVoteArgumentCreateView(CreateView):
model = models.ProposalVoteArgument
form_class = CreateProposalVoteArgumentForm
fields = ['argument', 'proposal_vote', 'created_by']
template_name = 'issues/proposal_vote_argument_form.html'
def get_success_url(self):
return ""
# def form_valid(self, form):
# form.instance.proposal_vote = ProposalVote.objects.get(pk=self.kwargs['vote_id'])
# form.instance.created_by = self.request.user
# return super(ProposalVoteArgumentCreateView, self).form_valid(form)
#
# def form_invalid(self, form):
# return HttpResponse("000")
def post(self, request, *args, **kwargs):
form = forms.CreateProposalVoteArgumentForm(request.POST)
if not form.is_valid():
return HttpResponseBadRequest()
proposal_vote = ProposalVote.objects.get(pk=self.kwargs['vote_id'])
a = ProposalVoteArgument.objects.create(argument=form.cleaned_data['argument'],
created_by=request.user, proposal_vote=proposal_vote)
self.object = a
context = self.get_context_data(arg=a, proposal=proposal_vote.proposal)
if proposal_vote.value == 1:
return render(request, 'issues/_pro_argument.html', context)
else:
return render(request, 'issues/_con_argument.html', context)
class ProposalMoreArgumentsView(DetailView):
model = models.Proposal
template_name = 'issues/_more_arguments_box.html'
def get_context_data(self, **kwargs):
d = super(ProposalMoreArgumentsView, self).get_context_data(**kwargs)
d['proposal'] = self.get_object()
d['user'] = self.request.user
return d
class ProposalArgumentsView(DetailView):
model = models.Proposal
template_name = 'issues/_vote_arguments.html'
context_object_name = 'proposal'
def get_context_data(self, **kwargs):
context = super(ProposalArgumentsView, self).get_context_data(**kwargs)
context['proposal'] = self.get_object()
context['user'] = self.request.user
return context
class ProposalVoteArgumentUpdateView(UpdateView):
model = models.ProposalVoteArgument
fields = ['argument', ]
def post(self, request, *args, **kwargs):
a = self.get_object()
if request.POST.get('argument', None):
a.argument = request.POST.get('argument')
a.save()
return HttpResponse(a.argument)
else:
return HttpResponse("")
class ProposalVoteArgumentDeleteView(DeleteView):
model = models.ProposalVoteArgument
success_url = ""
def post(self, request, *args, **kwargs):
o = self.get_object()
arg_id = o.id
o.delete()
return HttpResponse(arg_id)
def get_argument_value(request, committee_id, arg_id):
""" Return the value of the argument for editing """
arg_value = models.ProposalVoteArgument.objects.get(pk=arg_id)
return HttpResponse(arg_value.argument)
class ChangeBoardVoteStatusView(ProposalMixin, UpdateView):
required_permission_for_post = 'chairman_vote'
model = models.Proposal
def post(self, request, *args, **kwargs):
p = self.get_object()
if request.POST.get('val', None):
p.register_board_votes = request.POST.get('val') == '1'
p.save()
return json_response({'result': 'ok'})
else:
return json_response({'result': 'err'})
class AssignmentsView(ProposalMixin, ListView):
required_permission = 'viewopen_issue'
template_name = 'issues/assignment_list.html'
paginate_by = 75
def __init__(self, **kwargs):
super(AssignmentsView, self).__init__(**kwargs)
self.status = ''
def _get_order(self):
order_by = self.request.GET.get('ord', 'date')
if order_by == 'date':
order_by = '-due_by'
return order_by
def _add_status_qs(self, sqs):
self.status = self.request.GET.get('status', '')
if self.status:
if self.status == 'completed':
sqs = sqs.filter(task_completed=True)
else:
sqs = sqs.filter(task_completed=False)
if self.status == 'opened':
sqs = sqs.exclude(due_by__lt=date.today())
elif self.status == 'late':
sqs = sqs.filter(due_by__lt=date.today())
return sqs
def get_queryset(self):
term = self.request.GET.get('q', '').strip()
sqs = ConfidentialSearchQuerySet().models(Proposal).object_access_control(
user=self.request.user, committee=self.committee).filter(
active=True, committee=self.committee.id,
status=Proposal.statuses.ACCEPTED,
type=ProposalType.TASK).order_by(self._get_order())
sqs = self._add_status_qs(sqs)
if term:
sqs = sqs.filter(content=AutoQuery(term)) \
.filter_or(assignee__contains=term)
return sqs.load_all()
def get_context_data(self, **kwargs):
d = super(AssignmentsView, self).get_context_data(**kwargs)
search_query = self.request.GET.get('q', '').strip()
d['late'] = [p.id for p in list(self.get_queryset()) \
if not p.object.task_completed and p.due_by \
and p.due_by.date() < date.today()]
d['query'] = search_query
d['ord'] = self._get_order()
d['status'] = self.status
d['filter_as_link'] = d['is_paginated'] or d['status']
d['extra_arg'] = '&ord=' + d['ord'] + '&q=' + d['query'] + '&status=' + self.status
return d
class RulesMixin(CommitteeMixin):
def _get_rule_queryset(self):
qs = Proposal.objects.object_access_control(user=self.request.user,
committee=self.committee).filter(
active=True, issue__committee=self.committee,
status=Proposal.statuses.ACCEPTED,
type=ProposalType.RULE)
return qs
class ProceduresView(RulesMixin, ProposalMixin, ListView):
required_permission = 'viewopen_issue'
template_name = 'issues/procedure_list.html'
context_object_name = 'procedure_list'
paginate_by = 75
def __init__(self, **kwargs):
self.order_by = 'date'
super(ProceduresView, self).__init__(**kwargs)
def get_queryset(self):
term = self.request.GET.get('q', '').strip()
if not term:
# try search by tag
term = self.request.GET.get('t', '').strip()
self.order_by = self.request.GET.get('ord', 'date')
ord_term = '-decided_at' if self.order_by == 'date' else 'title'
sqs = ConfidentialSearchQuerySet().object_access_control(
user=self.request.user, committee=self.committee).filter(
active=True, committee=self.committee.id,
status=Proposal.statuses.ACCEPTED,
type=ProposalType.RULE).order_by(ord_term)
if term:
sqs = sqs.filter(content=AutoQuery(term))
return sqs.load_all()
def get_context_data(self, **kwargs):
def _sort_by_popularity(a, b):
return cmp(a[1], b[1])
d = super(ProceduresView, self).get_context_data(**kwargs)
alltags = {}
for p in self._get_rule_queryset():
for t in p.tags.names():
n = alltags.setdefault(t, 0)
alltags[t] = n + 1
sorted_tags = sorted(alltags.items(), _sort_by_popularity, reverse=True)
search_query = self.request.GET.get('q', '').strip()
tag_query = self.request.GET.get('t', '').strip()
d['sorted_tags'] = sorted_tags
d['query'] = search_query or tag_query
d['extra_arg'] = '&ord=' + self.order_by + '&q=' + d['query']
d['ord'] = self.order_by
d['active_tag'] = tag_query
d['tags_as_links'] = (not search_query and d['is_paginated']) or len(d['object_list']) == 0
return d
class AutoCompleteTagView(CommitteeMixin, View):
required_permission = 'editopen_issue'
def get(self, request, *args, **kwargs):
tag = request.GET.get('tag', '')
tag = tag.split(',')[-1].strip()
print 'T: ', tag
if not tag:
return HttpResponse(json.dumps([]))
json_tags = []
tags = set()
proposals = Proposal.objects.filter(
active=True, issue__committee=self.committee,
type=ProposalType.RULE)
for p in proposals:
tags.update(t for t in p.tags.names() if t.startswith(tag))
for t in tags:
json_tags.append({'tokens': [t, ], 'value': t})
# context = self.get_context_data(object_list=proposals)
        return HttpResponse(json.dumps(json_tags), content_type='application/json')
| bsd-3-clause | 5,895,209,078,495,821,000 | 37.366667 | 121 | 0.57239 | false |
ctogle/dilapidator | test/geometry/vec3_tests.py | 1 | 16823 | import dilap.geometry.tools as dpr
from dilap.geometry.vec3 import vec3
from dilap.geometry.quat import quat
import dilap.core.plotting as dtl
import matplotlib.pyplot as plt
import unittest,numpy,math,random
#python3 -m unittest discover -v ./ "*tests.py"
class test_vec3(unittest.TestCase):
# given a vec3, an op, and a res, verify that the op
# does not return an independent object, and that the result is correct
# NOTE: this is for methods which return vec3 objects
def same(self,op,one,res,*args,**kwargs):
self.assertTrue(one is one)
opres = one.__getattribute__(op)(*args,**kwargs)
self.assertTrue(opres is one)
self.assertEqual(one,res)
# given a vec3, an op, and a res, verify that the op
# does return an independent object, and that the result is correct
# NOTE: this is for methods which return vec3 objects
def diff(self,op,one,res,*args,**kwargs):
self.assertTrue(one is one)
opres = one.__getattribute__(op)(*args,**kwargs)
self.assertFalse(opres is one)
self.assertEqual(opres,res)
# given a vec3, an op, and a res,
# verify that the op does return the correct result
# verify the op does not modify the input vector
def comp(self,op,one,res,*args,**kwargs):
cp = one.cp()
opres = one.__getattribute__(op)(*args,**kwargs)
self.assertTrue(dpr.isnear(opres,res))
self.assertEqual(one,cp)
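    # For example, the rotation tests below use
    #   self.same('zrot',self.x,vec3(0,1,0),dpr.PI2)
    # to check that the op mutates and returns the same vec3 instance, while
    #   self.comp('d',self.x,math.sqrt(2),self.y)
    # checks a scalar-returning op without mutating its operand.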
def setUp(self):
self.origin = vec3(0,0,0)
self.one = vec3(1,1,1)
self.x = vec3(1,0,0)
self.y = vec3(0,1,0)
self.z = vec3(0,0,1)
self.basis = [self.x,self.y,self.z]
rd = random.random
self.r1 = vec3(rd()*10,rd()*10,rd()*10)
self.r2 = vec3(rd()*10,rd()*10,rd()*10)
self.r3 = vec3(rd()*10,rd()*10,rd()*10)
self.r4 = vec3(rd()*10,rd()*10,rd()*10)
self.rds = [self.r1,self.r2,self.r3,self.r4]
self.each = [self.origin,self.one]+self.basis+self.rds
def test_cp(self):
for e in self.each:self.diff('cp',e,e)
def test_cpxy(self):
for e in self.each:self.diff('cpxy',e,vec3(e.x,e.y,0))
#def test_cpr(self):
#def test_cpf(self):
def test_d(self):
for e in self.each:self.comp('d',e,e.mag(),self.origin)
for e in self.each:self.comp('d',e,0,e)
self.comp('d',self.x,math.sqrt(2),self.y)
self.comp('d',self.y,math.sqrt(2),self.z)
self.comp('d',self.z,math.sqrt(2),self.x)
def test_dxy(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(1,1,2),vec3(1,2,1),vec3(1,2,4)
self.assertTrue(dpr.isnear(v1.dxy(v1),0))
self.assertTrue(dpr.isnear(v1.dxy(v2),0))
self.assertTrue(dpr.isnear(v1.dxy(v3),1))
self.assertTrue(dpr.isnear(v1.dxy(v4),1))
self.assertTrue(dpr.isnear(v2.dxy(v3),1))
self.assertTrue(dpr.isnear(v2.dxy(v4),1))
self.assertTrue(dpr.isnear(v3.dxy(v4),0))
def test_ang(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(-1,1,0),vec3(-1,0,0),vec3(1,1,1)
self.assertTrue(dpr.isnear(v1.ang(v1),0))
self.assertTrue(dpr.isnear(v1.ang(v2),dpr.PI2))
self.assertTrue(dpr.isnear(v2.ang(v1),dpr.PI2))
self.assertTrue(dpr.isnear(v1.ang(v3),3*dpr.PI4))
self.assertTrue(dpr.isnear(v3.ang(v1),3*dpr.PI4))
self.assertTrue(dpr.isnear(v1.ang(v4),numpy.arctan(1.0/math.sqrt(2))))
self.assertTrue(dpr.isnear(v4.ang(v1),numpy.arctan(1.0/math.sqrt(2))))
v1.ang(vec3(0,0,0))
def test_sang(self):
p1,p2,p3,p4 = vec3(1,1,0),vec3(0,1,0),vec3(0,-1,0),vec3(0,0,1)
pn = vec3(0,0,1)
self.assertEqual(dpr.isnear(p1.sang(p2,pn), dpr.PI4),1)
self.assertEqual(dpr.isnear(p2.sang(p1,pn), dpr.PI4),0)
self.assertEqual(dpr.isnear(p2.sang(p1,pn),-dpr.PI4),1)
self.assertEqual(dpr.isnear(p2.sang(p3,pn), dpr.PI ),1)
self.assertEqual(dpr.isnear(p3.sang(p1,pn),dpr.threePI4),1)
def test_angxy(self):
v1,v2,v3,v4 = vec3(1,1,2),vec3(-1,1,-1),vec3(-1,0,0),vec3(1,1,1)
self.assertTrue(dpr.isnear(v1.angxy(v1),0))
self.assertTrue(dpr.isnear(v1.angxy(v2),dpr.PI2))
self.assertTrue(dpr.isnear(v2.angxy(v1),dpr.PI2))
self.assertTrue(dpr.isnear(v1.angxy(v3),3*dpr.PI4))
self.assertTrue(dpr.isnear(v3.angxy(v1),3*dpr.PI4))
self.assertTrue(dpr.isnear(v1.angxy(v4),0))
self.assertTrue(dpr.isnear(v4.angxy(v1),0))
'''#
def test_sang_xy(self):
p1,p2 = vec3(1,1,1),vec3(0,1,0)
meth,nr = gtl.sang_xy,gtl.isnear
self.assertEqual(nr(meth(p1,p2), gtl.PI4),1)
self.assertEqual(nr(meth(p2,p1), gtl.PI4),0)
self.assertEqual(nr(meth(p2,p1),-gtl.PI4),1)
def test_xang_xy(self):
p1,p2,p3 = vec3(1, 1,1),vec3(0, 1,0),vec3(0,-1,0)
meth,nr = gtl.xang_xy,gtl.isnear
self.assertEqual(nr(meth(p1),gtl.PI4),1)
self.assertEqual(nr(meth(p2),gtl.PI4),0)
self.assertEqual(nr(meth(p2),gtl.PI2),1)
self.assertEqual(nr(meth(p3),gtl.threePI2),1)
'''#
def test_dot(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(-1,1,0),vec3(-1,0,0),vec3(1,1,1)
self.assertEqual(dpr.isnear(v1.dot(v1), 2),1)
self.assertEqual(dpr.isnear(v1.dot(v2), 0),1)
self.assertEqual(dpr.isnear(v1.dot(v3),-1),1)
self.assertEqual(dpr.isnear(v1.dot(v4), 2),1)
self.assertEqual(dpr.isnear(v2.dot(v2), 2),1)
self.assertEqual(dpr.isnear(v2.dot(v3), 1),1)
self.assertEqual(dpr.isnear(v2.dot(v4), 0),1)
self.assertEqual(dpr.isnear(v3.dot(v3), 1),1)
self.assertEqual(dpr.isnear(v3.dot(v4),-1),1)
self.assertEqual(dpr.isnear(v4.dot(v4), 3),1)
def test_crs(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(-1,1,0),vec3(-1,0,0),vec3(1,1,1)
self.assertEqual(v1.crs(v1),vec3(0,0,0))
self.assertEqual(v2.crs(v2),vec3(0,0,0))
self.assertEqual(v3.crs(v3),vec3(0,0,0))
self.assertEqual(v4.crs(v4),vec3(0,0,0))
self.assertEqual(v1.crs(v2),vec3(0,0,2))
self.assertEqual(v1.crs(v3),vec3(0,0,1))
self.assertEqual(v1.crs(v4),vec3(1,-1,0))
self.assertEqual(v2.crs(v3),vec3(0,0,1))
self.assertEqual(v2.crs(v4),vec3(1,1,-2))
self.assertEqual(v3.crs(v4),vec3(0,1,-1))
self.assertEqual(v1.crs(v2),v2.crs(v1).flp())
self.assertEqual(v1.crs(v3),v3.crs(v1).flp())
self.assertEqual(v1.crs(v4),v4.crs(v1).flp())
def test_prj(self):
p1,pn1 = vec3(-1,1,0),vec3(-1,0,0)
p2,pn2 = vec3(3,-5,2),vec3(0,1,0)
v1,v2,v3 = vec3(1,1,0),vec3(2,-10,5),vec3(0,1,-1)
self.assertEqual(v1.cp().prj(p1,pn1),vec3(-1,1,0))
self.assertEqual(v2.cp().prj(p1,pn1),vec3(-1,-10,5))
self.assertEqual(v3.cp().prj(p1,pn1),vec3(-1,1,-1))
self.assertEqual(v1.cp().prj(p2,pn2),vec3(1,-5,0))
self.assertEqual(v2.cp().prj(p2,pn2),vec3(2,-5,5))
self.assertEqual(v3.cp().prj(p2,pn2),vec3(0,-5,-1))
self.assertTrue(v1.prj(p1,pn1) is v1)
#def test_prjps(self):
#def test_baryxy(self):
def test_inneighborhood(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(1,2,0),vec3(1,2,1),vec3(1,1,1)
self.assertEqual(v1.inneighborhood(v2,1.00),0)
self.assertEqual(v1.inneighborhood(v2,1.01),1)
self.assertEqual(v1.inneighborhood(v3,1.00),0)
self.assertEqual(v1.inneighborhood(v3,1.01),0)
self.assertEqual(v1.inneighborhood(v4,1.00),0)
self.assertEqual(v1.inneighborhood(v4,1.01),1)
def test_isnear(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(1,1,0.1),vec3(1,1,1),vec3(1.000001,1,1)
self.assertEqual(v1.isnear(v1),1)
self.assertEqual(v3.isnear(v3),1)
self.assertEqual(v1.isnear(v2),0)
self.assertEqual(v2.isnear(v1),0)
self.assertEqual(v1.isnear(v3),0)
self.assertEqual(v2.isnear(v3),0)
self.assertEqual(v2.isnear(v4),0)
self.assertEqual(v3.isnear(v4),1)
def test_isnearxy(self):
v1,v2,v3,v4 = vec3(1,1,0),vec3(1.1,1,0.1),vec3(1,1,1),vec3(1.000001,1,1)
self.assertEqual(v1.isnearxy(v1),1)
self.assertEqual(v3.isnearxy(v3),1)
self.assertEqual(v1.isnearxy(v2),0)
self.assertEqual(v2.isnearxy(v1),0)
self.assertEqual(v1.isnearxy(v3),1)
self.assertEqual(v2.isnearxy(v3),0)
self.assertEqual(v2.isnearxy(v4),0)
self.assertEqual(v3.isnearxy(v4),1)
def test_inbxy(self):
py = vec3(1,1,0).sq(2,2)
self.assertFalse(vec3(-1,-1,0).inbxy(py))
self.assertFalse(vec3(0,0,0).inbxy(py))
self.assertFalse(vec3(1,0,0).inbxy(py))
self.assertTrue(vec3(1,1,0).inbxy(py))
def test_intrixy(self):
a = vec3(-93.6169662475586, 46.23309326171875, 0.0)
b = vec3(28.083663940429688, 48.28422546386719, 0.0)
c = vec3(25.696874618530273, 48.28422546386719, 0.0)
p = vec3(-93.34214782714844, 43.73178482055664, 0.0)
i = p.intrixy(a,b,c)
self.assertFalse(i)
ax = dtl.plot_axes_xy(100)
ax = dtl.plot_points_xy((a,b,c,p),ax)
ax = dtl.plot_polygon_xy((a,b,c),ax,lw = 2,col = 'g')
plt.show()
#def test_onsxy(self):
'''#
def test_onseg_xy(self):
p1,p2,p3 = vec3(1,1,0),vec3(0,1,0),vec3(2,2,0)
s1,s2 = vec3(0,0,0),vec3(2,2,0)
self.assertEqual(gtl.onseg_xy(p1,s1,s2),1)
self.assertEqual(gtl.onseg_xy(p2,s1,s2),0)
self.assertEqual(gtl.onseg_xy(p3,s1,s2),1)
def test_inseg_xy(self):
p1,p2,p3 = vec3(1,1,0),vec3(0,1,0),vec3(2,2,0)
s1,s2 = vec3(0,0,0),vec3(2,2,0)
self.assertEqual(gtl.inseg_xy(p1,s1,s2),1)
self.assertEqual(gtl.inseg_xy(p2,s1,s2),0)
self.assertEqual(gtl.inseg_xy(p3,s1,s2),0)
'''#
def test_onbxy(self):
py = vec3(1,1,0).sq(2,2)
self.assertFalse(vec3(-1,-1,0).onbxy(py))
self.assertTrue(vec3(0,0,0).onbxy(py))
self.assertTrue(vec3(1,0,0).onbxy(py))
self.assertFalse(vec3(1,1,0).onbxy(py))
self.assertTrue(vec3(2,0,0).onbxy(py))
#def test_onpxy(self):
def test_mag2(self):
v1,v2,v3 = vec3(1,0,0),vec3(1,1,1),vec3(2,5,11)
self.assertEqual(dpr.isnear(v1.mag2(),1),1)
self.assertEqual(dpr.isnear(v2.mag2(),3),1)
self.assertEqual(dpr.isnear(v3.mag2(),150),1)
def test_mag(self):
v1,v2,v3 = vec3(1,0,0),vec3(1,1,1),vec3(2,5,11)
self.assertEqual(dpr.isnear(v1.mag(),1),1)
self.assertEqual(dpr.isnear(v2.mag(),math.sqrt(3)),1)
self.assertEqual(dpr.isnear(v3.mag(),math.sqrt(150)),1)
def test_nrm(self):
v1,v2,v3 = vec3(1,0,0),vec3(1,2,5),vec3(10,20,50)
self.assertTrue(v1.nrm() == v1)
self.assertTrue(v1.nrm() is v1)
self.assertTrue(v2.nrm() == v3.nrm())
self.assertFalse(v2.nrm() is v3.nrm())
self.assertFalse(v2.nrm() == v1.nrm())
def test_trn(self):
v1,v2 = vec3(-1,2,5),vec3(-12,24,60)
self.assertEqual(v1.cp().trn(v2),vec3(-13,26,65))
self.assertEqual(v2.cp().trn(v1),vec3(-13,26,65))
self.assertTrue(v1.trn(v2) is v1)
def test_xtrn(self):
v1 = vec3(-1,2,5)
self.assertEqual(v1.cp().xtrn(2),vec3(1,2,5))
self.assertEqual(v1.xtrn(2),vec3(1,2,5))
self.assertEqual(v1.xtrn(-5),vec3(-4,2,5))
self.assertTrue(v1.xtrn(2) is v1)
def test_ytrn(self):
v1 = vec3(-1,2,5)
self.assertEqual(v1.cp().ytrn(2),vec3(-1,4,5))
self.assertEqual(v1.ytrn(2),vec3(-1,4,5))
self.assertEqual(v1.ytrn(-5),vec3(-1,-1,5))
self.assertTrue(v1.ytrn(2) is v1)
def test_ztrn(self):
v1 = vec3(-1,2,5)
self.assertEqual(v1.cp().ztrn(2),vec3(-1,2,7))
self.assertEqual(v1.ztrn(2),vec3(-1,2,7))
self.assertEqual(v1.ztrn(-5),vec3(-1,2,2))
self.assertTrue(v1.ztrn(2) is v1)
def test_scl(self):
v1,v2,v3 = vec3(1,1,0),vec3(2,-10,5),vec3(0,1,-1)
self.assertEqual(v1*v1,vec3(1,1,0))
self.assertEqual(v2*v2,vec3(4,100,25))
self.assertEqual(v3*v3,vec3(0,1,1))
self.assertEqual(v1*v2,vec3(2,-10,0))
self.assertEqual(v1*v3,vec3(0,1,0))
self.assertEqual(v2*v1,vec3(2,-10,0))
self.assertEqual(v2*v3,vec3(0,-10,-5))
self.assertEqual(v3*v1,vec3(0,1,0))
self.assertEqual(v3*v2,vec3(0,-10,-5))
self.assertTrue(v1.scl(v2) is v1)
self.assertEqual(v1,vec3(2,-10,0))
def test_uscl(self):
v1,v2 = vec3(-1,2,5),vec3(-12,24,60)
self.assertTrue(v1.uscl(12) == v2)
self.assertTrue(v1.uscl(12) is v1)
self.assertFalse(v1.uscl(12) is v2)
self.assertTrue(v1.uscl(12) == v1)
def test_xscl(self):
self.same('xscl',self.one,vec3(4,1,1),4)
self.same('xscl',self.origin,vec3(0,0,0),4)
self.same('xscl',self.z,vec3(0,0,1),4)
def test_yscl(self):
self.same('yscl',self.one,vec3(1,4,1),4)
self.same('yscl',self.origin,vec3(0,0,0),4)
self.same('yscl',self.z,vec3(0,0,1),4)
def test_zscl(self):
self.same('zscl',self.one,vec3(1,1,4),4)
self.same('zscl',self.origin,vec3(0,0,0),4)
self.same('zscl',self.z,vec3(0,0,4),4)
def test_rot(self):
v1,v2 = vec3(0,2,0),vec3(-2,0,0)
q1 = quat(0,0,0,0).av(dpr.PI2,vec3(0,0,1))
q2 = quat(0,0,0,0).av(0,vec3(0,0,1))
self.assertEqual(v1.rot(q1),v2)
self.assertEqual(v1.cp().rot(q2),v1)
#def test_fulc(self):
#def test_cowxy(self):
def test_xrot(self):
self.same('xrot',self.origin,vec3(0,0,0),dpr.PI2)
self.same('xrot',self.one,vec3(1,-1,1),dpr.PI2)
self.same('xrot',self.x,vec3(1,0,0),dpr.PI2)
self.same('xrot',self.y,vec3(0,0,1),dpr.PI2)
self.same('xrot',self.z,vec3(0,-1,0),dpr.PI2)
def test_yrot(self):
self.same('yrot',self.origin,vec3(0,0,0),dpr.PI2)
self.same('yrot',self.one,vec3(1,1,-1),dpr.PI2)
self.same('yrot',self.x,vec3(0,0,-1),dpr.PI2)
self.same('yrot',self.y,vec3(0,1,0),dpr.PI2)
self.same('yrot',self.z,vec3(1,0,0),dpr.PI2)
def test_zrot(self):
self.same('zrot',self.origin,vec3(0,0,0),dpr.PI2)
self.same('zrot',self.one,vec3(-1,1,1),dpr.PI2)
self.same('zrot',self.x,vec3(0,1,0),dpr.PI2)
self.same('zrot',self.y,vec3(-1,0,0),dpr.PI2)
self.same('zrot',self.z,vec3(0,0,1),dpr.PI2)
def test_flp(self):
v1,v2 = vec3(-1,-2,-5),vec3(1,2,5)
self.assertTrue(v1.flp() == v1)
self.assertFalse(v1.cp() == v1.flp())
self.assertTrue(v1.flp() is v1)
self.assertFalse(v1.flp() is v2)
self.assertTrue(v1.flp() == v2)
def test_tov(self):
v1,v2 = vec3(1,-2,1),vec3(1,2,5)
self.assertEqual(v1.tov(v1),vec3(0,0,0))
self.assertEqual(v1.tov(v2),vec3(0, 4, 4))
self.assertEqual(v2.tov(v1),vec3(0,-4,-4))
self.assertEqual(v2.tov(v2),vec3(0,0,0))
def test_tovxy(self):
v1,v2 = vec3(1,-2,1),vec3(1,2,5)
self.assertEqual(v1.tovxy(v1),vec3(0,0,0))
self.assertEqual(v1.tovxy(v2),vec3(0, 4,0))
self.assertEqual(v2.tovxy(v1),vec3(0,-4,0))
self.assertEqual(v2.tovxy(v2),vec3(0,0,0))
def test_mid(self):
v1,v2 = vec3(0,2,0),vec3(-1,0,1)
v3,v4 = vec3(-0.5,1,0.5),vec3(-0.75,0.5,0.75)
self.assertEqual(v1.mid(v2),v3)
self.assertEqual(v2.mid(v3),v4)
def test_lerp(self):
v1,v2 = vec3(0,2,0),vec3(-1,0,1)
v3,v4 = vec3(-0.75,0.5,0.75),vec3(0,2,0)
self.assertEqual(v1.lerp(v2,0.75),v3)
self.assertFalse(v1.lerp(v2,0) is v1)
self.assertEqual(v1.lerp(v2,0),v1)
self.assertFalse(v1.lerp(v2,1) is v2)
self.assertEqual(v1.lerp(v2,1),v2)
def test_pline(self):
pline = self.origin.pline(self.one,2)
d1 = self.origin.d(pline[0])
d2 = pline[0].d(pline[1])
d3 = pline[1].d(self.one)
self.assertEqual(len(pline),2)
self.assertTrue(dpr.isnear(d1,d2))
self.assertTrue(dpr.isnear(d2,d3))
self.assertTrue(self.origin.mid(self.one),pline[0].mid(pline[1]))
def test_spline(self):
e = vec3(10,10,1)
t1,t2 = vec3(1,0,0),vec3(0,-1,0)
pline = self.origin.spline(e,t1,t2,5)
ax = dtl.plot_axes(10)
ax = dtl.plot_edges(pline,ax,lw = 3,col = 'b')
plt.show()
def test_pring(self):
p,r,n = vec3(0,0,0),4,8
ps = p.pring(r,n)
pm = ps[0].mid(ps[1])
alpha = numpy.pi*(2.0/n)
self.assertTrue(len(ps) == n)
self.assertTrue(p.d(ps[0].mid(ps[1])) == r)
self.assertTrue(dpr.isnear(ps[0].d(ps[1]),2*r*numpy.tan(alpha/2.0)))
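        # 2*r*tan(alpha/2) is the edge length of a regular n-gon whose
        # inscribed-circle radius (apothem) is r, matching the midpoint
        # distance asserted above.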
#def test_sq(self):
#def test_com(self):
if __name__ == '__main__':
unittest.main()
| mit | -3,662,958,737,619,010,600 | 36.88964 | 80 | 0.579326 | false |
csadorf/signac | signac/common/validate.py | 1 | 1660 | # Copyright (c) 2017 The Regents of the University of Michigan
# All rights reserved.
# This software is licensed under the BSD 3-Clause License.
import logging
from .configobj.validate import Validator
from .configobj.validate import VdtValueError
logger = logging.getLogger(__name__)
def version(value, *args, **kwargs):
try:
if isinstance(value, str):
return tuple((int(v) for v in value.split(',')))
else:
return tuple((int(v) for v in value))
except Exception:
raise VdtValueError(value)
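# For illustration: version('0,1,0') and version([0, 1, 0]) both yield the
# tuple (0, 1, 0); anything that cannot be coerced raises VdtValueError.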
def mongodb_uri(value, *args, **kwargs):
if isinstance(value, list):
value = ','.join(value)
if not value.startswith('mongodb://'):
value = 'mongodb://' + value
try:
import pymongo
except ImportError:
logger.debug("Install pymongo to validate database configurations!")
else:
try:
pymongo.uri_parser.parse_uri(value)
except pymongo.errors.InvalidURI:
raise VdtValueError(value)
return value
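# For illustration: mongodb_uri('localhost:27017') returns
# 'mongodb://localhost:27017', and a list such as ['h1:27017', 'h2:27017']
# is first joined into a comma-separated multi-host URI.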
def password(value, *args, **kwargs):
return value
def get_validator():
return Validator({
'version': version,
'mongodb_uri': mongodb_uri,
'password': password,
})
cfg = """
workspace_dir = string(default='workspace')
project = string()
signac_version = version(default='0,1,0')
[General]
default_host = string()
[hosts]
[[__many__]]
url = mongodb_uri(default='localhost')
auth_mechanism = option('none', 'SCRAM-SHA-1', default='none')
username = string()
password = password()
db_auth = string(default='admin')
[[[password_config]]]
salt = string()
rounds = integer()
"""
| bsd-3-clause | 8,211,791,766,496,821,000 | 22.714286 | 76 | 0.640964 | false |
zacharylawrence/ENEE408I-Team-9 | pi/arduino.py | 1 | 4379 | #!/usr/bin/env python
# encoding: utf-8
"""
Control All Arduino Functions
"""
from pymata_aio.pymata3 import PyMata3
from pymata_aio.constants import Constants
import constants
class Arduino():
# Define Pin Constants
# SPI (for pixy) uses pins 10-13
_MOTOR1 = 3
_MOTOR1_DIR_A = 2
_MOTOR1_DIR_B = 4
_MOTOR2 = 6
_MOTOR2_DIR_A = 7
_MOTOR2_DIR_B = 8
# Note: ping sensor shouldn't have to be PWM
_PING = 5
_SERVO = 9
# Analog Pins
_IR_LEFT = 0
_IR_MID = 1
_IR_RIGHT = 2
def __init__(self):
# Instantiate the pymata_core API
self.board = PyMata3(sleep_tune=0)
# Set the pin modes
self.board.set_pin_mode(self._MOTOR1, Constants.PWM)
self.board.set_pin_mode(self._MOTOR1_DIR_A, Constants.OUTPUT)
self.board.set_pin_mode(self._MOTOR1_DIR_B, Constants.OUTPUT)
self.board.set_pin_mode(self._MOTOR2, Constants.PWM)
self.board.set_pin_mode(self._MOTOR2_DIR_A, Constants.OUTPUT)
self.board.set_pin_mode(self._MOTOR2_DIR_B, Constants.OUTPUT)
self.board.set_pin_mode(self._IR_LEFT, Constants.INPUT)
self.board.enable_analog_reporting(self._IR_LEFT)
self.board.set_pin_mode(self._IR_MID, Constants.INPUT)
self.board.enable_analog_reporting(self._IR_MID)
self.board.set_pin_mode(self._IR_RIGHT, Constants.INPUT)
self.board.enable_analog_reporting(self._IR_RIGHT)
self.board.sonar_config(self._PING, self._PING)
self.board.pixy_init(max_blocks=constants.MAX_PIXY_BLOCKS)
self.board.keep_alive(period=2)
self.board.servo_config(self._SERVO)
# Set default state
self.set_motors(0, 0)
self.open_claw()
def set_motors(self, motor1, motor2):
if (motor1 < -1 or motor1 > 1 or motor2 < -1 or motor2 > 1):
raise ValueError("set_motor called with (motor1=" + str(motor1) + ") and (motor2=" + str(motor2) + ")")
# print("Setting Motor 1 to: " + str(motor1))
# print("Setting Motor 2 to: " + str(motor2))
# Set motor directions
self.board.digital_write(self._MOTOR1_DIR_A, 0 if (motor1 < 0) else 1)
self.board.digital_write(self._MOTOR1_DIR_B, 1 if (motor1 < 0) else 0)
self.board.digital_write(self._MOTOR2_DIR_A, 1 if (motor2 < 0) else 0)
self.board.digital_write(self._MOTOR2_DIR_B, 0 if (motor2 < 0) else 1)
# Set motor speeds
self.board.analog_write(self._MOTOR1, int(abs(motor1) * 255))
self.board.analog_write(self._MOTOR2, int(abs(motor2) * 255))
def set_servo(self, servo):
self.board.analog_write(self._SERVO, servo)
def close_claw(self):
self.board.analog_write(self._SERVO, 100) # Used to be 75
self.board.sleep(constants.CLOSE_CLAW_PAUSE)
def open_claw(self):
self.board.analog_write(self._SERVO, 150)
self.board.sleep(constants.OPEN_CLAW_PAUSE)
# Get the ping sensor's distance in cm
def get_ping(self):
return self.board.sonar_data_retrieve(self._PING)
# Returns the value from the pixy camera
def get_pixy_blocks(self):
blocks = self.board.pixy_get_blocks()
print(blocks)
        if len(blocks) > 0 and "signature" not in blocks[0]:
print("Malformed pixy block!!")
return None
# for block_index in range(len(blocks)):
# block = blocks[block_index]
# print(" block {}: sig: {} x: {} y: {} width: {} height: {}".format(
# block_index, block["signature"], block["x"], block["y"], block["width"], block["height"]))
# print("\n")
return blocks
def print_ir(self):
print(str(self.board.analog_read(self._IR_LEFT)) + " | " +
str(self.board.analog_read(self._IR_MID)) + " | " +
str(self.board.analog_read(self._IR_RIGHT)) + " | " +
str(self.get_ping()))
def get_ir_left(self):
return self.board.analog_read(self._IR_LEFT)
def get_ir_mid(self):
return self.board.analog_read(self._IR_MID)
def get_ir_right(self):
return self.board.analog_read(self._IR_RIGHT)
def ir_wall(self):
return (self.get_ir_left() >= constants.IR_WALL_THRESHOLD or
self.get_ping() <= constants.PING_WALL_THRESHOLD or
self.get_ir_right() >= constants.IR_WALL_THRESHOLD)
def ir_wall_target(self):
return (self.get_ir_left() >= constants.IR_WALL_THRESHOLD or
self.get_ir_mid() >= constants.IR_WALL_THRESHOLD or
self.get_ir_right() >= constants.IR_WALL_THRESHOLD)
def shutdown(self):
# Reset the board and exit
self.board.shutdown()
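# Example usage (a minimal sketch; assumes an Arduino running FirmataPlus and
# wired as described by the pin constants above):
#
#     robot = Arduino()
#     robot.set_motors(0.5, 0.5)   # drive both motors forward at half speed
#     print(robot.get_ping())      # obstacle distance in cm
#     robot.close_claw()
#     robot.shutdown()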
| mit | -2,159,931,330,257,782,800 | 29.622378 | 109 | 0.651975 | false |
imran1008/pce | files/lio.py | 1 | 18155 | #!/bin/python
import os
import re
import subprocess
import sys
re_ls_obj = re.compile('.*o- ([^ ]*) .*')
re_tpg_num = re.compile('.*o- tpg([0-9]*).*')
re_lun_num = re.compile('.*o- lun([0-9]*).*fileio/([^ ]*)\s.*')
def save_config(env):
subprocess.call(['targetcli', 'saveconfig'], stdout=open(os.devnull, 'wb'), env=env)
def ls(path, filter, env):
out = subprocess.Popen(["targetcli", "ls", path, "1"], stdout=subprocess.PIPE, env=env)
first_skipped = False
obj_list = []
    for raw_line in out.stdout:
        if not first_skipped:
            first_skipped = True
            continue
        line = raw_line.decode('utf-8')
if filter:
filtered_obj_name = filter.match(line).groups()
obj_list.append(filtered_obj_name)
else:
obj_name = re_ls_obj.match(line).group(1)
obj_list.append(obj_name)
return obj_list
def get_tpgs(target, env):
return ls('/iscsi/' + target, re_tpg_num, env)
def ensure_root():
if os.getuid() != 0:
print("targetcli requires root privileges")
exit(1)
def find_unused_lun(state, installed_luns):
if not ('empty_slot' in state):
state['empty_slot'] = 1
state['search_installed_luns'] = True
# If we already know that we won't find an unused slot in the installed
# LUNs, we can just increment the empty_slot count by one and return it
if not state['search_installed_luns']:
empty_slot = state['empty_slot']
state['empty_slot'] += 1
return empty_slot
did_full_scan = True
current_lun = 0
# Locate the first LUN slot that is unused
for lun,_ in installed_luns:
current_lun = int(lun)
# If the current lun in the list is less than our starting point,
# we skip the iteration
if current_lun < state['empty_slot']:
continue
# The slot we were checking is filled, we need to move to the next
# slot and try again
elif current_lun == state['empty_slot']:
state['empty_slot'] += 1
# We found a gap!
else:
did_full_scan = False
break
if did_full_scan:
state['search_installed_luns'] = False
empty_slot = state['empty_slot']
state['empty_slot'] += 1
return empty_slot
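# Worked example (hypothetical data): with installed LUN slots [0, 1, 3], the
# first call returns 2 (the gap), the second returns 4, and later calls keep
# incrementing (5, 6, ...) once the installed list has been exhausted.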
# Target configuration
def print_target_usage(arg0):
print("usage: " + arg0 + " target add [<target>]")
print(" " + arg0 + " target remove <target>\n")
print("If the 'target' name is not specified when adding a new object,")
print("an auto-generated name will be used")
def process_target(sub_cmd, target, env):
subprocess.call(['targetcli', 'set', 'global', 'auto_add_default_portal=false'], env=env)
subprocess.call(['targetcli', 'set', 'global', 'auto_add_mapped_luns=true'], env=env)
if sub_cmd == 'add':
ensure_root()
if target != None:
subprocess.call(['targetcli', '/iscsi', 'create', target], env=env)
else:
subprocess.call(['targetcli', '/iscsi', 'create'], env=env)
elif sub_cmd == 'remove':
ensure_root()
subprocess.call(['targetcli', '/iscsi', 'delete', target], env=env)
else:
exit(1)
# LUN mapping configuration
def print_map_usage(arg0):
print("usage: " + arg0 + " <path> <target> (<backstore name> <backstore size>)...\n")
print("The 'backstore name' and 'backstore size' must be provided in pairs")
def parse_input_list(args):
input_list = []
name = None
size = None
for arg in args:
if not name:
name = arg
else:
size = arg
input_list.append({
'name': name,
'size': size
})
name = None
size = None
return input_list
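# Example: parse_input_list(['disk0', '10G', 'disk1', '20G']) returns
# [{'name': 'disk0', 'size': '10G'}, {'name': 'disk1', 'size': '20G'}].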
def process_map(path, target, dry_run, input_list, env):
# Get the first TPG of the target
tpgs = get_tpgs(target, env)
tpg = tpgs[0][0]
# Get a list of LUNs of the first TPG
installed_luns = ls('/iscsi/' + target + '/tpg' + tpg + '/luns', re_lun_num, env)
# Create a list of additions.
search_state = {}
additions = []
for backstore in input_list:
found = False
for lun in installed_luns:
if backstore['name'] == lun[1]:
found = True
break
if not found:
empty_slot = find_unused_lun(search_state, installed_luns)
additions.append([empty_slot, backstore])
# Create a list of removals.
removals = installed_luns[:]
for backstore in input_list:
for x in removals:
if x[1] == backstore['name']:
removals.remove(x)
break
# Add new LUNs
if len(additions) > 0:
print("Add LUNs:")
for addition in additions:
lun = str(addition[0])
name = addition[1]['name']
size = addition[1]['size']
print('[' + lun + "] = " + name)
if not dry_run:
process_backstore('add', path, name, size, env)
process_lun('add', target, lun, name, None, None, env)
# Remove LUNs
if len(removals) > 0:
print("Removed LUNs:")
for removal in removals:
lun = str(removal[0])
name = removal[1]
print('[' + lun + "] = " + name)
if not dry_run:
process_backstore('remove', path, name, None, env)
return not(dry_run)
# Backstore configuration
def print_backstore_usage(arg0):
print("usage: " + arg0 + " backstore add <path> <backstore> [<size>]")
print(" " + arg0 + " backstore remove <path> <backstore>")
print(" " + arg0 + " backstore rename <path> <target> <old name> <new name>")
print(" " + arg0 + " backstore copy <path> <target> <old name> <new name>")
def process_backstore(sub_cmd, path, backstore, size, env):
if sub_cmd == 'add':
ensure_root()
if size != None:
subprocess.call(['targetcli', '/backstores/fileio', 'create', backstore, path + '/' + backstore, size],
env=env)
else:
subprocess.call(['targetcli', '/backstores/fileio', 'create', backstore, path + '/' + backstore],
env=env)
elif sub_cmd == 'remove':
ensure_root()
subprocess.call(['targetcli', '/backstores/fileio', 'delete', backstore], env=env)
os.remove(path + '/' + backstore)
else:
        print_backstore_usage(sys.argv[0])
exit(1)
def process_copy(path, target, old_name, new_name, remove_old, env):
# Get the first TPG of the target
tpgs = get_tpgs(target, env)
tpg = tpgs[0][0]
# Get a list of LUNs of the first TPG
installed_luns = ls('/iscsi/' + target + '/tpg' + tpg + '/luns', re_lun_num, env)
# Ensure the new name is unique
for lun in installed_luns:
if new_name == lun[1]:
print("Error: The destination object already exists!")
exit(1)
# Find an empty LUN slot or get a new one
search_state = {}
lun = find_unused_lun(search_state, installed_luns)
# Perform a copy-on-write duplicate of the backstore
subprocess.call(['cp', '--reflink=auto', path + '/' + old_name, path + '/' + new_name])
# Add the new backstore
process_backstore('add', path, new_name, None, env)
process_lun('add', target, str(lun), new_name, None, None, env)
# Remove the old backstore
if remove_old:
process_backstore('remove', path, old_name, None, env)
print("\nIMPORTANT!!")
print("------------")
print("Update your config.py to reflect the new name")
# LUN configuration
def print_lun_usage(arg0):
print("usage: " + arg0 + " lun <add/remove> <target> <lun> [<backstore>] [<initiator_iqn> <mapped_lun>]\n")
print("The 'backstore' object name must be specified when adding a LUN. The object name must correspond")
print("to a fileio object. If the 'initiator_iqn' and 'mapped_lun' aren't specified, the lun mapping")
print(" will be applied to all initiators")
def process_lun(sub_cmd, target, lun, backstore, initiator_iqn, mapped_lun, env):
if sub_cmd == 'add':
ensure_root()
tpgs = get_tpgs(target, env)
if initiator_iqn != None and mapped_lun != None:
for tpg in tpgs:
subprocess.call(['targetcli',
'/iscsi/' + target + '/tpg' + tpg[0] + '/luns',
'create',
'/backstores/fileio/' + backstore,
lun,
'false'],
stdout=open(os.devnull, 'wb'), env=env)
subprocess.call(['targetcli',
'/iscsi/' + target + '/tpg' + tpg[0] + '/acls/' + initiator_iqn,
'create',
mapped_lun,
lun],
stdout=open(os.devnull, 'wb'), env=env)
else:
for tpg in tpgs:
subprocess.call(['targetcli',
'/iscsi/' + target + '/tpg' + tpg[0] + '/luns',
'create',
'/backstores/fileio/' + backstore,
lun],
stdout=open(os.devnull, 'wb'), env=env)
elif sub_cmd == 'remove':
ensure_root()
tpgs = get_tpgs(target, env)
for tpg in tpgs:
subprocess.call(['targetcli', '/iscsi/' + target + '/tpg' + tpg[0] + '/luns', 'delete', lun],
stdout=open(os.devnull, 'wb'), env=env)
else:
exit(1)
# Portal configuration
def print_portal_usage(arg0):
print("usage: " + arg0 + " portal <add/remove> <target> <tpg> <ip-address>")
def process_portal(sub_cmd, target, tpg, ip, env):
if sub_cmd == 'add':
ensure_root()
subprocess.call(['targetcli', '/iscsi/' + target + '/tpg' + tpg + '/portals', 'create', ip, '3260'], env=env)
elif sub_cmd == 'remove':
ensure_root()
subprocess.call(['targetcli', '/iscsi/' + target + '/tpg' + tpg + '/portals', 'delete', ip, '3260'], env=env)
else:
exit(1)
# Initiator configuration
def print_acl_usage(arg0):
print("usage: " + arg0 + " acl <add/remove> <initiator> [<userid>] [<password>] [<in_userid>] [<in_password>]\n")
print("The credentials are required only when adding a new ACL object")
def set_acl_auth(target, initiator, tpg, name, value, env):
subprocess.call(['targetcli',
'/iscsi/' + target + '/tpg' + tpg + '/acls/' + initiator,
'set',
'auth',
name + '=' + value], env=env)
def process_acl(sub_cmd, target, initiator, userid, password, in_userid, in_password, env):
if sub_cmd == 'add':
ensure_root()
tpgs = get_tpgs(target, env)
for tpg in tpgs:
tpg_num = tpg[0]
subprocess.call(['targetcli', '/iscsi/' + target + '/tpg' + tpg_num + '/acls', 'create', initiator], env=env)
set_acl_auth(target, initiator, tpg_num, 'userid', userid, env)
set_acl_auth(target, initiator, tpg_num, 'password', password, env)
set_acl_auth(target, initiator, tpg_num, 'mutual_userid', in_userid, env)
set_acl_auth(target, initiator, tpg_num, 'mutual_password', in_password, env)
elif sub_cmd == 'remove':
ensure_root()
tpgs = get_tpgs(target, env)
for tpg in tpgs:
subprocess.call(['targetcli', '/iscsi/' + target + '/tpg' + tpg[0] + '/acls', 'delete', initiator], env=env)
else:
exit(1)
# Target Portal Group configuration
def print_tpg_usage(arg0):
print("usage: " + arg0 + " tpg <add/remove> <target> <tpg>")
def process_tpg(sub_cmd, target, tpg, env):
if sub_cmd == 'add':
ensure_root()
subprocess.call(['targetcli', '/iscsi/' + target, 'create', tpg], env=env)
subprocess.call(['targetcli', '/iscsi/' + target + '/tpg' + tpg, 'set', 'attribute', 'authentication=1'], env=env)
elif sub_cmd == 'remove':
ensure_root()
subprocess.call(['targetcli', '/iscsi/' + target, 'delete', tpg], env=env)
else:
exit(1)
def print_general_usage(arg0):
print("usage: " + arg0 + " <command>\n")
print(" target add/remove a target")
print(" backstore add/remove/rename a backstore disk")
print(" tpg add/remove a TPG for the specified target")
print(" portal add/remove a portal in the specified TPG")
print(" acl add/remove an ACL for all TPGs in the specified target")
print(" lun add/remove a LUN for all ACLs in the specified target")
print(" map update the lun mappings to the specified backstores")
def main(argv):
env = os.environ
env["TERM"] = "dumb"
arg0 = argv[0]
if len(argv) < 2:
print_general_usage(arg0)
exit(1)
cmd = argv[1]
if cmd == 'lun':
        if len(argv) < 5:
print_lun_usage(arg0)
exit(1)
sub_cmd = argv[2]
target = argv[3]
lun = argv[4]
backstore = None
initiator_iqn = None
mapped_lun = None
if sub_cmd == 'add':
if len(argv) == 6:
backstore = argv[5]
            elif len(argv) == 8:
backstore = argv[5]
initiator_iqn = argv[6]
mapped_lun = argv[7]
else:
print_lun_usage(arg0)
exit(1)
elif sub_cmd == 'remove':
if len(argv) != 5:
print_lun_usage(arg0)
exit(1)
else:
print_lun_usage(arg0)
exit(1)
process_lun(sub_cmd, target, lun, backstore, initiator_iqn, mapped_lun, env)
save_config(env)
elif cmd == 'backstore':
if len(argv) < 5:
print_backstore_usage(arg0)
exit(1)
sub_cmd = argv[2]
path = argv[3]
if sub_cmd == 'add':
backstore = argv[4]
size = None
if len(argv) >= 6:
size = argv[5]
process_backstore(sub_cmd, path, backstore, size, env)
elif sub_cmd == 'remove':
backstore = argv[4]
process_backstore(sub_cmd, path, backstore, None, env)
        elif sub_cmd in ('rename', 'copy'):
            if len(argv) != 7:
                print_backstore_usage(arg0)
                exit(1)
            target = argv[4]
            old_name = argv[5]
            new_name = argv[6]
            process_copy(path, target, old_name, new_name, sub_cmd == 'rename', env)
else:
print_backstore_usage(arg0)
exit(1)
save_config(env)
elif cmd == 'portal':
        if len(argv) < 6:
print_portal_usage(arg0)
exit(1)
sub_cmd = argv[2]
if sub_cmd != 'add' and sub_cmd != 'remove':
print_portal_usage(arg0)
exit(1)
target = argv[3]
tpg = argv[4]
ip = argv[5]
process_portal(sub_cmd, target, tpg, ip, env)
save_config(env)
elif cmd == 'acl':
        if len(argv) < 5:
print_acl_usage(arg0)
exit(1)
sub_cmd = argv[2]
target = argv[3]
initiator = argv[4]
userid = None
password = None
in_userid = None
in_password = None
if sub_cmd == 'add':
if len(argv) != 9:
print_acl_usage(arg0)
exit(1)
userid = argv[5]
password = argv[6]
in_userid = argv[7]
in_password = argv[8]
elif sub_cmd == 'remove':
if len(argv) != 5:
print_acl_usage(arg0)
exit(1)
else:
print_acl_usage(arg0)
exit(1)
process_acl(sub_cmd, target, initiator, userid, password, in_userid, in_password, env)
save_config(env)
elif cmd == 'tpg':
        if len(argv) < 5:
print_tpg_usage(arg0)
exit(1)
sub_cmd = argv[2]
target = argv[3]
tpg = argv[4]
if sub_cmd == 'add':
if len(argv) != 5:
print_tpg_usage(argv[0])
exit(1)
elif sub_cmd == 'remove':
if len(argv) != 5:
print_tpg_usage(argv[0])
exit(1)
else:
print_tpg_usage(argv[0])
exit(1)
process_tpg(sub_cmd, target, tpg, env)
save_config(env)
elif cmd == 'target':
if len(argv) < 3:
print_target_usage(arg0)
exit(1)
sub_cmd = argv[2]
target = None
if sub_cmd == 'add':
if len(argv) >= 4:
target = argv[3]
elif sub_cmd == 'remove':
if len(argv) != 4:
print_target_usage(argv[0])
exit(1)
target = argv[3]
else:
print_target_usage(argv[0])
exit(1)
process_target(sub_cmd, target, env)
save_config(env)
elif cmd == 'map':
if len(argv) < 7 or (len(argv) - 5) % 2 == 1:
print_map_usage(argv[0])
exit(1)
# Get the LIO path to the backstores
path = argv[2]
# Get the target IQN
target = argv[3]
# Check if we should do a dry run
dry_run = argv[4] != 'false'
# Parse the input list into a table
input_list = parse_input_list(argv[5:])
if process_map(path, target, dry_run, input_list, env):
save_config(env)
else:
print_general_usage(arg0)
if __name__ == "__main__":
main(sys.argv)
| gpl-3.0 | 5,293,990,632,733,308,000 | 29.258333 | 122 | 0.516497 | false |
mareknetusil/twist | cbc/twist/function_spaces.py | 1 | 3099 | import fenics
from cbc.common import create_dirichlet_conditions
class FunctionSpace_U():
"""
Discrete function space for the displacement U
"""
def __init__(self, mesh, element_type, element_degree, pbc=None):
if not pbc:
self.space = fenics.VectorFunctionSpace(mesh, element_type,
element_degree)
else:
self.space = fenics.VectorFunctionSpace(mesh, element_type,
element_degree,
constrained_domain=pbc)
self._unknown_displacement = fenics.Function(self.space)
self._test_displacement = fenics.TestFunction(self.space)
self._trial_displacement = fenics.TrialFunction(self.space)
@property
def unknown_displacement(self):
return self._unknown_displacement
@property
def test_displacement(self):
return self._test_displacement
@property
def trial_displacement(self):
return self._trial_displacement
def create_dirichlet_conditions(self, problem):
self.bcu = create_dirichlet_conditions(problem.dirichlet_values(),
problem.dirichlet_boundaries(),
self.space)
return self.bcu
class FunctionSpace_UP():
"""
Discrete space for the (U,P)-mixed formulation
"""
def __init__(self, mesh, element_type, element_degree, pbc=None):
if not pbc:
vector = fenics.VectorFunctionSpace(mesh, element_type,
element_degree)
scalar = fenics.FunctionSpace(mesh, element_type,
element_degree - 1)
else:
vector = fenics.VectorFunctionSpace(mesh, element_type,
element_degree,
constrained_domain=pbc)
scalar = fenics.FunctionSpace(mesh, element_type,
element_degree - 1,
constrained_domain=pbc)
self.space = fenics.MixedFunctionSpace([vector, scalar])
self._unknown_vector = fenics.Function(self.space)
(self._test_displacement, self._test_pressure) \
= fenics.TestFunctions(self.space)
self._trial_vector = fenics.TrialFunction(self.space)
self.bcu = None
@property
def unknown_vector(self):
return self._unknown_vector
@property
def test_vector(self):
return self._test_displacement, self._test_pressure
@property
def trial_vector(self):
return self._trial_vector
def create_dirichlet_conditions(self, problem):
self.bcu = create_dirichlet_conditions(problem.dirichlet_values(),
problem.dirichlet_boundaries(),
self.space.sub(0))
return self.bcu
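# Example usage (a minimal sketch; assumes a legacy FEniCS/DOLFIN build that
# still provides MixedFunctionSpace, and a `problem` object exposing
# dirichlet_values() and dirichlet_boundaries()):
#
#     mesh = fenics.UnitCubeMesh(8, 8, 8)
#     V = FunctionSpace_U(mesh, 'CG', 1)
#     u, v = V.unknown_displacement, V.test_displacement
#     bcs = V.create_dirichlet_conditions(problem)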
| gpl-3.0 | -1,240,472,357,221,405,700 | 36.792683 | 78 | 0.539852 | false |
TeamSpen210/srctools | srctools/sndscript.py | 1 | 12138 | """Reads and writes Soundscripts."""
from enum import Enum
from chunk import Chunk as WAVChunk
from srctools import Property, conv_float
from typing import (
Optional, Union, TypeVar, Callable,
List, Tuple, Dict,
TextIO, IO,
)
__all__ = [
'SND_CHARS', 'Pitch', 'VOL_NORM', 'Channel', 'Level',
'Sound', 'wav_is_looped',
]
# All the prefixes wavs can have.
SND_CHARS = '*@#<>^)}$!?'
class Pitch(float, Enum):
"""The constants permitted for sound pitches."""
PITCH_NORM = 100.0
PITCH_LOW = 95.0
PITCH_HIGH = 120.0
def __str__(self) -> str:
return self.name
class VOLUME(Enum):
"""Special value, substitutes default volume (usually 1)."""
VOL_NORM = 'VOL_NORM'
def __str__(self) -> str:
return self.name
VOL_NORM = VOLUME.VOL_NORM
# Old compatibility values, replaced by soundlevel.
ATTENUATION = {
'ATTN_NONE': 0,
'ATTN_NORM': 0.8,
'ATTN_IDLE': 2.0,
'ATTN_STATIC': 1.25,
'ATTN_RICOCHET': 1.5,
'ATTN_GUNFIRE': 0.27,
}
class Channel(Enum):
"""Different categories of sounds."""
DEFAULT = "CHAN_AUTO"
GUNFIRE = "CHAN_WEAPON"
VOICE = "CHAN_VOICE"
TF2_ANNOUNCER = "CHAN_VOICE2"
ITEMS = "CHAN_ITEM"
BODY = "CHAN_BODY"
STREAMING = "CHAN_STREAM"
CON_CMD = "CHAN_REPLACE"
BACKGROUND = "CHAN_STATIC"
PLAYER_VOICE = "CHAN_VOICE_BASE"
#CHAN_USER_BASE+<number>
#Custom channels can be defined here.
class Level(Enum):
"""Soundlevel constants - attenuation."""
SNDLVL_NONE = 'SNDLVL_NONE'
SNDLVL_20dB = 'SNDLVL_20dB'
SNDLVL_25dB = 'SNDLVL_25dB'
SNDLVL_30dB = 'SNDLVL_30dB'
SNDLVL_35dB = 'SNDLVL_35dB'
SNDLVL_40dB = 'SNDLVL_40dB'
SNDLVL_45dB = 'SNDLVL_45dB'
SNDLVL_50dB = 'SNDLVL_50dB'
SNDLVL_55dB = 'SNDLVL_55dB'
SNDLVL_IDLE = 'SNDLVL_IDLE'
SNDLVL_65dB = 'SNDLVL_65dB'
SNDLVL_STATIC = 'SNDLVL_STATIC'
SNDLVL_70dB = 'SNDLVL_70dB'
SNDLVL_NORM = 'SNDLVL_NORM'
SNDLVL_80dB = 'SNDLVL_80dB'
SNDLVL_TALKING = 'SNDLVL_TALKING'
SNDLVL_85dB = 'SNDLVL_85dB'
SNDLVL_90dB = 'SNDLVL_90dB'
SNDLVL_95dB = 'SNDLVL_95dB'
SNDLVL_100dB = 'SNDLVL_100dB'
SNDLVL_105dB = 'SNDLVL_105dB'
SNDLVL_110dB = 'SNDLVL_110dB'
SNDLVL_120dB = 'SNDLVL_120dB'
SNDLVL_125dB = 'SNDLVL_125dB'
SNDLVL_130dB = 'SNDLVL_130dB'
SNDLVL_GUNFIRE = 'SNDLVL_GUNFIRE'
SNDLVL_140dB = 'SNDLVL_140dB'
SNDLVL_145dB = 'SNDLVL_145dB'
SNDLVL_150dB = 'SNDLVL_150dB'
SNDLVL_180dB = 'SNDLVL_180dB'
def __str__(self) -> str:
return self.name
EnumType = TypeVar('EnumType', bound=Enum)
def split_float(
val: str,
enum: Callable[[str], Union[float, EnumType]],
default: Union[float, EnumType],
name: str,
) -> Tuple[Union[float, EnumType], Union[float, EnumType]]:
"""Handle values which can be either single or a low, high pair of numbers.
If single, low and high are the same.
    enum is an Enum with values to match text constants, or a converter function
returning enums or raising ValueError, KeyError or IndexError.
The name is used for error handling.
"""
if isinstance(val, list):
raise ValueError(f'Property block used for option in {name} sound!')
if ',' in val:
s_low, s_high = val.split(',')
try:
low = enum(s_low.upper())
except (LookupError, ValueError):
low = conv_float(s_low, default)
try:
high = enum(s_high.upper())
except (LookupError, ValueError):
high = conv_float(s_high, default)
return low, high
else:
try:
out = enum(val.upper())
except (LookupError, ValueError):
out = conv_float(val, default)
return out, out
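# Example (a quick sketch of both accepted input shapes):
#
#     split_float('90,110', Pitch.__getitem__, 100.0, 'demo')
#     # -> (90.0, 110.0)
#     split_float('PITCH_NORM', Pitch.__getitem__, 100.0, 'demo')
#     # -> (Pitch.PITCH_NORM, Pitch.PITCH_NORM)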
def join_float(val: Tuple[float, float]) -> str:
"""Reverse split_float()."""
low, high = val
if low == high:
return str(low)
else:
return '{!s},{!s}'.format(low, high)
def wav_is_looped(file: IO[bytes]) -> bool:
"""Check if the provided wave file contains loop cue points.
This code is partially copied from wave.Wave_read.initfp().
"""
first = WAVChunk(file, bigendian=False)
if first.getname() != b'RIFF':
raise ValueError('File does not start with RIFF id.')
if first.read(4) != b'WAVE':
raise ValueError('Not a WAVE file.')
while True:
try:
chunk = WAVChunk(file, bigendian=False)
except EOFError:
return False
if chunk.getname() == b'cue ':
return True
chunk.skip()
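# Example usage (a minimal sketch; 'loop.wav' is a hypothetical file):
#
#     with open('loop.wav', 'rb') as f:
#         print(wav_is_looped(f))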
class Sound:
"""Represents a single soundscript."""
stack_start: Property
stack_update: Property
stack_stop: Property
def __init__(
self,
name: str,
sounds: List[str],
volume: Union[Tuple[Union[float, VOLUME], Union[float, VOLUME]], float, VOLUME]=(VOL_NORM, VOL_NORM),
channel: Channel=Channel.DEFAULT,
level: Union[Tuple[Union[float, Level], Union[float, Level]], float, Level]=(Level.SNDLVL_NORM, Level.SNDLVL_NORM),
pitch: Union[Tuple[Union[float, Pitch], Union[float, Pitch]], float, Pitch]=(Pitch.PITCH_NORM, Pitch.PITCH_NORM),
# Operator stacks
stack_start: Optional[Property]=None,
stack_update: Optional[Property]=None,
stack_stop: Optional[Property]=None,
use_v2: bool=False,
) -> None:
"""Create a soundscript."""
self.name = name
self.sounds = sounds
self.channel = channel
self.force_v2 = use_v2
if isinstance(volume, tuple):
self.volume = volume
else:
self.volume = volume, volume
if isinstance(level, tuple):
self.level = level
else:
self.level = level, level
if isinstance(pitch, tuple):
self.pitch = pitch
else:
self.pitch = pitch, pitch
self.stack_start = Property('', []) if stack_start is None else stack_start
self.stack_update = Property('', []) if stack_update is None else stack_update
self.stack_stop = Property('', []) if stack_stop is None else stack_stop
def __repr__(self) -> str:
res = f'Sound({self.name!r}, {self.sounds}, volume={self.volume}, channel={self.channel}, level={self.level}, pitch={self.pitch}'
if self.force_v2 or self.stack_start or self.stack_update or self.stack_stop:
res += f', stack_start={self.stack_start!r}, stack_update={self.stack_update!r}, stack_stop={self.stack_stop!r})'
else:
res += ')'
return res
@classmethod
def parse(cls, file: Property) -> Dict[str, 'Sound']:
"""Parses a soundscript file.
This returns a dict mapping casefolded names to Sounds.
"""
sounds = {}
for snd_prop in file:
volume = split_float(
snd_prop['volume', '1'],
VOLUME,
1.0,
snd_prop.real_name,
)
pitch = split_float(
snd_prop['pitch', '100'],
Pitch.__getitem__,
100.0,
snd_prop.real_name,
)
if 'soundlevel' in snd_prop:
level = split_float(
snd_prop['soundlevel'],
Level.__getitem__,
Level.SNDLVL_NORM,
snd_prop.real_name,
)
elif 'attenuation' in snd_prop:
atten_min, atten_max = split_float(
snd_prop['attenuation'],
ATTENUATION.__getitem__,
ATTENUATION['ATTN_IDLE'],
snd_prop.real_name,
)
# Convert to a soundlevel.
# See source_sdk/public/soundflags.h:ATTN_TO_SNDLVL()
level = (
(50.0 + 20.0 / atten_min) if atten_min else 0.0,
(50.0 + 20.0 / atten_max) if atten_max else 0.0,
)
else:
level = (Level.SNDLVL_NORM, Level.SNDLVL_NORM)
# Either 1 "wave", or multiple in "rndwave".
            wavs: List[str] = []
for prop in snd_prop:
if prop.name == 'wave':
wavs.append(prop.value)
elif prop.name == 'rndwave':
for subprop in prop:
wavs.append(subprop.value)
channel = Channel(snd_prop['channel', 'CHAN_AUTO'])
sound_version = snd_prop.int('soundentry_version', 1)
if 'operator_stacks' in snd_prop:
if sound_version == 1:
raise ValueError(
'Operator stacks used with version '
'less than 2 in "{}"!'.format(snd_prop.real_name))
start_stack, update_stack, stop_stack = [
Property(stack_name, [
prop.copy()
for prop in
snd_prop.find_children('operator_stacks', stack_name)
])
for stack_name in
['start_stack', 'update_stack', 'stop_stack']
]
else:
start_stack, update_stack, stop_stack = [
Property(stack_name, [])
for stack_name in
['start_stack', 'update_stack', 'stop_stack']
]
sounds[snd_prop.name] = Sound(
snd_prop.real_name,
wavs,
volume,
channel,
level,
pitch,
start_stack,
update_stack,
stop_stack,
sound_version == 2,
)
return sounds
def export(self, file: TextIO):
"""Write a sound to a file.
Pass a file-like object open for text writing.
"""
file.write('"{}"\n\t{{\n'.format(self.name))
file.write('\t' 'channel {}\n'.format(self.channel.value))
file.write('\t' 'soundlevel {}\n'.format(join_float(self.level)))
if self.volume != (1, 1):
file.write('\tvolume {}\n'.format(join_float(self.volume)))
if self.pitch != (100, 100):
file.write('\tpitch {}\n'.format(join_float(self.pitch)))
if len(self.sounds) > 1:
file.write('\trndwave\n\t\t{\n')
for wav in self.sounds:
file.write('\t\twave "{}"\n'.format(wav))
file.write('\t\t}\n')
else:
file.write('\twave "{}"\n'.format(self.sounds[0]))
if self.force_v2 or self.stack_start or self.stack_stop or self.stack_update:
file.write(
'\t' 'soundentry_version 2\n'
'\t' 'operator_stacks\n'
'\t\t' '{\n'
)
if self.stack_start:
file.write(
'\t\t' 'start_stack\n'
'\t\t\t' '{\n'
)
for prop in self.stack_start:
for line in prop.export():
file.write('\t\t\t' + line)
file.write('\t\t\t}\n')
if self.stack_update:
file.write(
'\t\t' 'update_stack\n'
'\t\t\t' '{\n'
)
for prop in self.stack_update:
for line in prop.export():
file.write('\t\t\t' + line)
file.write('\t\t\t}\n')
if self.stack_stop:
file.write(
'\t\t' 'stop_stack\n'
'\t\t\t' '{\n'
)
for prop in self.stack_stop:
for line in prop.export():
file.write('\t\t\t' + line)
file.write('\t\t\t}\n')
file.write('\t\t}\n')
file.write('\t}\n')
| unlicense | -6,019,161,793,720,176,000 | 31.196286 | 137 | 0.511781 | false |
allanlei/rhinocloud-utils | rhinocloud/views/mixins/formset.py | 1 | 2115 | from django.forms.models import modelformset_factory
from django.core.exceptions import ImproperlyConfigured
class ModelFormSetFactoryMixin(object):
formset_fields = None
formset_exclude = None
formset_extra = 1
def get_formset_model(self):
if self.formset_model:
model = self.formset_model
else:
raise ImproperlyConfigured('Provide formset_model or override get_formset_model().')
return model
def get_formset_fields(self):
return self.formset_fields
def get_formset_exclude(self):
return self.formset_exclude
def get_formset_class_kwargs(self):
return {
'fields': self.get_formset_fields(),
'exclude': self.get_formset_exclude(),
'extra': int(self.formset_extra),
}
def get_formset_class(self):
return modelformset_factory(self.get_formset_model(),**self.get_formset_class_kwargs())
def get_formset_queryset(self):
return self.get_formset_model().objects.all()
def get_formset_kwargs(self, **kwargs):
if 'queryset' not in kwargs:
kwargs['queryset'] = self.get_formset_queryset()
return kwargs
def get_formset(self, *args, **kwargs):
if not hasattr(self, 'formset') or self.formset is None:
self.formset = self.get_formset_class()(*args, **self.get_formset_kwargs(**kwargs))
return self.formset
def form_valid(self, form, **kwargs):
formset = self.get_formset(self.request.POST, self.request.FILES)
if formset.is_valid():
response = super(ModelFormSetFactoryMixin, self).form_valid(form, **kwargs)
self.formset_valid(formset, **kwargs)
return response
return self.form_invalid(form, **kwargs)
def form_invalid(self, form, **kwargs):
self.get_formset(self.request.POST, self.request.FILES).is_valid()
return super(ModelFormSetFactoryMixin, self).form_invalid(form, **kwargs)
def formset_valid(self, formset, **kwargs):
formset.save()
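# Example usage (a hypothetical sketch; the Book model and the surrounding
# view wiring are assumptions -- only the mixin comes from this module):
#
#     class BookBulkCreateView(ModelFormSetFactoryMixin, CreateView):
#         model = Book
#         formset_model = Book
#         formset_fields = ('title', 'author')
#         formset_extra = 3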
| bsd-3-clause | -6,625,479,159,177,680,000 | 34.847458 | 96 | 0.631678 | false |
balloob/pychromecast | pychromecast/controllers/youtube.py | 1 | 3736 | """
Controller to interface with the YouTube-app.
Use the media controller to play, pause etc.
"""
import threading
from casttube import YouTubeSession
from . import BaseController
from ..error import UnsupportedNamespace
from ..config import APP_YOUTUBE
YOUTUBE_NAMESPACE = "urn:x-cast:com.google.youtube.mdx"
TYPE_GET_SCREEN_ID = "getMdxSessionStatus"
TYPE_STATUS = "mdxSessionStatus"
ATTR_SCREEN_ID = "screenId"
MESSAGE_TYPE = "type"
class YouTubeController(BaseController):
""" Controller to interact with Youtube."""
def __init__(self):
super(YouTubeController, self).__init__(YOUTUBE_NAMESPACE, APP_YOUTUBE)
self.status_update_event = threading.Event()
self._screen_id = None
self._session = None
def start_session_if_none(self):
"""
Starts a session it is not yet initialized.
"""
if not (self._screen_id and self._session):
self.update_screen_id()
self._session = YouTubeSession(screen_id=self._screen_id)
def play_video(self, video_id, playlist_id=None):
"""
Play video(video_id) now. This ignores the current play queue order.
:param video_id: YouTube video id(http://youtube.com/watch?v=video_id)
:param playlist_id: youtube.com/watch?v=video_id&list=playlist_id
"""
self.start_session_if_none()
self._session.play_video(video_id, playlist_id)
def add_to_queue(self, video_id):
"""
Add video(video_id) to the end of the play queue.
:param video_id: YouTube video id(http://youtube.com/watch?v=video_id)
"""
self.start_session_if_none()
self._session.add_to_queue(video_id)
def play_next(self, video_id):
"""
Play video(video_id) after the currently playing video.
:param video_id: YouTube video id(http://youtube.com/watch?v=video_id)
"""
self.start_session_if_none()
self._session.play_next(video_id)
def remove_video(self, video_id):
"""
Remove video(videoId) from the queue.
:param video_id: YouTube video id(http://youtube.com/watch?v=video_id)
"""
self.start_session_if_none()
self._session.remove_video(video_id)
def clear_playlist(self):
"""
Clear the entire video queue
"""
self.start_session_if_none()
self._session.clear_playlist()
def update_screen_id(self):
"""
Sends a getMdxSessionStatus to get the screenId and waits for response.
        This function is blocking.
        If connected, we should always get a response
(send message will launch app if it is not running).
"""
self.status_update_event.clear()
# This gets the screenId but always throws. Couldn't find a better way.
try:
self.send_message({MESSAGE_TYPE: TYPE_GET_SCREEN_ID})
except UnsupportedNamespace:
pass
self.status_update_event.wait()
self.status_update_event.clear()
def receive_message(self, message, data: dict):
"""Called when a message is received."""
if data[MESSAGE_TYPE] == TYPE_STATUS:
self._process_status(data.get("data"))
return True
return False
def _process_status(self, status):
""" Process latest status update. """
self._screen_id = status.get(ATTR_SCREEN_ID)
self.status_update_event.set()
def quick_play(self, media_id=None, playlist_id=None, enqueue=False, **kwargs):
""" Quick Play """
if enqueue:
self.add_to_queue(media_id, **kwargs)
else:
self.play_video(media_id, playlist_id=playlist_id, **kwargs)
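# Example usage (a minimal sketch; assumes an already-connected
# pychromecast.Chromecast instance `cast`; the video id is a placeholder):
#
#     yt = YouTubeController()
#     cast.register_handler(yt)
#     yt.play_video('dQw4w9WgXcQ')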
| mit | 7,168,021,507,368,404,000 | 32.963636 | 83 | 0.621253 | false |
soplerproject/sopler | sopler/settings.py | 1 | 7823 | # -*- coding: utf-8 -*-
# Django settings for sopler project.
import os.path
PROJECT_DIR = os.path.normpath(os.path.dirname(os.path.abspath(__file__)))
DEBUG = True
TEMPLATE_DEBUG = DEBUG
SESSION_SAVE_EVERY_REQUEST = True
#Set "True" if all non-SSL requests should be permanently redirected to SSL.
SECURE_SSL_REDIRECT = False
# Setting to an integer number of seconds
#It is recommended to set the max-age to a big value like 31536000 (12 months) or 63072000 (24 months).
SECURE_HSTS_SECONDS = 31536000
# HTTP Strict Transport Security.
SECURE_HSTS_INCLUDE_SUBDOMAINS = True
# Prevent framing of your pages and protect them from clickjacking.
SECURE_FRAME_DENY = True
# Prevent the browser from guessing asset content types.
SECURE_CONTENT_TYPE_NOSNIFF = True
# Enable the browser’s XSS filtering protections.
SECURE_BROWSER_XSS_FILTER = True
SOCIAL_FRIENDS_USING_ALLAUTH = True
ADMINS = (
# ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(PROJECT_DIR, "../database.db"), # Or path to database file if using sqlite3.
#'NAME': '',
# The following settings are not used with sqlite3:
#'USER': '',
#'PASSWORD': '',
'HOST': '', # Empty for localhost through domain sockets or '127.0.0.1' for localhost through TCP.
'PORT': '', # Set to empty string for default.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_DIR, "../static/"),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
# Ensure that you’re using a long, random and unique "SECRET_KEY"
SECRET_KEY = 'Enter_Here_A_Long_Random_And_Unique_Key_&^%$&!!'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.request',
    'django.contrib.auth.context_processors.auth',
    "django.core.context_processors.media",
    "django.core.context_processors.static",
    "django.contrib.messages.context_processors.messages",
    #"allauth.account.context_processors.account",
    "allauth.socialaccount.context_processors.socialaccount",
)
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'djangosecure.middleware.SecurityMiddleware',
# Uncomment the next line for simple clickjacking protection:
#'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'sopler.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'sopler.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_DIR, "../templates/"),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
#'django_admin_bootstrapped',
'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
'allauth',
'allauth.account',
'allauth.socialaccount',
# Available Social Account Providers
'allauth.socialaccount.providers.twitter',
'allauth.socialaccount.providers.persona',
'allauth.socialaccount.providers.google',
'allauth.socialaccount.providers.facebook',
#'allauth.socialaccount.providers.openid',
'core',
# Extra Security Features.
'djangosecure',
# Data migration.
'south',
# Faving and unfaving lists.
'favit',
# Webservice API framework for Django.
#'tastypie',
# Fetches your friends from different social-networks.
'social_friends_finder',
'embed_video',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
try:
from local_settings import *
except ImportError:
pass
| agpl-3.0 | -6,497,830,003,044,947,000 | 33.29386 | 127 | 0.698043 | false |
Pbartek/pyobd-pi-TFT | obd_gui.py | 1 | 17289 | #!/usr/bin/env python
###########################################################################
# obd_gui.py
#
# Created by Paul Bartek ([email protected])
#
###########################################################################
#-------------------------------------------------------------------------------
import os
import wx
import time
from threading import Thread
from obd_capture import OBD_Capture
from obd_sensors import SENSORS
from obd_sensors import *
#-------------------------------------------------------------------------------
# OBD variable
BACKGROUND_FILENAME = "bg_black.jpg"
GAUGE_FILENAME = "frame_C1.jpg"
LOGO_FILENAME = "cowfish.png"
# OBDSplashScreen below references these two names, but the original source
# never defines them; the values here are assumptions (reuse the background
# image, show the splash for 5 seconds).
SPLASHSCREEN_FILENAME = BACKGROUND_FILENAME
SPLASHSCREEN_TIMEOUT = 5000  # milliseconds
#-------------------------------------------------------------------------------
def obd_connect(o):
o.connect()
class OBDConnection(object):
"""
Class for OBD connection. Use a thread for connection.
"""
def __init__(self):
self.c = OBD_Capture()
def get_capture(self):
return self.c
def connect(self):
self.t = Thread(target=obd_connect, args=(self.c,))
self.t.start()
def is_connected(self):
return self.c.is_connected()
def get_output(self):
if self.c and self.c.is_connected():
return self.c.capture_data()
return ""
def get_port(self):
return self.c.is_connected()
def get_port_name(self):
if self.c:
port = self.c.is_connected()
if port:
try:
return port.port.name
except:
pass
return None
def get_sensors(self):
sensors = []
if self.c:
sensors = self.c.getSupportedSensorList()
return sensors
#-------------------------------------------------------------------------------
class OBDText(wx.TextCtrl):
"""
Text display while loading OBD application.
"""
def __init__(self, parent):
"""
Constructor.
"""
style = wx.TE_READONLY | wx.TE_MULTILINE
wx.TextCtrl.__init__(self, parent, style=style)
self.SetBackgroundColour('#21211f')
self.SetForegroundColour(wx.WHITE)
font = wx.Font(12, wx.ROMAN, wx.NORMAL, wx.NORMAL, faceName="Monaco")
self.SetFont(font)
def AddText(self, text):
self.AppendText(text)
#-------------------------------------------------------------------------------
class OBDStaticBox(wx.StaticBox):
"""
OBD StaticBox.
"""
def __init__(self, *args, **kwargs):
"""
Constructor.
"""
wx.StaticBox.__init__(self, *args, **kwargs)
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDPanelGauges(wx.Panel):
"""
Panel for gauges.
"""
def __init__(self, *args, **kwargs):
"""
Constructor.
"""
super(OBDPanelGauges, self).__init__(*args, **kwargs)
# Background image
image = wx.Image(GAUGE_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
self.bitmap = wx.BitmapFromImage(image)
self.Bind(wx.EVT_PAINT, self.OnPaint)
# Create an accelerator table
lid = wx.NewId()
cid = wx.NewId()
rid = wx.NewId()
self.Bind(wx.EVT_MENU, self.onCtrlC, id=cid)
self.Bind(wx.EVT_MENU, self.onLeft, id=lid)
self.Bind(wx.EVT_MENU, self.onRight, id=rid)
self.accel_tbl = wx.AcceleratorTable([
(wx.ACCEL_CTRL, ord('C'), cid),
(wx.ACCEL_NORMAL, wx.WXK_LEFT, lid),
(wx.ACCEL_NORMAL, wx.WXK_RIGHT, rid),
])
self.SetAcceleratorTable(self.accel_tbl)
# Handle events for mouse clicks
self.Bind(wx.EVT_LEFT_DOWN, self.onLeft)
self.Bind(wx.EVT_RIGHT_DOWN, self.onRight)
# Connection
self.connection = None
# Sensors
self.istart = 0
self.sensors = []
# Port
self.port = None
# List to hold children widgets
self.boxes = []
self.texts = []
def setConnection(self, connection):
self.connection = connection
def setSensors(self, sensors):
self.sensors = sensors
def setPort(self, port):
self.port = port
def getSensorsToDisplay(self, istart):
"""
Get at most 1 sensor to be displayed on screen.
"""
sensors_display = []
if istart<len(self.sensors):
iend = istart + 1
sensors_display = self.sensors[istart:iend]
return sensors_display
def ShowSensors(self):
"""
Display the sensors.
"""
sensors = self.getSensorsToDisplay(self.istart)
# Destroy previous widgets
for b in self.boxes: b.Destroy()
for t in self.texts: t.Destroy()
self.boxes = []
self.texts = []
# Main sizer
boxSizerMain = wx.BoxSizer(wx.VERTICAL)
# Grid sizer
nrows, ncols = 1, 1
vgap, hgap = 50, 50
gridSizer = wx.GridSizer(nrows, ncols, vgap, hgap)
# Create a box for each sensor
for index, sensor in sensors:
(name, value, unit) = self.port.sensor(index)
box = OBDStaticBox(self, wx.ID_ANY)
self.boxes.append(box)
boxSizer = wx.StaticBoxSizer(box, wx.VERTICAL)
# Text for sensor value
if type(value)==float:
value = str("%.2f"%round(value, 3))
t1 = wx.StaticText(parent=self, label=str(value), style=wx.ALIGN_CENTER)
t1.SetForegroundColour('WHITE')
font1 = wx.Font(30, wx.ROMAN, wx.NORMAL, wx.NORMAL, faceName="Monaco")
t1.SetFont(font1)
boxSizer.Add(t1, 0, wx.ALIGN_CENTER | wx.ALL, 70)
boxSizer.AddStretchSpacer()
self.texts.append(t1)
# Text for sensor name
t2 = wx.StaticText(parent=self, label=name, style=wx.ALIGN_CENTER)
t2.SetForegroundColour('WHITE')
font2 = wx.Font(10, wx.ROMAN, wx.NORMAL, wx.BOLD, faceName="Monaco")
t2.SetFont(font2)
boxSizer.Add(t2, 0, wx.ALIGN_CENTER | wx.ALL, 45)
self.texts.append(t2)
gridSizer.Add(boxSizer, 1, wx.EXPAND | wx.ALL)
# Add invisible boxes if necessary
nsensors = len(sensors)
for i in range(1-nsensors):
box = OBDStaticBox(self)
boxSizer = wx.StaticBoxSizer(box, wx.VERTICAL)
self.boxes.append(box)
box.Show(False)
gridSizer.Add(boxSizer, 1, wx.EXPAND | wx.ALL)
# Layout
boxSizerMain.Add(gridSizer, 1, wx.EXPAND | wx.ALL, 0)
self.SetSizer(boxSizerMain)
self.Refresh()
self.Layout()
# Timer for update
self.timer = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.refresh, self.timer)
self.timer.Start(1500)
def refresh(self, event):
sensors = self.getSensorsToDisplay(self.istart)
itext = 0
for index, sensor in sensors:
(name, value, unit) = self.port.sensor(index)
if type(value)==float:
value = str("%.2f"%round(value, 3))
if itext<len(self.texts):
self.texts[itext*2].SetLabel(str(value))
itext += 1
def onCtrlC(self, event):
self.GetParent().Close()
def onLeft(self, event):
"""
Get data from 1 previous sensor in the list.
"""
        istart = self.istart - 1
        if istart < 0:
            # Wrap around to the last sensor.
            istart = len(self.sensors) - 1
        self.istart = istart
        self.ShowSensors()
def onRight(self, event):
"""
Get data from 1 next sensor in the list.
"""
        istart = self.istart + 1
        if istart >= len(self.sensors):
            # Wrap around to the first sensor.
            istart = 0
        self.istart = istart
        self.ShowSensors()
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDLoadingPanel(wx.Panel):
"""
Main panel for OBD application.
Show loading screen. Handle event from mouse/keyboard.
"""
def __init__(self, *args, **kwargs):
"""
Constructor.
"""
super(OBDLoadingPanel, self).__init__(*args, **kwargs)
# Background image
image = wx.Image(BACKGROUND_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
self.bitmap = wx.BitmapFromImage(image)
self.Bind(wx.EVT_PAINT, self.OnPaint)
# Logo
bitmap = wx.Bitmap(LOGO_FILENAME)
width, height = bitmap.GetSize()
image = wx.ImageFromBitmap(bitmap)
image = image.Scale(width/12, height/12, wx.IMAGE_QUALITY_HIGH)
bitmap = wx.BitmapFromImage(image)
control = wx.StaticBitmap(self, wx.ID_ANY, bitmap)
control.SetPosition((2, 2))
# Create an accelerator table
cid = wx.NewId()
self.Bind(wx.EVT_MENU, self.onCtrlC, id=cid)
self.accel_tbl = wx.AcceleratorTable([
(wx.ACCEL_CTRL, ord('C'), cid),
])
self.SetAcceleratorTable(self.accel_tbl)
# Connection
self.c = None
# Sensors list
self.sensors = []
# Port
self.port = None
def getConnection(self):
return self.c
def showLoadingScreen(self):
"""
Display the loading screen.
"""
boxSizer = wx.BoxSizer(wx.VERTICAL)
self.textCtrl = OBDText(self)
boxSizer.Add(self.textCtrl, 1, wx.EXPAND | wx.ALL, 40)
self.SetSizer(boxSizer)
font3 = wx.Font(10, wx.ROMAN, wx.NORMAL, wx.NORMAL, faceName="Monaco")
self.textCtrl.SetFont(font3)
self.textCtrl.AddText(" Opening interface (serial port)\n")
self.textCtrl.AddText(" Trying to connect...\n")
self.timer0 = wx.Timer(self)
self.Bind(wx.EVT_TIMER, self.connect, self.timer0)
self.timer0.Start(1000)
def connect(self, event):
if self.timer0:
self.timer0.Stop()
# Connection
self.c = OBDConnection()
self.c.connect()
connected = False
while not connected:
connected = self.c.is_connected()
self.textCtrl.Clear()
self.textCtrl.AddText(" Trying to connect ..." + time.asctime())
if connected:
break
if not connected:
self.textCtrl.AddText(" Not connected\n")
return False
else:
self.textCtrl.Clear()
#self.textCtrl.AddText(" Connected\n")
port_name = self.c.get_port_name()
if port_name:
self.textCtrl.AddText(" Failed Connection: " + port_name +"\n")
self.textCtrl.AddText(" Please hold alt & esc to view terminal.")
self.textCtrl.AddText(str(self.c.get_output()))
self.sensors = self.c.get_sensors()
self.port = self.c.get_port()
self.GetParent().update(None)
def getSensors(self):
return self.sensors
def getPort(self):
return self.port
def onCtrlC(self, event):
self.GetParent().Close()
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDFrame(wx.Frame):
"""
OBD frame.
"""
def __init__(self):
"""
Constructor.
"""
wx.Frame.__init__(self, None, wx.ID_ANY, "OBD-Pi")
image = wx.Image(BACKGROUND_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
self.bitmap = wx.BitmapFromImage(image)
self.Bind(wx.EVT_PAINT, self.OnPaint)
self.panelLoading = OBDLoadingPanel(self)
self.sizer = wx.BoxSizer(wx.VERTICAL)
self.sizer.Add(self.panelLoading, 1, wx.EXPAND)
self.SetSizer(self.sizer)
self.panelLoading.showLoadingScreen()
self.panelLoading.SetFocus()
def update(self, event):
if self.panelLoading:
connection = self.panelLoading.getConnection()
sensors = self.panelLoading.getSensors()
port = self.panelLoading.getPort()
self.panelLoading.Destroy()
self.panelGauges = OBDPanelGauges(self)
if connection:
self.panelGauges.setConnection(connection)
if sensors:
self.panelGauges.setSensors(sensors)
self.panelGauges.setPort(port)
self.sizer = wx.BoxSizer(wx.VERTICAL)
self.sizer.Add(self.panelGauges, 1, wx.EXPAND)
self.SetSizer(self.sizer)
self.panelGauges.ShowSensors()
self.panelGauges.SetFocus()
self.Layout()
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDFrame0(wx.Frame):
"""
OBD starting frame. Used only for full screen purpose at startup.
"""
def __init__(self):
"""
Constructor.
"""
wx.Frame.__init__(self, None, wx.ID_ANY, "")
image = wx.Image(BACKGROUND_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
self.bitmap = wx.BitmapFromImage(image)
self.Bind(wx.EVT_PAINT, self.OnPaint)
def OnPaint(self, event):
self.Paint(wx.PaintDC(self))
def Paint(self, dc):
dc.DrawBitmap(self.bitmap, 0, 0)
#-------------------------------------------------------------------------------
class OBDSplashScreen(wx.SplashScreen):
"""
Splash screen.
"""
def __init__(self, parent=None, frame0=None):
"""
Constructor.
"""
self.frame0 = frame0
image = wx.Image(SPLASHSCREEN_FILENAME)
width, height = wx.GetDisplaySize()
image = image.Scale(width, height, wx.IMAGE_QUALITY_HIGH)
bitmap = wx.BitmapFromImage(image)
splashStyle = wx.SPLASH_CENTRE_ON_SCREEN | wx.SPLASH_TIMEOUT
splashDuration = SPLASHSCREEN_TIMEOUT
wx.SplashScreen.__init__(self, bitmap, splashStyle, splashDuration, parent)
self.Bind(wx.EVT_CLOSE, self.OnExit)
wx.Yield()
def OnExit(self, evt):
"""
Exit splash screen and pass over other to main OBD frame.
"""
# Main frame
frame = OBDFrame()
app.SetTopWindow(frame)
frame.ShowFullScreen(True)
frame.Show(True)
# Delete frame0
if self.frame0:
self.frame0.Destroy()
del self.frame0
evt.Skip()
#-------------------------------------------------------------------------------
class OBDApp(wx.App):
"""
OBD Application.
"""
def __init__(self, redirect=False, filename=None, useBestVisual=False, clearSigInt=True):
"""
Constructor.
"""
wx.App.__init__(self, redirect, filename, useBestVisual, clearSigInt)
def OnInit(self):
"""
Initializer.
"""
# Main frame
frame = OBDFrame()
self.SetTopWindow(frame)
frame.ShowFullScreen(True)
frame.Show(True)
#frame.showLoadingPanel()
# This frame is used only to set the full screen mode
# for the splash screen display and for transition with
# the loading screen.
# This frame is not shown and will be deleted later on.
#frame0 = OBDFrame0()
#self.SetTopWindow(frame0)
#frame0.ShowFullScreen(True)
#self.SetTopWindow(frame0)
# Splash screen
#splash = OBDSplashScreen(frame0, frame0)
#self.SetTopWindow(splash)
#splash.Show(True)
#splash.ShowFullScreen(True)
return True
def FilterEvent(self, event):
        if isinstance(event, wx.KeyEvent):
pass
#-------------------------------------------------------------------------------
app = OBDApp(False)
app.MainLoop()
#-------------------------------------------------------------------------------
| gpl-2.0 | -4,817,368,613,607,881,000 | 27.863105 | 93 | 0.515588 | false |
networkdynamics/zenlib | src/zen/tests/modularity.py | 1 | 5825 | import unittest
from zen import *
class TestSynonymModularity(unittest.TestCase):
def test_small_graph_modularity(self):
graph=Graph()
graph.add_node('1')
graph.add_node('2')
graph.add_node('3')
graph.add_node('4')
graph.add_node('5')
graph.add_node('6')
graph.add_edge('1','2')
graph.add_edge('3','2')
graph.add_edge('1','3')
graph.add_edge('4','5')
graph.add_edge('4','6')
graph.add_edge('5','6')
graph.add_edge('2','5')
community_assignment={0:['1','2','3'],1:['4','5','6']}
expected_result=10.0/28 # hand calculated
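        # Where 10.0/28 comes from: Q = sum_c (e_c/m - (d_c/(2*m))**2) with
        # m = 7 edges; each triangle community has e_c = 3 internal edges and
        # total degree d_c = 7, so Q = 2*(3/7 - (7/14)**2) = 10/28.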
#print 'modularity: ' + str(modularity.modularity(graph, community_assignment))
self.assertAlmostEqual(modularity(graph, community_assignment), expected_result)
graph.add_node('7')
graph.add_node('8')
graph.add_node('9')
graph.add_edge('7','8')
graph.add_edge('8','9')
graph.add_edge('9','7')
graph.add_edge('8','5')
graph.add_edge('8','2')
expected_result=5.0/12 # hand calculated
community_assignment={0:['1','2','3'],1:['4','5','6'],2:['7','8','9']}
#print 'modularity: ' + str(modularity.modularity(graph, community_assignment))
self.assertAlmostEqual(modularity(graph, community_assignment), expected_result)
#community_assignment={0:['1','3'],1:['4','6'],2:['7','9'],3:['2','5','8']}
#print 'modularity: ' + str(modularity.modularity(graph, community_assignment))
'''
def test_words(self):
# Create the graph first so we can use it for the tests
# Using adjacency lists to store synonyms of a word
synonyms = []
synonyms.append(['free', 'complimentary', 'at liberty', 'costless', 'gratis', 'unpaid', 'liberated', 'unrestricted', 'unlimited', 'open', 'familiar', 'permitted', 'independent', 'idle'])
synonyms.append(['complimentary', 'costless', 'flattering', 'polite', 'gratis'])
synonyms.append(['gratis', 'free', 'costless', 'donated'])
synonyms.append(['unpaid', 'donated', 'volunteer'])
synonyms.append(['unrestricted', 'unlimited', 'at liberty', 'open', 'liberated'])
synonyms.append(['unlimited', 'absolute', 'endless', 'boundless', 'countless', 'total', 'vast', 'infinite'])
synonyms.append(['open', 'accessible', 'clear', 'spacious', 'wide', 'available', 'permitted', 'overt', 'plain', 'frank', 'undecided'])
synonyms.append(['familiar', 'frequent', 'usual', 'simple'])
synonyms.append(['permitted', 'allowed', 'acceptable', 'approved', 'tolerated'])
synonyms.append(['independent', 'liberated', 'self-reliant', 'separate', 'sovereign'])
synonyms.append(['idle', 'abandoned', 'dead', 'empty', 'untouched', 'lazy', 'pointless', 'resting', 'slothful'])
synonyms.append(['wide', 'spacious', 'expansive', 'roomy'])
synonyms.append(['infinite', 'absolute', 'endless', 'eternal', 'boundless', 'vast', 'wide', 'limitless', 'immense', 'total', 'untold'])
synonyms.append(['lazy', 'slothful'])
synonyms.append(['absolute', 'complete', 'free', 'supreme', 'unlimited', 'sovereign', 'certain', 'infallible', 'exact', 'precise'])
synonyms.append(['immense', 'unlimited', 'vast', 'limitless', 'endless', 'boundless', 'wide'])
synonyms.append(['limitless', 'vast', 'unlimited', 'endless', 'boundless'])
synonyms.append(['endless', 'boundless'])
#synonyms.append(['idle', 'abandoned', 'dead', 'empty', 'untouched', 'lazy', 'pointless', 'resting', 'slothful'])
# Now make it into a graph
graph = Graph()
for adj_list in synonyms:
# The node is always the first element
node = adj_list[0]
for i in range(1, len(adj_list)):
other_node = adj_list[i]
# Make sure there isn't already an edge
if graph.has_edge(node, other_node):
#print "already exists an edge between " + node + " and " + other_node
pass
else:
# print "adding edge between " + node + ' and ' + other_node
graph.add_edge(node, other_node)
self.graph = graph
#print graph.num_nodes
#print graph.num_edges
# Test a group of very similar words:
similar_words = dict({0:['vast', 'infinite', 'unlimited', 'endless', 'immense', 'limitless', 'boundless']})
# print str(similar_words) + ' has modularity: ' + str(modularity.modularity(graph, similar_words))
# Calculated by hand (+wolfram)
expected_result = float(321) / float(4418)
#self.assertAlmostEqual(modularity.modularity(graph, similar_words), expected_result)
# Test a group of dissimilar words:
dissimilar_words = {1:['open', 'lazy', 'wide', 'gratis', 'separate']}
# print str(dissimilar_words) + ' has modularity: ' + str(modularity.modularity(graph, dissimilar_words))
expected_result = float(-3) / float(4418)
self.assertAlmostEqual(modularity.modularity(graph, dissimilar_words), expected_result)
# Test a smaller group of similar words
less_similar_words = {2:['limitless', 'unlimited', 'endless', 'boundless']}
# print str(less_similar_words) + ' has modularity: ' + str(modularity.modularity(graph, less_similar_words))
expected_result = float(867) / float(35344)
self.assertAlmostEqual(modularity.modularity(graph, less_similar_words), expected_result)
#community_assignment={0:[ 'frequent', 'usual', 'simple'],1:['free', 'complimentary', 'at liberty', 'costless', 'gratis', 'unpaid', 'liberated', 'unrestricted', 'unlimited', 'open', 'familiar', 'permitted', 'independent', 'idle']}
#print 'modularity: ' + str(modularity.modularity(graph, community_assignment))
'''
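# Standard unittest entry point so the file can also be run directly
# (assumes no extra zen-specific test setup is required).
if __name__ == '__main__':
    unittest.main()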
| bsd-3-clause | 4,426,548,635,912,823,000 | 47.541667 | 238 | 0.607382 | false |
skosukhin/spack | var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py | 1 | 1662 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class OptionalDepTest3(Package):
"""Depends on the optional-dep-test package"""
homepage = "http://www.example.com"
url = "http://www.example.com/optional-dep-test-3-1.0.tar.gz"
version('1.0', '0123456789abcdef0123456789abcdef')
variant('var', default=False)
depends_on('a', when='~var')
depends_on('b', when='+var')
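    # The 'var' variant flips the dependency graph: '~var' (the default)
    # pulls in mock package 'a', while '+var' pulls in 'b', exercising
    # Spack's conditional-dependency resolution.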
def install(self, spec, prefix):
pass
| lgpl-2.1 | -7,378,908,064,907,632,000 | 38.571429 | 78 | 0.661853 | false |
enritoomey/DiagramaDeRafagasyManiobras | calculos.py | 1 | 15303 | # TODO: turn this into a set of functions
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import fsolve
def plot_diagrama_de_rafagas(ax, Vb, Vc, Vd, n_25fts, n_50fts, n_60fts, dv, units, vel_label):
ax.plot(np.arange(0, Vb[units], dv), [1 + n_60fts(vel) for vel in np.arange(0, Vb[units], dv)], color='r')
ax.plot(np.arange(0, Vb[units], dv), [1 - n_60fts(vel) for vel in np.arange(0, Vb[units], dv)], color='r')
ax.plot(np.arange(0, Vc[units], dv), [1 + n_50fts(vel) for vel in np.arange(0, Vc[units], dv)], color='b')
ax.plot(np.arange(0, Vc[units], dv), [1 - n_50fts(vel) for vel in np.arange(0, Vc[units], dv)], color='b')
ax.plot(np.arange(0, Vd[units], dv), [1 + n_25fts(vel) for vel in np.arange(0, Vd[units], dv)], color='g')
ax.plot(np.arange(0, Vd[units], dv), [1 - n_25fts(vel) for vel in np.arange(0, Vd[units], dv)], color='g')
ax.plot([Vb[units], Vc[units]], [1 + n_60fts(Vb[units]), 1 + n_50fts(Vc[units])], color='m')
ax.plot([Vb[units], Vc[units]], [1 - n_60fts(Vb[units]), 1 - n_50fts(Vc[units])], color='m')
ax.plot([Vc[units], Vd[units]], [1 + n_50fts(Vc[units]), 1 + n_25fts(Vd[units])], color='m')
ax.plot([Vc[units], Vd[units]], [1 - n_50fts(Vc[units]), 1 - n_25fts(Vd[units])], color='m')
ax.plot([Vd[units], Vd[units]], [1 + n_25fts(Vd[units]), 1 - n_25fts(Vd[units])], color='m')
ax.set_xlabel("Speed [{}]".format(vel_label[units]))
ax.set_ylabel("n")
ax.set_title("Gust Diagram")
def plot_diagrama_de_maniobras(ax, n_stall_pos, n_stall_neg, n_max, Vs1, Vs0,
                               Va, Vc, Vd, dv, units, vel_label):
ax.plot(np.arange(0, Vs1[units], dv), [n_stall_pos(vel) for vel in np.arange(0, Vs1[units], dv)], color='m',
linestyle='--')
ax.plot([Vs1[units], Vs1[units]], [0, n_stall_pos(Vs1[units])], color='m')
ax.plot(np.arange(Vs1[units], Va[units], dv), [n_stall_pos(vel) for vel in np.arange(Vs1[units], Va[units], dv)],
color='m', linestyle='-')
ax.plot(np.arange(0, Vs0[units] + dv, dv), [n_stall_neg(vel) for vel in np.arange(0, Vs0[units] + dv, dv)],
color='m', linestyle='--')
ax.plot([Vs0[units], Vs0[units]], [0, -1.0], color='m')
ax.plot([Vs1[units], Vs0[units]], [0.0, 0.0], color='m')
ax.plot([Va[units], Vd[units]], [n_max, n_max], color='m')
ax.plot([Vd[units], Vd[units]], [n_max, 0], color='m')
ax.plot([Vs0[units], Vc[units]], [-1.0, -1.0], color='m')
ax.plot([Vc[units], Vd[units]], [-1.0, 0.0], color='m')
ax.set_xlabel("Speed [{}]".format(vel_label[units]))
ax.set_ylabel("n")
ax.set_title("Manoeuvre Diagram")
def plot_diagrama_de_maniobras_con_flap(ax, n_stall_flap, Vsf, Vf_n2, Vf, dv, units, vel_label):
ax.plot(np.arange(0, Vsf[units] + dv, dv), [n_stall_flap(vel) for vel in np.arange(0, Vsf[units] + dv, dv)],
color='b', linestyle='--')
ax.plot(np.arange(Vsf[units], Vf_n2 + dv, dv), [n_stall_flap(vel) for vel in np.arange(Vsf[units], Vf_n2 + dv, dv)],
color='b', linestyle='-')
ax.plot([Vsf[units], Vsf[units]], [0.0, n_stall_flap(Vsf[units])], color='b', linestyle='-')
ax.plot([Vf_n2, Vf[units]], [2.0, 2.0], color='b', linestyle='-')
ax.plot([Vf[units], Vf[units]], [0.0, 2.0], color='b', linestyle='-')
ax.plot([Vsf[units], Vf[units]], [0.0, 0.0], color='b', linestyle='-')
ax.set_xlabel("Speed [{}]".format(vel_label[units]))
ax.set_ylabel("n")
ax.set_title("Manoeuvre Diagram")
def plot_diagrama_de_maniobras_y_rafagas(ax, n_stall_pos, n_stall_neg, n_gust_pos, n_gust_neg, n_manoeuvre_pos,
n_manoeuvre_neg, v_intersec_pos, v_intersec_neg, Vd, dv, units, vel_label):
ax.fill_between(np.arange(0, v_intersec_pos, dv), 0, [n_stall_pos(vel) for vel in np.arange(0, v_intersec_pos, dv)],
color='m', alpha=0.2)
ax.fill_between(np.arange(v_intersec_pos, Vd[units], dv), 0, [max(n_gust_pos(vel), n_manoeuvre_pos(vel))
for vel in np.arange(v_intersec_pos, Vd[units], dv)],
color='m', alpha=0.2)
ax.fill_between(np.arange(0, v_intersec_neg, dv), 0, [n_stall_neg(vel) for vel in np.arange(0, v_intersec_neg, dv)],
color='m', alpha=0.2)
ax.fill_between(np.arange(v_intersec_neg, Vd[units], dv), 0, [min(n_gust_neg(vel), n_manoeuvre_neg(vel))
for vel in np.arange(v_intersec_neg, Vd[units], dv)],
color='m', alpha=0.2)
ax.fill_between([Vd[units], Vd[units]], 0, [max(n_manoeuvre_pos(Vd[units]), n_gust_pos(Vd[units])),
min(n_manoeuvre_neg(Vd[units]), n_gust_neg(Vd[units]))], color='m',
alpha=0.2)
ax.set_xlabel("Speed [{}]".format(vel_label[units]))
ax.set_ylabel("n")
ax.set_title("Combined Gust & Manoeuvre Diagram")
#import ipdb
if __name__ == '__main__':
# Units
units = 'IM' # 'IM'
ft2m = 0.3048
lb2kg = 0.453592
slugcuft2kgm3 = 515.379
vel_label = {'IM': 'ft/s', 'SI': 'm/s'}
# Input Data:
CAM = {'SI': 2.461}
CAM['IM'] = CAM['SI']/ft2m
sw = {'SI': 60}
sw['IM'] = sw['SI']/ft2m/ft2m
a3D = 5.0037 #1/rad
MTOW = {'SI': 23000}
MTOW['IM'] = MTOW['SI']/lb2kg
MLW = {'SI': 23000}
MLW['IM'] = MLW['SI']/lb2kg
W0 = {'SI': 13766.0}
W0['IM'] = W0['SI']/lb2kg
MZFW = {'SI': 16376.0}
MZFW['IM'] = MZFW['SI']/lb2kg
Vc = {'SI': 151.93}
Vc['IM'] = Vc['SI']/ft2m
clmax = 1.2463
clmax_flap = 1.499
clmin = -0.75*clmax
Zmo = {'SI': 9999.2}
Zmo['IM'] = Zmo['SI']/ft2m
# Variables
W = {'SI': 20000}
W['IM'] = W['SI']/lb2kg
h = {'SI': 5000}
h['IM'] = h['SI']/ft2m
den = {'SI': 0.125}
den['IM'] = den['SI']/lb2kg*ft2m**3
    # Constants
cte_fgz = {'IM': 250000}
cte_fgz['SI'] = cte_fgz['IM']*ft2m
s = {'IM': 100.015}
s['SI'] = s['IM']*ft2m
gravedad = {'SI': 9.81}
gravedad['IM'] = gravedad['SI']*ft2m/lb2kg
cte_nmax_1 = {'IM': 24000}
cte_nmax_1['SI'] = cte_nmax_1['IM']*lb2kg
cte_nmax_2 = {'IM': 10000}
    cte_nmax_2['SI'] = cte_nmax_2['IM']*lb2kg
    # Constants derived from the input data
carga_alar = {}
H = {}
Vs1 = {}
Vs0 = {}
Vsf = {}
Vd = {}
Va = {}
Vf = {}
Vb = {}
Uref = {}
Uds = U = {}
Ude_25fts = {}
Ude_50fts = {}
Ude_60fts = {}
carga_alar[units] = W[units] / sw[units]
mu_g = 2 * carga_alar[units] / (den[units] * CAM[units] * a3D)#*gravedad[units])
Kg = 0.88 * (mu_g / (5.3 + mu_g))
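    # Discrete-gust alleviation factor (cf. FAR 23.341):
    # Kg = 0.88*mu_g/(5.3 + mu_g), with the mass ratio mu_g computed here as
    # 2*(W/S)/(rho * CAM * a3D); the g factor is left commented out above.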
Vs1[units] = np.sqrt(carga_alar[units] / (0.5 * den[units] * clmax))
Vs0[units] = np.sqrt(-carga_alar[units] / (0.5 * den[units] * clmin))
Vsf[units] = np.sqrt(carga_alar[units] / (0.5 * den[units] * clmax_flap))
    # Compute n_max
n_max = 2.1 + cte_nmax_1[units] / (MTOW[units] + cte_nmax_2[units])
if n_max < 2.5:
n_max = 2.5
elif n_max > 3.8:
n_max = 3.8
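    # Limit manoeuvring load factor per FAR/CS 25.337(b):
    # n_max = 2.1 + 24000/(MTOW + 10000), clipped to the range [2.5, 3.8].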
Va[units] = Vs1[units] * np.sqrt(n_max)
if Va[units] > Vc[units]:
Va[units] = Vc[units]
Vd[units] = Vc[units] / 0.85
Vf[units] = max(Vs1[units] * 1.6, Vsf[units] * 1.8)
cte_Uref_h1 = {'IM': 15000}
cte_Uref_h1['SI'] = cte_Uref_h1['IM'] * ft2m
cte_Uref_h2 = {'IM': 50000}
cte_Uref_h2['SI'] = cte_Uref_h2['IM'] * ft2m
cte_Uref_v1 = {'IM': 56}
cte_Uref_v1['SI'] = cte_Uref_v1['IM'] * ft2m
cte_Uref_v2 = {'IM': 56}
cte_Uref_v2['SI'] = cte_Uref_v2['IM'] * ft2m
cte_Uref_v3 = {'IM': 26}
cte_Uref_v3['SI'] = cte_Uref_v3['IM'] * ft2m
#ipdb.set_trace()
if h[units] < cte_Uref_h1[units]:
Uref[units] = cte_Uref_v1[units] - 12.0 * h[units] / cte_Uref_h1[units]
elif h[units] < cte_Uref_h2[units]:
Uref[units] = cte_Uref_v2[units] - 18.0 * (h[units] - cte_Uref_h1[units]) /\
(cte_Uref_h2[units] - cte_Uref_h1[units])
else:
Uref[units] = cte_Uref_v3[units]
    # This factor is needed because the normal-force slope a_cn = dCn/dalpha
    # must be used instead of the lift slope a_cl = dCl/dalpha, but the origin
    # of the value is unknown.
ad_CN = 0.59248
cte_Vb = {'IM': 498.0}# lb/s**2
cte_Vb['SI'] = cte_Vb['IM'] * ft2m**4 / lb2kg
Vb[units] = min(Vc[units], Vs1[units] * np.sqrt(1 + Kg * Uref[units] * Vc[units] * a3D * ad_CN /
(cte_Vb[units] * carga_alar[units])))
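    # Vb is the design speed for maximum gust intensity (FAR/CS 25.335(d)):
    # the stall-based expression above, but never higher than Vc.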
    # Gust velocities
cte_Ude_h1 = {'IM': 20000}
cte_Ude_h1['SI'] = cte_Ude_h1['IM'] * ft2m
cte_Ude_h2 = {'IM': 50000}
cte_Ude_h2['SI'] = cte_Ude_h2['IM'] * ft2m
cte_25fts_v1 = {'IM': 25}
cte_25fts_v1['SI'] = cte_25fts_v1['IM'] * ft2m
cte_25fts_v2 = {'IM': 33.34}
cte_25fts_v2['SI'] = cte_25fts_v2['IM'] * ft2m
cte_25fts_m2 = 0.000417
cte_25fts_v3 = {'IM': 12.5}
cte_25fts_v3['SI'] = cte_25fts_v3['IM'] * ft2m
cte_50fts_v1 = {'IM': 50}
cte_50fts_v1['SI'] = cte_50fts_v1['IM'] * ft2m
cte_50fts_v2 = {'IM': 66.77}
cte_50fts_v2['SI'] = cte_50fts_v2['IM'] * ft2m
cte_50fts_m2 = 0.0008933
cte_50fts_v3 = {'IM': 25}
cte_50fts_v3['SI'] = cte_50fts_v3['IM'] * ft2m
cte_60fts_v1 = {'IM': 60}
cte_60fts_v1['SI'] = cte_60fts_v1['IM'] * ft2m
cte_60fts_v2 = {'IM': 60}
cte_60fts_v2['SI'] = cte_60fts_v2['IM'] * ft2m
cte_60fts_m2 = {'IM': 18}
cte_60fts_m2['SI'] = cte_60fts_m2['IM'] * ft2m
cte_60fts_v3 = {'IM': 38}
cte_60fts_v3['SI'] = cte_60fts_v3['IM'] * ft2m
if h[units] < cte_Ude_h1[units]:
Ude_25fts[units] = cte_25fts_v1[units]
Ude_50fts[units] = cte_50fts_v1[units]
Ude_60fts[units] = cte_60fts_v1[units]
elif h[units] < cte_Ude_h2[units]:
Ude_25fts[units] = cte_25fts_v2[units] - cte_25fts_m2 * h[units]
Ude_50fts[units] = cte_50fts_v2[units] - cte_50fts_m2 * h[units]
        Ude_60fts[units] = cte_60fts_v2[units] - cte_60fts_m2[units] * (h[units] - cte_Ude_h1[units])\
                           / (cte_Ude_h2[units] - cte_Ude_h1[units])
else:
Ude_25fts[units] = cte_25fts_v3[units]
Ude_50fts[units] = cte_50fts_v3[units]
Ude_60fts[units] = cte_60fts_v3[units]
def n_25fts(vel):
return fg * Ude_25fts[units] * a3D * ad_CN * vel / (cte_Vb[units] * carga_alar[units])
def n_50fts(vel):
return Kg * Ude_50fts[units] * a3D * ad_CN * vel / (cte_Vb[units] * carga_alar[units])
def n_60fts(vel):
return Kg * Ude_60fts[units] * a3D * ad_CN * vel / (cte_Vb[units] * carga_alar[units])
def n_gust_pos(vel):
if 0 <= vel <= Vb[units]:
return 1 + n_60fts(vel)
elif vel <= Vc[units]:
m = (n_50fts(Vc[units]) - n_60fts(Vb[units])) / (Vc[units] - Vb[units])
b = n_50fts(Vc[units]) - m * Vc[units]
return 1 + m*vel + b
elif vel <= Vd[units]:
m = (n_25fts(Vd[units]) - n_50fts(Vc[units])) / (Vd[units] - Vc[units])
b = n_25fts(Vd[units]) - m * Vd[units]
return 1 + m * vel + b
return None
def n_gust_neg(vel):
if 0 <= vel <= Vb[units]:
return 1 - n_60fts(vel)
elif vel <= Vc[units]:
m = (n_50fts(Vc[units]) - n_60fts(Vb[units])) / (Vc[units] - Vb[units])
b = n_50fts(Vc[units]) - m * Vc[units]
            return 1 - (m * vel + b)
elif vel <= Vd[units]:
m = (n_25fts(Vd[units]) - n_50fts(Vc[units])) / (Vd[units] - Vc[units])
b = n_25fts(Vd[units]) - m * Vd[units]
            return 1 - (m * vel + b)
return None
    # Variables defined but not used
# H[units] = 12.5*CAM[units]
R1 = MLW[units] / MTOW[units]
R2 = MZFW[units] / MTOW[units]
fgm = np.sqrt(R2 * np.tan(np.pi * R1 / 4.0))
fgz = 1 - Zmo[units] / cte_fgz[units]
fg = 0.5*(fgz + fgm)
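    # Flight profile alleviation factor (cf. CS 25.341): fg = 0.5*(fgz + fgm)
    # with fgz = 1 - Zmo/250000 ft and fgm = sqrt(R2 * tan(pi*R1/4)).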
# cte_Uds = {'IM':350}
# cte_Uds['SI'] = cte_Uds['IM']*ft2m
# Uds[units] = Uref[units]*fg*(H[units]/cte_Uds[units])**(1.0/6.0)
# U[units] = 0.5*Uds[units]*(1-np.cos(np.pi*s[units]/H[units]))
print("Kg = {}, Vb = {}, Vc = {}, Vd = {}".format(Kg, Vb, Vc, Vd))
dv = 1.0
    # Gust diagram
fig, ax = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True, squeeze=True)
plot_diagrama_de_rafagas(ax, Vb, Vc, Vd, n_25fts, n_50fts, n_60fts, dv, units, vel_label)
plt.grid(True)
plt.show()
def n_stall_pos(vel):
return 0.5 * den[units] * vel**2 * sw[units] * clmax / W[units]
def n_stall_neg(vel):
return 0.5 * den[units] * vel**2 * sw[units] * clmin / W[units]
def n_stall_flap(vel):
return 0.5 * den[units] * vel**2 * sw[units] * clmax_flap / W[units]
def n_manoeuvre_pos(vel):
if 0 <= vel <= Va[units]:
return n_stall_pos(vel)
elif vel <= Vd[units]:
return n_max
return None
def n_manoeuvre_neg(vel):
if 0 <= vel <= Vs0[units]:
return n_stall_neg(vel)
elif vel <= Vc[units]:
return -1.0
elif vel <= Vd[units]:
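            # Negative limit ramps linearly from -1 at Vc up to 0 at Vd.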
return -1 + 1 / (Vd[units] - Vc[units]) * (vel - Vc[units])
return None
    # Manoeuvre diagram:
fig, ax = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True, squeeze=True)
    plot_diagrama_de_maniobras(ax, n_stall_pos, n_stall_neg, n_max, Vs1, Vs0,
                               Va, Vc, Vd, dv, units, vel_label)
#plt.show()
    # Manoeuvre diagram with flaps:
#fig, ax = plt.subplots(nrows=1, ncols= 1, sharex=True, sharey=True, squeeze=True)
Vf_n2 = np.sqrt(2 * W[units] / (0.5 * den[units] * clmax_flap * sw[units]))
plot_diagrama_de_maniobras_con_flap(ax, n_stall_flap, Vsf, Vf_n2, Vf, dv, units, vel_label)
plt.grid(True)
plt.show()
    # Compute the intersections:
if n_gust_pos(Va[units]) > n_max:
        # extend the stall curve to its intersection with the gust curve and
        # start the comparison from that point
def func1(vel):
return n_gust_pos(vel) - n_stall_pos(vel)
v_intersec_pos = fsolve(func1, Va[units])[0]
else:
v_intersec_pos = Va[units]
if n_gust_pos(Vs0[units]) < -1.0:
        # extend the stall curve to its intersection with the gust curve and
        # start the comparison from that point
def func2(vel):
return n_gust_neg(vel) - n_stall_neg(vel)
v_intersec_neg = fsolve(func2, Vs0[units])[0]
else:
v_intersec_neg = Vs0[units]
# Plot intersection
# fig = plt.figure(facecolor='white')
# axescolor = '#f6f6f6' # the axes background color
# ax = fig.add_axes([0, 1, 0, 1], axisbg=axescolor)
fig, ax = plt.subplots(nrows=1, ncols=1, sharex=True, sharey=True, squeeze=True)
plot_diagrama_de_maniobras_y_rafagas(ax, n_stall_pos, n_stall_neg, n_gust_pos, n_gust_neg, n_manoeuvre_pos,
n_manoeuvre_neg, v_intersec_pos, v_intersec_neg, Vd, dv, units, vel_label)
plt.grid(True)
plt.show()
| mit | 6,982,465,582,770,812,000 | 40.041209 | 135 | 0.52493 | false |
joshua-cogliati-inl/raven | framework/Metrics/Factory.py | 1 | 3069 | # Copyright 2017 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Created on Jul 18 2016
@author: mandd
"""
#for future compatibility with Python 3-----------------------------------------
from __future__ import division, print_function, unicode_literals, absolute_import
#End compatibility block for Python 3-------------------------------------------
################################################################################
from utils import utils
from .Metric import Metric
from .DTW import DTW
from .SklMetric import SKL
from .PairwiseMetric import PairwiseMetric
from .CDFAreaDifference import CDFAreaDifference
from .PDFCommonArea import PDFCommonArea
from .ScipyMetric import ScipyMetric
## [ Add new class here ]
################################################################################
## Alternatively, to fully automate this file:
# from Metrics import *
################################################################################
"""
Interface Dictionary (factory) (private)
"""
# This machinery will automatically populate the "knownTypes" given the
# imports defined above.
__base = 'Metric'
__interFaceDict = {}
for classObj in utils.getAllSubclasses(eval(__base)):
__interFaceDict[classObj.__name__] = classObj
def knownTypes():
"""
Returns a list of strings that define the types of instantiable objects for
this base factory.
@ In, None
@ Out, knownTypes, list, list of known types
"""
return __interFaceDict.keys()
def returnInstance(Type,caller):
"""
Attempts to create and return an instance of a particular type of object
available to this factory.
@ In, Type, string, string should be one of the knownTypes.
@ In, caller, instance, the object requesting the instance (used for error/debug messaging).
    @ Out, returnInstance, instance, instance of Metric subclass, a subclass object constructed with no arguments
"""
try:
return __interFaceDict[Type]()
except KeyError:
caller.raiseAnError(NameError,__name__+': unknown '+__base+' type '+Type)
def returnClass(Type,caller):
"""
Attempts to return a particular class type available to this factory.
@ In, Type, string, string should be one of the knownTypes.
@ In, caller, instance, the object requesting the class (used for error/debug messaging).
@ Out, returnClass, class, reference to the subclass
"""
try:
return __interFaceDict[Type]
except KeyError:
caller.raiseAnError(NameError,__name__+': unknown '+__base+' type '+Type)
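# Example usage (sketch; 'caller' stands for any object exposing
# raiseAnError, e.g. a RAVEN message user):
#   dtwMetric = returnInstance('DTW', caller)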
| apache-2.0 | -6,392,335,731,617,827,000 | 36.888889 | 123 | 0.659498 | false |
mrrichardchou/FAST_EVD | DataIO/ismrmd/doc/source/conf.py | 1 | 8582 | # -*- coding: utf-8 -*-
#
# ISMRMRD documentation build configuration file, created by
# sphinx-quickstart on Thu Nov 13 10:11:39 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
#import breathe
import sphinx_bootstrap_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
#'breathe'
]
#breathe_projects = { 'ISMRMRD': '/Users/naegelejd/src/github.com/ismrmrd/ismrmrd/build/doc/api/xml' }
#breathe_default_project = 'ISMRMRD'
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'ISMRMRD'
copyright = u'2014, ISMRMRD Developers'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.1'
# The full version, including alpha/beta/rc tags.
release = '1.1.0'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'bootstrap'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {
'navbar_links': [
('API Reference', "api/index.html", True)
]
}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'ISMRMRDdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'ISMRMRD.tex', u'ISMRMRD Documentation',
u'ISMRMRD Developers', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'ismrmrd', u'ISMRMRD Documentation',
[u'ISMRMRD Developers'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'ISMRMRD', u'ISMRMRD Documentation',
u'ISMRMRD Developers', 'ISMRMRD', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| bsd-2-clause | 9,073,641,456,777,190,000 | 30.435897 | 102 | 0.706595 | false |
hpcuantwerpen/easybuild-easyblocks | easybuild/easyblocks/b/boost.py | 1 | 17638 | ##
# Copyright 2009-2021 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for Boost, implemented as an easyblock
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
@author: Ward Poelmans (Ghent University)
@author: Petar Forai (IMP/IMBA)
@author: Luca Marsella (CSCS)
@author: Guilherme Peretti-Pezzi (CSCS)
@author: Joachim Hein (Lund University)
@author: Michele Dolfi (ETH Zurich)
@author: Simon Branford (University of Birmingham)
"""
from distutils.version import LooseVersion
import fileinput
import glob
import os
import re
import sys
import easybuild.tools.toolchain as toolchain
from easybuild.framework.easyblock import EasyBlock
from easybuild.framework.easyconfig import CUSTOM
from easybuild.tools.build_log import EasyBuildError
from easybuild.tools.config import ERROR
from easybuild.tools.filetools import apply_regex_substitutions, copy, mkdir, symlink, which, write_file
from easybuild.tools.modules import get_software_root, get_software_version
from easybuild.tools.run import run_cmd
from easybuild.tools.systemtools import AARCH64, POWER, UNKNOWN
from easybuild.tools.systemtools import get_cpu_architecture, get_glibc_version, get_shared_lib_ext
class EB_Boost(EasyBlock):
"""Support for building Boost."""
def __init__(self, *args, **kwargs):
"""Initialize Boost-specific variables."""
super(EB_Boost, self).__init__(*args, **kwargs)
self.objdir = None
self.pyvers = []
if LooseVersion(self.version) >= LooseVersion("1.71.0"):
self.bjamcmd = 'b2'
else:
self.bjamcmd = 'bjam'
@staticmethod
def extra_options():
"""Add extra easyconfig parameters for Boost."""
extra_vars = {
'boost_mpi': [False, "Build mpi boost module", CUSTOM],
'boost_multi_thread': [None, "Build boost with multi-thread option (DEPRECATED)", CUSTOM],
'tagged_layout': [None, "Build with tagged layout on library names, default from version 1.69.0", CUSTOM],
'single_threaded': [None, "Also build single threaded libraries, requires tagged_layout, "
"default from version 1.69.0", CUSTOM],
'toolset': [None, "Toolset to use for Boost configuration ('--with-toolset' for bootstrap.sh)", CUSTOM],
'build_toolset': [None, "Toolset to use for Boost compilation "
"('toolset' for b2, default calculated from toolset)", CUSTOM],
'mpi_launcher': [None, "Launcher to use when running MPI regression tests", CUSTOM],
'only_python_bindings': [False, "Only install Boost.Python library providing Python bindings", CUSTOM],
'use_glibcxx11_abi': [None, "Use the GLIBCXX11 ABI", CUSTOM],
}
return EasyBlock.extra_options(extra_vars)
def patch_step(self):
"""Patch Boost source code before building."""
super(EB_Boost, self).patch_step()
# TIME_UTC is also defined in recent glibc versions, so we need to rename it for old Boost versions (<= 1.49)
glibc_version = get_glibc_version()
        recent_glibc = glibc_version is not UNKNOWN and LooseVersion(glibc_version) > LooseVersion("2.15")
        if recent_glibc and LooseVersion(self.version) <= LooseVersion("1.49.0"):
self.log.info("Patching because the glibc version is too new")
files_to_patch = ["boost/thread/xtime.hpp"] + glob.glob("libs/interprocess/test/*.hpp")
files_to_patch += glob.glob("libs/spirit/classic/test/*.cpp") + glob.glob("libs/spirit/classic/test/*.inl")
for patchfile in files_to_patch:
try:
for line in fileinput.input("%s" % patchfile, inplace=1, backup='.orig'):
line = re.sub(r"TIME_UTC", r"TIME_UTC_", line)
sys.stdout.write(line)
except IOError as err:
raise EasyBuildError("Failed to patch %s: %s", patchfile, err)
def prepare_step(self, *args, **kwargs):
"""Prepare build environment."""
super(EB_Boost, self).prepare_step(*args, **kwargs)
# keep track of Python version(s) used during installation,
# so we can perform a complete sanity check
if get_software_root('Python'):
self.pyvers.append(get_software_version('Python'))
def configure_step(self):
"""Configure Boost build using custom tools"""
# boost_multi_thread is deprecated
if self.cfg['boost_multi_thread'] is not None:
self.log.deprecated("boost_multi_thread has been replaced by tagged_layout. "
"We build with tagged layout and both single and multi threading libraries "
"from version 1.69.0.", '5.0')
self.cfg['tagged_layout'] = True
# mpi sanity check
if self.cfg['boost_mpi'] and not self.toolchain.options.get('usempi', None):
raise EasyBuildError("When enabling building boost_mpi, also enable the 'usempi' toolchain option.")
# create build directory (Boost doesn't like being built in source dir)
self.objdir = os.path.join(self.builddir, 'obj')
mkdir(self.objdir)
# generate config depending on compiler used
toolset = self.cfg['toolset']
if toolset is None:
if self.toolchain.comp_family() == toolchain.INTELCOMP:
toolset = 'intel-linux'
elif self.toolchain.comp_family() == toolchain.GCC:
toolset = 'gcc'
else:
raise EasyBuildError("Unknown compiler used, don't know what to specify to --with-toolset, aborting.")
cmd = "%s ./bootstrap.sh --with-toolset=%s --prefix=%s %s"
tup = (self.cfg['preconfigopts'], toolset, self.objdir, self.cfg['configopts'])
run_cmd(cmd % tup, log_all=True, simple=True)
# Use build_toolset if specified or the bootstrap toolset without the OS suffix
self.toolset = self.cfg['build_toolset'] or re.sub('-linux$', '', toolset)
user_config = []
        # Explicitly set the compiler path to avoid B2 checking some standard paths like /opt
cxx = os.getenv('CXX')
if cxx:
cxx = which(cxx, on_error=ERROR)
# Remove default toolset config which may lead to duplicate toolsets (e.g. for intel-linux)
apply_regex_substitutions('project-config.jam', [('using %s ;' % toolset, '')])
# Add our toolset config with no version and full path to compiler
user_config.append("using %s : : %s ;" % (self.toolset, cxx))
if self.cfg['boost_mpi']:
# configure the boost mpi module
# http://www.boost.org/doc/libs/1_47_0/doc/html/mpi/getting_started.html
# let Boost.Build know to look here for the config file
# Check if using a Cray toolchain and configure MPI accordingly
if self.toolchain.toolchain_family() == toolchain.CRAYPE:
if self.toolchain.PRGENV_MODULE_NAME_SUFFIX == 'gnu':
craympichdir = os.getenv('CRAY_MPICH2_DIR')
craygccversion = os.getenv('GCC_VERSION')
# We configure the gcc toolchain below, so make sure the EC doesn't use another toolset
if self.toolset != 'gcc':
raise EasyBuildError("For the cray toolchain the 'gcc' toolset must be used.")
# Remove the previous "using gcc" line add above (via self.toolset) if present
user_config = [x for x in user_config if not x.startswith('using gcc :')]
user_config.extend([
'local CRAY_MPICH2_DIR = %s ;' % craympichdir,
'using gcc ',
': %s' % craygccversion,
': CC ',
': <compileflags>-I$(CRAY_MPICH2_DIR)/include ',
r' <linkflags>-L$(CRAY_MPICH2_DIR)/lib \ ',
'; ',
'using mpi ',
': CC ',
': <find-shared-library>mpich ',
': %s' % self.cfg['mpi_launcher'],
';',
'',
])
else:
raise EasyBuildError("Bailing out: only PrgEnv-gnu supported for now")
else:
user_config.append("using mpi : %s ;" % os.getenv("MPICXX"))
write_file('user-config.jam', '\n'.join(user_config), append=True)
def build_boost_variant(self, bjamoptions, paracmd):
"""Build Boost library with specified options for bjam."""
# build with specified options
cmd = "%s ./%s %s %s %s" % (self.cfg['prebuildopts'], self.bjamcmd, bjamoptions, paracmd, self.cfg['buildopts'])
run_cmd(cmd, log_all=True, simple=True)
# install built Boost library
cmd = "%s ./%s %s install %s %s" % (
self.cfg['preinstallopts'], self.bjamcmd, bjamoptions, paracmd, self.cfg['installopts'])
run_cmd(cmd, log_all=True, simple=True)
# clean up before proceeding with next build
run_cmd("./%s %s --clean-all" % (self.bjamcmd, bjamoptions), log_all=True, simple=True)
def build_step(self):
"""Build Boost with bjam tool."""
self.bjamoptions = " --prefix=%s --user-config=user-config.jam" % self.objdir
if 'toolset=' not in self.cfg['buildopts']:
self.bjamoptions += " toolset=" + self.toolset
cxxflags = os.getenv('CXXFLAGS')
# only disable -D_GLIBCXX_USE_CXX11_ABI if use_glibcxx11_abi was explicitly set to False
# None value is the default, which corresponds to default setting (=1 since GCC 5.x)
if self.cfg['use_glibcxx11_abi'] is not None:
cxxflags += ' -D_GLIBCXX_USE_CXX11_ABI='
if self.cfg['use_glibcxx11_abi']:
cxxflags += '1'
else:
cxxflags += '0'
if cxxflags is not None:
self.bjamoptions += " cxxflags='%s'" % cxxflags
ldflags = os.getenv('LDFLAGS')
if ldflags is not None:
self.bjamoptions += " linkflags='%s'" % ldflags
# specify path for bzip2/zlib if module is loaded
for lib in ["bzip2", "zlib"]:
libroot = get_software_root(lib)
if libroot:
self.bjamoptions += " -s%s_INCLUDE=%s/include" % (lib.upper(), libroot)
self.bjamoptions += " -s%s_LIBPATH=%s/lib" % (lib.upper(), libroot)
self.paracmd = ''
if self.cfg['parallel']:
self.paracmd = "-j %s" % self.cfg['parallel']
if self.cfg['only_python_bindings']:
# magic incantation to only install Boost Python bindings is... --with-python
# see http://boostorg.github.io/python/doc/html/building/installing_boost_python_on_your_.html
self.bjamoptions += " --with-python"
# Default threading since at least 1.47.0 is multi with system layout
threading = " threading=multi"
layout = " --layout=system"
if LooseVersion(self.version) >= LooseVersion("1.69.0"):
# As of 1.69.0 we build with layout tagged and both single and multi threading
# Linking default libraries to multi-threaded versions.
if self.cfg['tagged_layout'] is None:
self.cfg['tagged_layout'] = True
if self.cfg['single_threaded'] is None:
self.cfg['single_threaded'] = True
if self.cfg['tagged_layout']:
layout = " --layout=tagged"
if self.cfg['single_threaded']:
if not self.cfg['tagged_layout']:
raise EasyBuildError("Singled threaded build requires tagged layout.")
threading = " threading=single,multi"
self.bjamoptions += threading + layout
if self.cfg['boost_mpi']:
self.log.info("Building boost_mpi library")
mpi_bjamoptions = " --with-mpi"
self.build_boost_variant(self.bjamoptions + mpi_bjamoptions, self.paracmd)
self.log.info("Building Boost libraries")
# build with specified options
cmd = ' '.join([
self.cfg['prebuildopts'],
os.path.join('.', self.bjamcmd),
self.bjamoptions,
self.paracmd,
self.cfg['buildopts'],
])
run_cmd(cmd, log_all=True, simple=True)
def install_step(self):
"""Install Boost by copying files to install dir."""
# install boost libraries
self.log.info("Installing Boost libraries")
cmd = ' '.join([
self.cfg['preinstallopts'],
os.path.join('.', self.bjamcmd),
self.bjamoptions,
'install',
self.paracmd,
self.cfg['installopts'],
])
run_cmd(cmd, log_all=True, simple=True)
self.log.info("Copying %s to installation dir %s", self.objdir, self.installdir)
if self.cfg['only_python_bindings'] and 'Python' in self.cfg['multi_deps'] and self.iter_idx > 0:
self.log.info("Main installation should already exist, only copying over missing Python libraries.")
copy(glob.glob(os.path.join(self.objdir, 'lib', 'libboost_python*')), os.path.join(self.installdir, 'lib'),
symlinks=True)
else:
copy(glob.glob(os.path.join(self.objdir, '*')), self.installdir, symlinks=True)
if self.cfg['tagged_layout']:
if LooseVersion(self.version) >= LooseVersion("1.69.0") or not self.cfg['single_threaded']:
# Link tagged multi threaded libs as the default libs
lib_glob = 'lib*-mt*.*'
mt_replace = re.compile(r'-[^.]*\.')
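                # e.g. libboost_thread-mt-x64.so -> libboost_thread.so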
for source_lib in glob.glob(os.path.join(self.installdir, 'lib', lib_glob)):
target_lib = mt_replace.sub('.', os.path.basename(source_lib))
symlink(os.path.basename(source_lib), os.path.join(self.installdir, 'lib', target_lib),
use_abspath_source=False)
def sanity_check_step(self):
"""Custom sanity check for Boost."""
shlib_ext = get_shared_lib_ext()
custom_paths = {
'files': [],
'dirs': ['include/boost']
}
if self.cfg['only_python_bindings']:
for pyver in self.pyvers:
pymajorver = pyver.split('.')[0]
pyminorver = pyver.split('.')[1]
if LooseVersion(self.version) >= LooseVersion("1.67.0"):
suffix = '%s%s' % (pymajorver, pyminorver)
elif int(pymajorver) >= 3:
suffix = pymajorver
else:
suffix = ''
custom_paths['files'].append(os.path.join('lib', 'libboost_python%s.%s' % (suffix, shlib_ext)))
else:
custom_paths['files'].append(os.path.join('lib', 'libboost_system.%s' % shlib_ext))
if self.cfg['tagged_layout']:
lib_mt_suffix = '-mt'
# MT libraries gained an extra suffix from v1.69.0 onwards
if LooseVersion(self.version) >= LooseVersion("1.69.0"):
if get_cpu_architecture() == AARCH64:
lib_mt_suffix += '-a64'
elif get_cpu_architecture() == POWER:
lib_mt_suffix += '-p64'
else:
lib_mt_suffix += '-x64'
custom_paths['files'].append(os.path.join('lib', 'libboost_thread%s.%s' % (lib_mt_suffix, shlib_ext)))
if self.cfg['boost_mpi']:
custom_paths['files'].append(os.path.join('lib', 'libboost_mpi.%s' % shlib_ext))
if self.cfg['tagged_layout']:
custom_paths['files'].append(os.path.join('lib', 'libboost_mpi%s.%s' % (lib_mt_suffix, shlib_ext)))
super(EB_Boost, self).sanity_check_step(custom_paths=custom_paths)
def make_module_extra(self):
"""Set up a BOOST_ROOT environment variable to e.g. ease Boost handling by cmake"""
txt = super(EB_Boost, self).make_module_extra()
if not self.cfg['only_python_bindings']:
txt += self.module_generator.set_environment('BOOST_ROOT', self.installdir)
return txt
| gpl-2.0 | 8,498,834,797,141,491,000 | 45.785146 | 120 | 0.589806 | false |
pacoqueen/ginn | ginn/formularios/partes_de_fabricacion_bolsas.py | 1 | 117840 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright (C) 2005-2020 Francisco José Rodríguez Bogado, #
# Diego Muñoz Escalante. #
# ([email protected], [email protected]) #
# #
# This file is part of GeotexInn. #
# #
# GeotexInn is free software; you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation; either version 2 of the License, or #
# (at your option) any later version. #
# #
# GeotexInn is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with GeotexInn; if not, write to the Free Software #
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA #
###############################################################################
"""
###################################################################
# # partes_de_fabricacion_bolsas.py - Production reports for
# #                                   bagged fibre cement.
###################################################################
# NOTES:
##
# ----------------------------------------------------------------
##
###################################################################
# Changelog:
# 5 May 2009 -> Started
##
###################################################################
# NOTES:
##
###################################################################
"""
# import sys, os
# sys.stdout = open("salida_debug.txt", "a")
# pylint: disable=import-error,too-many-lines,wrong-import-position
from __future__ import print_function
import time # noqa
import datetime # noqa
import gtk # noqa
import pygtk # noqa
pygtk.require('2.0')
from ventana import Ventana # noqa
from formularios import utils # noqa
from formularios.reports import mandar_a_imprimir_con_ghostscript # noqa
from framework import pclases # noqa
import mx.DateTime # noqa
from informes import geninformes # noqa
# pylint: disable=redefined-builtin
from utils import _float as float # noqa
from ventana_progreso import VentanaActividad, VentanaProgreso # noqa
from partes_de_fabricacion_balas import verificar_solapamiento # noqa
from partes_de_fabricacion_balas import entran_en_turno # noqa
from partes_de_fabricacion_rollos import descontar_material_adicional # noqa
try:
from api import murano
MURANO = True
except ImportError:
MURANO = False
# pylint:disable=unused-argument
def copy2(entry1, evento, entry2, sumar=0):
"""
Simplemente copia el contenido del entry1 en el entry2.
Si sumar es algo distinto de 0 intenta convertir el contenido del entry 1
a entero y escribirlo en el segundo como cadena tras sumarle el número
en cuestión.
"""
# No es más que para evitarme escribir la fecha de fin en 2 de cada 3
# partes.
if not sumar:
entry2.set_text(entry1.get_text())
else:
# Y ahora la hora. Menos tecleo, más rapidez.
try:
num = int(entry1.get_text())
except (ValueError, TypeError):
entry2.set_text(entry1.get_text())
else:
entry2.set_text(str((num + sumar) % 24))
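# Typical hook-up (illustrative widget names; see __init__ below for the
# real connections): an extra hour offset copies "HH + 8" between entries.
#   e_horaini.connect("key-release-event", copy2, e_horafin, 8)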
MEMENTO_MORI = {'tipo': None,
                'que_imprimir': None}  # Label type and what to print
# (box, pallet or pallet + boxes) until the window is closed or the
# production report changes.
# pylint: disable=too-many-public-methods,too-many-instance-attributes
class PartesDeFabricacionBolsas(Ventana):
"""
    Class that encapsulates the production-report window for bagged fibre.
"""
def __init__(self, objeto=None, usuario=None):
# pylint: disable=invalid-name,consider-using-ternary
        self.SHOW_PALES_COMPLETOS = False  # If True, boxes and bags are
        # also loaded into the treeview. Probably much slower.
        self.NIVEL_POR_LOTES = 2  # Minimum level (or maximum, depending on
        # how you look at it) required to create pallets by batch.
self.objeto = objeto
if not isinstance(usuario, pclases.Usuario):
try:
usuario = pclases.Usuario.selectBy(usuario=usuario)[0]
except IndexError:
usuario = None
self.usuario = usuario
        self.producto = None  # Product linked to the report.
self.__lecturaescritura = objeto and objeto.id or None
if usuario:
nombreventana = "partes_de_fabricacion_bolsas.py"
try:
ventana = pclases.Ventana.selectBy(fichero=nombreventana)[0]
self.__permisos = usuario.get_permiso(ventana)
except IndexError:
txt = "WARNING: partes_de_fabricacion_bolsas.py::__init__ -> "\
"No se pudieron determinar permisos de %s para la venta"\
"na %s." % (self.usuario.usuario, nombreventana)
print(txt)
self.logger.error(txt)
else:
self.__lecturaescritura = (self.__permisos.escritura
and self.objeto and self.objeto.id
or None)
else:
class FakePermisos:
# pylint: disable=too-few-public-methods
"""Clase para habilitar temporalmente permisos."""
def __init__(self):
self.nuevo = True
self.lectura = self.escritura = self.permiso = self.nuevo
self.__permisos = FakePermisos()
Ventana.__init__(self, 'partes_de_fabricacion_bolsas.glade', objeto,
usuario=usuario)
connections = {'b_salir/clicked': self._salir,
'ventana/delete_event': self._salir,
'b_add_empleado/clicked': self.add_empleado,
'b_drop_empleado/clicked': self.drop_empleado,
"b_partida/clicked": self.seleccionar_partida,
"b_producto/clicked": self.seleccionar_producto,
'b_actualizar/clicked': self.actualizar_ventana,
'b_guardar/clicked': self.guardar,
'b_nuevo/clicked': self.crear_nuevo_partedeproduccion,
'b_borrar/clicked': self.borrar_parte,
'b_buscar/clicked': self.buscar_partedeproduccion,
'ch_bloqueado/clicked': self.bloquear,
'b_add_consumo/clicked': self.add_consumo,
'b_add_bigbag/clicked': self.add_bigbag,
'b_drop_consumo/clicked': self.drop_consumo,
'b_add_incidencia/clicked': self.add_incidencia,
'b_drop_incidencia/clicked': self.drop_incidencia,
'b_add_pale/clicked': self.add_pale,
'b_drop_pale/clicked': self.drop_pale,
'b_etiquetar/clicked': self.etiquetar,
'b_next/clicked': self.siguiente,
'b_back/clicked': self.anterior
}
self.add_connections(connections)
self.wids['e_fechaini'].connect("key-release-event",
copy2,
self.wids['e_fechafin'])
self.wids['e_horaini'].connect("key-release-event",
copy2,
self.wids['e_horafin'],
8)
try:
linea = pclases.LineaDeProduccion.select(
pclases.LineaDeProduccion.q.nombre.contains(
'de embolsado'))[0]
except IndexError:
print("WARNING: La línea de embolsado no está correctamente "
"dada a de alta. La creo sobre la marcha.")
linea = pclases.LineaDeProduccion(
formulacion=None,
nombre="Línea de embolsado",
descripcion="Línea de embolsado de fibra de cemento.",
observaciones="Produce bolsas de fibra de cemento a partir"
" de bigbags fabricados en la línea de fibra.")
pclases.Auditoria.nuevo(linea, self.usuario, __file__)
self.linea = linea
self.formulacion = linea.formulacion
self.inicializar_ventana()
if self.objeto is None:
self.ir_a_primero()
else:
self.ir_a(objeto)
gtk.main()
def anterior(self, boton=None):
"""Va al parte de embolsado anterior al actual."""
if self.objeto:
anterior = self.objeto.anterior()
if anterior:
self.objeto = anterior
                # Reset label preferences.
global MEMENTO_MORI # pylint:disable=global-statement
MEMENTO_MORI = {'que_imprimir': None, 'tipo': None}
self.actualizar_ventana()
else:
utils.dialogo_info(
titulo="NO MÁS PARTES",
texto="No hay partes de producción anteriores "
"al actual.",
padre=self.wids['ventana'])
def siguiente(self, boton=None):
"""Va al siguiente parte de producción."""
if self.objeto:
siguiente = self.objeto.siguiente()
if siguiente:
self.objeto = siguiente
                # Reset label preferences.
global MEMENTO_MORI # pylint: disable=global-statement
MEMENTO_MORI = {'que_imprimir': None, 'tipo': None}
self.actualizar_ventana()
else:
utils.dialogo_info(
titulo="NO MÁS PARTES",
texto="No hay partes de producción posteriores "
"al actual.",
padre=self.wids['ventana'])
    # --------------- Helper functions ------------------------------
def leer_valores_ventana(self):
"""
        Returns a dictionary with the object's field names as keys and
        the already-parsed values from the window as values.
"""
res = {}
try:
fecha = utils.parse_fecha(self.wids['e_fechaini'].get_text())
except (TypeError, ValueError):
fecha = mx.DateTime.localtime()
# self.wids['e_fechaini'].set_text(utils.str_fecha(fecha))
        # What part of READ did you not understand? Why change the entry?
res["fecha"] = fecha
try:
hora = utils.parse_hora(self.wids['e_horaini'].get_text())
except (TypeError, ValueError):
hora = mx.DateTime.DateTimeDelta(0.0)
res["horainicio"] = hora
try:
hora = utils.parse_hora(self.wids['e_horafin'].get_text())
except (TypeError, ValueError):
hora = mx.DateTime.DateTimeDelta(0.0)
res["horafin"] = hora
res["prodestandar"] = 0 # No se usa
res["merma"] = 0.0 # Tampoco se usa
res["bloqueado"] = self.wids['ch_bloqueado'].get_active()
buff = self.wids['txt_observaciones'].get_buffer()
txt = buff.get_text(buff.get_start_iter(), buff.get_end_iter())
res["observaciones"] = txt
res["fechahorainicio"] = res["fecha"] + res["horainicio"]
try:
fechafin = utils.parse_fecha(self.wids['e_fechafin'].get_text())
except (TypeError, ValueError):
fechafin = mx.DateTime.localtime()
# self.wids['e_fechafin'].set_text(utils.str_fecha(fechafin))
res["fechahorafin"] = fechafin + res["horafin"]
codpartida = self.wids['e_partida'].get_text()
try:
partida = pclases.PartidaCem.selectBy(codigo=codpartida)[0]
res["partidaCemID"] = partida.id
except IndexError:
print("partes_de_fabricacion_bolsas.py::leer_valores_ventana -> "
"No se encontró partida con código '%s'. Probablemente "
"no se haya terminado de cargar la ventana." % codpartida)
partida = None
res["partidaCemID"] = None
return res
def es_diferente(self):
"""
        Returns True if the information on screen differs from that of
        the object in memory.
"""
partedeproduccion = self.objeto
if partedeproduccion is None:
            return False  # If there is no active production report,
            # report that nothing differs from the window.
condicion = True
valores = self.leer_valores_ventana()
for campo in valores:
valor_objeto = getattr(self.objeto, campo)
            # The new psycopg2 returns datetimes. Aaaargh!
if "hora" in campo and "fecha" not in campo:
valor_objeto = utils.DateTime2DateTimeDelta(valor_objeto)
# X X X
valor_ventana = valores[campo]
condicion = condicion and valor_ventana == valor_objeto
if not condicion:
if pclases.DEBUG:
print("partes_de_fabricacion_bolsas.py::es_diferente -> ",
campo,
"ventana", type(valor_ventana), valor_ventana,
"objeto", valor_objeto, type(valor_objeto))
break
        return not condicion  # condicion is True when the values are equal
def colorear_tabla_empleados(self):
"""
        Builds and attaches the function that highlights employees
        whose worked hours are below or above the duration of the
        report.
"""
def cell_func(column, cell, model, itr, numcol):
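            # Colour code: red = fewer hours than the report duration,
            # orange = more hours, black = exact match.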
idht = model[itr][-1]
horastrab = pclases.HorasTrabajadas.get(idht)
duracion_parte = self.objeto.get_duracion()
ht_horas = horastrab.horas
try:
supera_parte = ht_horas > duracion_parte
except TypeError: # horastrab.horas es datetime.time
ht_horas = utils.DateTime2DateTimeDelta(ht_horas)
supera_parte = ht_horas > duracion_parte
if supera_parte:
color = "orange"
elif ht_horas < duracion_parte:
color = "red"
else:
color = "black"
cell.set_property("foreground", color)
cols = self.wids['tv_empleados'].get_columns()
numcol = len(cols) - 1
column = cols[numcol]
cells = column.get_cell_renderers()
for cell in cells:
column.set_cell_data_func(cell, cell_func, numcol)
def inicializar_ventana(self):
"""
        Initialises the window controls, setting their default values,
        disabling the unnecessary ones, filling the combos and
        formatting the TreeView -if any-...
"""
        # Initially NOTHING is shown. The user is only given the option
        # to search or create a new report.
self.activar_widgets(False)
self.wids['b_actualizar'].set_sensitive(False)
self.wids['b_guardar'].set_sensitive(False)
self.wids['b_nuevo'].set_sensitive(True)
self.wids['b_buscar'].set_sensitive(True)
        # Initialisation of the remaining widgets:
        # (Name, type, editable, sortable, searchable, update_function)
cols = (('Nº. de Palet', 'gobject.TYPE_STRING',
False, True, True, None),
('# cajas/palé', 'gobject.TYPE_STRING',
False, True, False, None),
('# bolsas/caja', 'gobject.TYPE_STRING',
True, True, False, self.cambiar_numbolsas),
('Peso neto', 'gobject.TYPE_STRING',
False, True, False, None),
('B', "gobject.TYPE_BOOLEAN",
True, True, False, self.pasar_pale_a_B),
('Observaciones', 'gobject.TYPE_STRING',
True, False, False, self.cambiar_observaciones),
('PUID', 'gobject.TYPE_STRING', False, False, False, None)
)
utils.preparar_treeview(self.wids['tv_produccion'], cols)
self.wids['tv_produccion'].get_selection().set_mode(
gtk.SELECTION_MULTIPLE)
self.wids['tv_produccion'].add_events(gtk.gdk.BUTTON_PRESS_MASK)
cols = (('Código', 'gobject.TYPE_INT64', False, True, False, None),
('Nombre', 'gobject.TYPE_STRING', False, True, False, None),
('Apellidos', 'gobject.TYPE_STRING', False, True, True, None),
('Horas', 'gobject.TYPE_STRING', True, True, False,
self.cambiar_horas_trabajadas),
('ID', 'gobject.TYPE_INT64', False, False, False, None))
utils.preparar_listview(self.wids['tv_empleados'], cols)
self.colorear_tabla_empleados()
cols = (('Producto', 'gobject.TYPE_STRING', False, True, True, None),
('Cantidad', 'gobject.TYPE_STRING', False, True, False, None),
('ID', 'gobject.TYPE_INT64', False, False, False, None))
utils.preparar_listview(self.wids['tv_consumos'], cols)
self.wids['tv_consumos'].get_selection().set_mode(
gtk.SELECTION_MULTIPLE)
cols = (("Tipo de incidencia", "gobject.TYPE_STRING",
False, True, False, None),
("Hora de inicio", "gobject.TYPE_STRING",
False, True, False, None),
("Hora de finalización", "gobject.TYPE_STRING",
False, True, False, None),
("Observaciones", "gobject.TYPE_STRING",
False, True, True, None),
("ID", "gobject.TYPE_INT64", False, False, False, None))
utils.preparar_listview(self.wids['tv_incidencias'], cols)
self.wids['tv_incidencias'].get_selection().set_mode(
gtk.SELECTION_MULTIPLE)
self.wids['ventana'].maximize()
def cambiar_horas_trabajadas(self, cell, path, newtext):
"""Cambia las horas trabajadas por el operario seleccionado."""
newtext = newtext.replace(".", ":").replace(",", ":")
if ":" not in newtext:
if len(newtext) < 4:
newtext = ("0" * (4 - len(newtext))) + newtext
newtext = "%s:%s" % (newtext[:-2], newtext[-2:])
model = self.wids['tv_empleados'].get_model()
iid = model[path][-1]
horastrab = pclases.HorasTrabajadas.get(iid)
try:
try:
dtdelta = mx.DateTime.DateTimeDelta(
0,
float(newtext.split(':')[0]),
float(newtext.split(':')[1]),
0)
except IndexError:
dtdelta = mx.DateTime.DateTimeDelta(0, int(newtext), 0)
newtext = utils.str_hora_corta(dtdelta)
if dtdelta > self.objeto.get_duracion():
utils.dialogo_info(
titulo="TIEMPO INCORRECTO",
texto="El tiempo trabajado no puede superar la\n"
"duración del parte de producción.",
padre=self.wids['ventana'])
return
horastrab.horas = newtext
horastrab.sync()
horastrab.syncUpdate()
model[path][3] = horastrab.horas.strftime('%H:%M')
except (ValueError, TypeError):
utils.dialogo_info(
titulo="ERROR",
texto='El texto "%s" no representa el formato horario.' % (
newtext),
padre=self.wids['ventana'])
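    # --- Sketch (illustrative only) --------------------------------------
    # cambiar_horas_trabajadas() normalises loose input such as "130" or
    # "1,30" into "HH:MM" before building the DateTimeDelta. The parsing
    # step on its own, with stdlib types (hypothetical helper):
    @staticmethod
    def _sketch_normalizar_hora(texto):
        import datetime
        texto = texto.replace(".", ":").replace(",", ":")
        if ":" not in texto:
            texto = texto.zfill(4)                    # "130" -> "0130"
            texto = "%s:%s" % (texto[:-2], texto[-2:])
        horas, minutos = [int(campo) for campo in texto.split(":")[:2]]
        return datetime.timedelta(hours=horas, minutes=minutos)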
def activar_widgets(self, sens):
"""
Activa o desactiva (sensitive=True/False) todos
los widgets de la ventana que dependan del
objeto mostrado.
Entrada: s debe ser True o False. En todo caso
se evaluará como boolean.
"""
sens = sens and ((self.usuario and self.usuario.nivel <= 2)
or not self.objeto.bloqueado or not self.usuario)
if self.objeto:
sens = sens or self.objeto.id == self.__lecturaescritura
wids = ('hbox1', 'hbox2', 'hbox3', 'tv_produccion', 'hbox7',
'tv_incidencias', 'hbox8', 'tv_consumos', 'hbox9',
'table1', 'hbox6')
for wid in wids:
self.wids[wid].set_sensitive(sens)
if self.usuario and self.usuario.nivel > 3:
# No permito (des)bloquear.
self.wids['ch_bloqueado'].set_sensitive(False)
# if self.usuario:
# self.wids['b_partida'].set_sensitive(
# sens and self.usuario.nivel < 3)
def ir_a_primero(self):
"""
Pregunta si crear un parte nuevo, de forma que al abrir la ventana
siempre se pueda empezar un parte de rápidamente.
Si se contesta que no al diálogo, se va al _último_ registro de la
tabla.
"""
        # NOTA: el diálogo para crear un parte nuevo está desactivado;
        # siempre se salta al último parte existente.
        nuevo = False
if nuevo:
self.crear_nuevo_partedeproduccion(None)
else:
partedeproduccion = self.objeto
try:
if partedeproduccion is not None:
partedeproduccion.notificador.desactivar()
# Anulo el aviso de actualización del parte que deja de
# ser activo.
partesdeproduccion = pclases.ParteDeProduccion.select(
pclases.ParteDeProduccion.q.partidaCemID != None) # noqa
partesdeproduccion = partesdeproduccion.orderBy("-id")
partedeproduccion = partesdeproduccion[0]
partedeproduccion.notificador.activar(self.aviso_actualizacion)
# Activo la notificación
except IndexError:
partedeproduccion = None
self.objeto = partedeproduccion
self.actualizar_ventana()
def refinar_resultados_busqueda(self, resultados):
"""
Muestra en una ventana de resultados todos los
registros de "resultados".
Devuelve el id (primera columna de la ventana
de resultados) de la fila seleccionada o None
si se canceló.
"""
filas_res = []
for result in resultados:
filas_res.append((result.id,
utils.str_fecha(result.fecha),
utils.str_hora_corta(result.horainicio),
utils.str_hora_corta(result.horafin),
"CLIC PARA VER",
result.observaciones))
idpartedeproduccion = utils.dialogo_resultado(
filas_res,
titulo='Seleccione parte de línea de envasado',
cabeceras=('ID Interno',
'Fecha',
'Hora inicio',
'Hora fin',
'Partida',
'Observaciones'),
func_change=self.mostrar_info_parte,
padre=self.wids['ventana'])
if idpartedeproduccion < 0:
res = None
else:
res = idpartedeproduccion
return res
def mostrar_info_parte(self, treev):
"""Rellena la información general del parte."""
model, itr = treev.get_selection().get_selected()
if itr is not None and model[itr][-2] == "CLIC PARA VER":
parte = pclases.ParteDeProduccion.get(model[itr][0]) # En los
# diálogos de resultado el ID va al revés.
if parte.es_de_bolsas() and parte.articulos:
# partida=parte.articulos[0].bolsa.caja.pale.partidaCem.codigo
partida = parte.partidaCem.codigo
else:
partida = 'VACIO'
producto = (parte.articulos != []
and parte.articulos[0].productoVenta.nombre or 'VACÍO')
model[itr][-2] = "%s (%s)" % (partida, producto)
def rellenar_widgets(self):
"""
Introduce la información del partedeproduccion actual
en los widgets.
        Si el parte actual es None, se desactivan los widgets
        y se vuelve sin hacer nada más.
"""
if not self.objeto:
self.activar_widgets(False)
return
partedeproduccion = self.objeto
self.wids['ch_bloqueado'].set_active(self.objeto.bloqueado)
# Información global:
if self.objeto.articulos != []:
self.producto = self.objeto.articulos[0].productoVenta
# Y si no hay, sigo usando el de antes.
self.rellenar_datos_producto(self.producto)
self.wids['e_fechaini'].set_text(
utils.str_fecha(partedeproduccion.fechahorainicio))
self.wids['e_fechafin'].set_text(
utils.str_fecha(partedeproduccion.fechahorafin))
self.wids['e_horaini'].set_text(
partedeproduccion.horainicio.strftime('%H:%M'))
self.wids['e_horafin'].set_text(
partedeproduccion.horafin.strftime('%H:%M'))
self.wids['e_duracion'].set_text(
partedeproduccion.get_duracion().strftime('%H:%M'))
self.wids['txt_observaciones'].get_buffer().set_text(
partedeproduccion.observaciones)
self.rellenar_estadisticas()
# Información de detalle:
try:
e_partida = self.objeto.partidaCem.codigo
mostrar_mensaje_correccion_partidaCem = False
except AttributeError:
self.objeto._corregir_partidaCem_nula()
e_partida = self.objeto.partidaCem.codigo
mostrar_mensaje_correccion_partidaCem = True
self.wids['e_partida'].set_text(e_partida)
self.rellenar_tabla_empleados()
self.rellenar_tabla_bolsas()
self.rellenar_tabla_incidencias()
self.rellenar_tabla_consumos()
self.objeto.make_swap()
self.check_permisos()
if mostrar_mensaje_correccion_partidaCem:
utils.dialogo_info(
titulo="PARTIDA DE CEMENTO CORREGIDA",
texto="La partida de cemento del parte actual contenía "
"\nun error o era nula.\n"
"Se ha corregido automáticamente. Por favor, \n"
"verifique que se corresponde con la partida real.",
padre=self.wids['ventana'])
self.objeto.observaciones += "\nPartida corregida automáticamente."
self.wids['b_back'].set_sensitive(
self.objeto and self.objeto.anterior() and 1 or 0)
self.wids['b_next'].set_sensitive(
self.objeto and self.objeto.siguiente() and 1 or 0)
def rellenar_estadisticas(self):
partedeproduccion = self.objeto
# Estadísticas:
# numbolsas = len(self.objeto.articulos)
# numbolsas = pclases.ParteDeProduccion._queryOne("""
# SELECT COUNT(id)
# FROM articulo
# WHERE articulo.parte_de_produccion_id = %d""" % self.objeto.id)[0]
numbolsas = sum(
[a.caja.numbolsas for a in partedeproduccion.articulos])
self.wids['e_prod_bolsas'].set_text(str(numbolsas))
# kilos = sum([a.peso for a in self.objeto.articulos])
# Optimizando, que es gerundio:
try:
# kilos = (len(self.objeto.articulos)
kilos = (numbolsas
* self.producto.camposEspecificosBala.gramosBolsa/1000.0)
except AttributeError:
kilos = 0.0
self.wids['e_prod_kg'].set_text(utils.float2str(kilos, autodec=True))
cajas = set([a.caja for a in self.objeto.articulos]) # Ojo:python>2.3
numcajas = len(cajas)
# Optimización:
# sqlpales = pclases.Pale.select(pclases.AND(
# pclases.Articulo.q.parteDeProduccionID == self.objeto.id,
# pclases.Articulo.q.bolsaID == pclases.Bolsa.q.id,
# pclases.Bolsa.q.cajaID == pclases.Caja.q.id,
# pclases.Caja.q.paleID == pclases.Pale.q.id))
# pales = set([p for p in sqlpales]) # Ojo: python > 2.3
# numcajas = sum(p.numcajas for p in pales)
self.wids['e_prod_cajas'].set_text(str(numcajas))
try:
bolsasminuto = str(numbolsas
/ partedeproduccion.get_duracion().minutes)
except ZeroDivisionError:
bolsasminuto = "inf."
self.wids['e_bolsasminuto'].set_text(bolsasminuto)
try:
kgh = utils.float2str(kilos/partedeproduccion.get_duracion().hours,
autodec=True)
except ZeroDivisionError:
kgh = "inf."
self.wids['e_kgh'].set_text(kgh)
pales = set([a.caja.pale
for a in self.objeto.articulos]) # Ojo: python > 2.3
# numpales = len(pales)
# Optimizando:
# numpales = sqlpales.count() # Sin groupBy salen tantas como bolsas
numpales = len(pales)
self.wids['e_prodpales'].set_text(str(numpales))
try:
activo = partedeproduccion.get_horas_trabajadas()
except AssertionError:
# pylint: disable=protected-access
partedeproduccion._corregir_duracion_paradas()
activo = partedeproduccion.get_horas_trabajadas()
self.wids['e_activo'].set_text(activo.strftime("%H:%M"))
pasivo = partedeproduccion.get_horas_paradas()
self.wids['e_pasivo'].set_text(pasivo.strftime("%H:%M"))
self.wids['e_bbconsumidos'].set_text(
utils.float2str(len(self.objeto.bigbags), autodec=True))
self.wids['e_kgconsumidos'].set_text(
utils.float2str(sum([bigbag.pesobigbag for bigbag
in self.objeto.bigbags]),
autodec=True))
try:
palesa = len(self.objeto.partidaCem.get_pales_a())
palesb = len(self.objeto.partidaCem.get_pales_b())
except AttributeError:
palesa = palesb = 0
self.wids['e_palesa'].set_text(repr(palesa))
self.wids['e_palesb'].set_text(repr(palesb))
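    # --- Sketch (illustrative only) --------------------------------------
    # rellenar_estadisticas() derives throughput rates while guarding
    # against a zero-length shift. The same guard pattern in isolation
    # (assumed plain numeric inputs instead of mx durations):
    @staticmethod
    def _sketch_tasas(numbolsas, kilos, minutos_turno):
        try:
            bolsas_minuto = numbolsas / float(minutos_turno)
            kg_hora = kilos / (minutos_turno / 60.0)
        except ZeroDivisionError:
            bolsas_minuto = kg_hora = float("inf")
        return bolsas_minuto, kg_hora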
def rellenar_tabla_incidencias(self):
"""Rellena la tabla de paradas del parte."""
parte = self.objeto
treev = self.wids['tv_incidencias']
if parte is not None:
model = treev.get_model()
treev.set_model(None)
model.clear()
incidencias = pclases.Incidencia.select(
pclases.Incidencia.q.parteDeProduccionID == self.objeto.id,
orderBy="horainicio")
for incidencia in incidencias:
model.append((incidencia.tipoDeIncidencia.descripcion,
utils.str_fechahora(incidencia.horainicio),
utils.str_fechahora(incidencia.horafin),
incidencia.observaciones,
incidencia.id))
treev.set_model(model)
def rellenar_tabla_consumos(self):
"""
Rellena la tabla de consumos del parte.
"""
parte = self.objeto
if parte is not None:
model = self.wids['tv_consumos'].get_model()
self.wids['tv_consumos'].set_model(None)
model.clear()
consumos = parte.consumos[:]
try:
consumos.sort(lambda c1, c2: c1 is not None and c2 is not None
and int(c1.id - c2.id) or 0)
except TypeError as msg:
self.logger.error("partes_de_fabricacion_bolsas.py (rellenar"
"_tabla_consumos): Error ordenando consumo"
"s (%s):\n%s" % (msg, consumos))
for c in parte.consumos:
if c.productoCompraID is not None:
unidad = c.productoCompra.unidad
producto = c.productoCompra.descripcion
else:
unidad = ""
producto = ""
model.append((producto,
"%s %s" % (utils.float2str(c.cantidad), unidad),
c.id))
for bigbag in parte.bigbags: # Consumos de fibra de cemento:
str_bb = "{} ({}) {}".format(bigbag.codigo,
bigbag.articulo.productoVenta.nombre,
bigbag.api and "✔" or "✘")
str_bb = geninformes.sanitize_unicode(str_bb)
model.append((str_bb,
utils.float2str(bigbag.pesobigbag) + " kg",
-bigbag.id))
self.wids['tv_consumos'].set_model(model)
def check_permisos(self):
if self.__permisos.escritura: # Puede modificar los partes:
self.activar_widgets(True)
else: # Sólo puede modificar el parte que haya creado nuevo (si es
# que ha creado alguno)
            if (self.__lecturaescritura == self.objeto.id
                    or not self.objeto.bloqueado):
self.activar_widgets(True)
else:
self.activar_widgets(False)
# Compruebo primero este porque habilita o deshabilita todos los
# botones, incluso los que dependen de los otros dos permisos.
self.wids['b_buscar'].set_sensitive(self.__permisos.lectura)
self.wids['b_nuevo'].set_sensitive(self.__permisos.nuevo)
def rellenar_tabla_bolsas(self):
model = self.wids['tv_produccion'].get_model()
model.clear()
# detallesdeproduccion = self.objeto.articulos[:]
# detallesdeproduccion.sort(lambda x, y:
# utils.orden_por_campo_o_id(x,y,"fechahora"))
# detallesdeproduccion = self.objeto.articulos
detallesdeproduccion = pclases.Articulo.select(
pclases.Articulo.q.parteDeProduccionID == self.objeto.id,
orderBy="id")
# Filas del TreeView
pales = {} # Diccionarios de nodos padres (cajas) y abuelos (palés).
cajas = {}
self.wids['tv_produccion'].freeze_child_notify()
self.wids['tv_produccion'].set_model(None)
for articulo in detallesdeproduccion:
pale = articulo.caja.pale
if pale not in pales: # Inserto palé.
es_clase_b = pale.es_clase_b()
pale_api = pale.api
if pale_api is None:
volcado = ""
elif pale_api:
volcado = " ✔"
else:
volcado = " ✘"
volcado = geninformes.sanitize_unicode(volcado)
peso_neto = sum([c.articulo.peso_neto for c in pale.cajas])
numcajas = len(pale.cajas) # = pale.numcajas
pales[pale] = model.append(
None,
("Palé " + pale.codigo + volcado,
numcajas,
pale.numbolsas,
peso_neto,
es_clase_b,
pale.observaciones,
pale.get_puid()))
if not self.SHOW_PALES_COMPLETOS:
continue
caja = articulo.caja
if caja not in cajas:
if caja.articulo.api is None:
volcado = ""
                elif caja.articulo.api:
volcado = " ✔"
else:
volcado = " ✘"
volcado = geninformes.sanitize_unicode(volcado)
cajas[caja] = model.append(
pales[pale],
("Caja " + caja.codigo + volcado,
1, # 1 caja por caja:)
caja.numbolsas,
caja.peso,
es_clase_b,
caja.observaciones,
caja.get_puid()))
# pesogramos = "%s gr" % utils.float2str(
# bolsa.peso * 1000, autodec = True)
# model.append(cajas[bolsa.caja],
# ("Bolsa " + bolsa.codigo,
# pesogramos,
# bolsa.claseb,
# bolsa.observaciones,
# bolsa.get_puid()))
self.wids['tv_produccion'].set_model(model)
self.wids['tv_produccion'].thaw_child_notify()
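    # --- Sketch (illustrative only) --------------------------------------
    # rellenar_tabla_bolsas() groups cajas under their palé by caching the
    # parent iter in a dict, with the model detached and child-notify
    # frozen so the fill does not repaint row by row. The grouping idiom
    # alone, assuming a one-column gtk.TreeStore:
    @staticmethod
    def _sketch_agrupar_en_treestore(model, filas):
        padres = {}
        for pale, caja in filas:          # filas: iterable of (pale, caja)
            if pale not in padres:
                padres[pale] = model.append(None, (str(pale),))
            model.append(padres[pale], (str(caja),))
        return padres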
def seleccionar_producto(self, boton):
"""
Selecciona el producto del parte actual.
Si ya tiene producción, cambia el producto de toda la producción
de la partida completa.
"""
a_buscar = utils.dialogo_entrada(titulo="BUSCAR PRODUCTO",
texto="Introduzca el texto a buscar:",
padre=self.wids['ventana'])
if a_buscar is not None:
pvs = utils.buscar_productos_venta(a_buscar)
pvs = [p for p in pvs if p.es_bolsa()]
if len(pvs):
if len(pvs) == 1:
pv = pvs[0]
elif len(pvs) > 1:
idpv = self.refinar_resultados_busqueda_producto(pvs)
if idpv:
pv = pclases.ProductoVenta.get(idpv)
else:
pv = None
if pv:
try:
pcem = self.objeto.partidaCem
producto_anterior = pcem.pales[0].productoVenta
except IndexError:
producto_anterior = None
if producto_anterior == pv:
return
if not producto_anterior:
producto_anterior = pv
if (producto_anterior.camposEspecificosBala.bolsasCaja !=
pv.camposEspecificosBala.bolsasCaja or
producto_anterior.camposEspecificosBala.cajasPale !=
pv.camposEspecificosBala.cajasPale):
utils.dialogo_info(
titulo="PRODUCTO INCOMPATIBLE",
texto="Seleccione un producto con el mismo"
"número de bolsas por caja\no elimine primero"
" la producción actual, cree una nueva "
"partida\n y vuelva a crearla con el nuevo "
"producto.",
padre=self.wids['ventana'])
return
titulo = "¿CAMBIAR PRODUCTO AL LOTE COMPLETO?"
texto = "Va a cambiar la producción del lote completo de"\
" %s\na %s. ¿Está seguro?\n\n"\
"(Puede durar bastante. "\
"No interrumpa el proceso)" % (
producto_anterior
and producto_anterior.descripcion or "",
pv.descripcion)
padre = self.wids['ventana']
if (not self.objeto.partidaCem.pales
or utils.dialogo(texto, titulo, padre)):
ceb = pv.camposEspecificosBala
for pale in self.objeto.partidaCem.pales:
pale.numcajas = ceb.cajasPale
pale.numbolsas = ceb.bolsasCaja
pale.sync()
for caja in pale.cajas:
caja.numbolsas = ceb.bolsasCaja
                                caja.peso = (ceb.bolsasCaja
                                             * ceb.gramosBolsa / 1000.0)
a = caja.articulo
a.pesoReal = (caja.peso
+ pclases.PESO_EMBALAJE_CAJAS)
a.productoVenta = pv
a.syncUpdate()
self.producto = pv
self.rellenar_datos_producto(pv)
self.actualizar_ventana()
def rellenar_datos_producto(self, producto):
"""
A partir del artículo recibido, completa la información
de la cabecera del formulario (ancho, etc...) en
función de los datos de la bolsa.
También verifica si el parte tiene ficha de fabricación. Si no la
tiene, pone la del producto recibido.
"""
if producto is None:
self.wids['e_producto'].set_text('')
else:
nomproducto = "%s. Corte: %d mm. %d gr/bolsa" % (
producto.descripcion,
producto.camposEspecificosBala.corte,
producto.camposEspecificosBala.gramosBolsa)
self.wids['e_producto'].set_text(nomproducto)
# --------------- Manejadores de eventos ----------------------------
def add_pale(self, wid):
"""
Crea un nuevo palé con todas las cajas y bolsas que contiene.
Si es el primer palé del lote pide el número de bolsas que han
entrado en la primera de las cajas, si no, toma las del primer
palé de la PARTIDA (aunque debe coincidir con el parte, pero en el
caso de que no sea así me curo en salud y procuro desde el principio
que todos los palés de la misma partida sean idénticos).
Si el número de bolsas es inferior a 40 se va a crear por defecto
como B.
"""
if not MURANO:
utils.dialogo_info(titulo="ERROR DE CONEXIÓN CON MURANO",
texto="No puede crear cajas. Solo consultas.",
padre=self.wids['ventana'])
return
if not self.producto:
utils.dialogo_info(
titulo="SELECCIONE UN PRODUCTO",
texto="Antes debe seleccionar un producto.",
padre=self.wids['ventana'])
return
partidaCem = self.objeto.partidaCem
try:
pale = partidaCem.pales[0]
defecto = pale.numbolsas
except IndexError:
defecto = self.producto.camposEspecificosBala.bolsasCaja
if not defecto:
defecto = pclases.Pale.NUMBOLSAS
texto = "Introduzca el número de bolsas de la primera caja:"
if self.usuario and self.usuario.nivel <= self.NIVEL_POR_LOTES:
texto += "\n\n<small>Si introduce una serie de números se\n"\
"crearán tantos palés como números haya tecleado;\n"\
"cada uno de ellos con las bolsas por caja indicadas."\
"</small>"
numbolsas = utils.dialogo_pedir_rango(
titulo="¿NÚMERO DE BOLSAS?",
texto=texto,
padre=self.wids['ventana'],
valor_por_defecto=defecto,
permitir_repetidos=True)
else:
numbolsas = utils.dialogo_entrada(
titulo="¿NÚMERO DE BOLSAS?",
texto=texto,
padre=self.wids['ventana'],
valor_por_defecto=defecto)
if not numbolsas:
return
if not self.usuario or self.usuario.nivel > self.NIVEL_POR_LOTES:
try:
numbolsas = [int(numbolsas)]
except (ValueError, TypeError):
utils.dialogo_info(titulo="NÚMERO INCORRECTO",
texto='El texto "%s" no es un número.' % (
numbolsas),
padre=self.wids['ventana'])
return
listanumbolsas = numbolsas
if pclases.DEBUG:
print(listanumbolsas)
pales_a_etiquetar = []
productoVenta = self.producto
for numbolsas in listanumbolsas:
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
icont = 0.0
# numcajasdefecto = pclases.Pale.NUMCAJAS
numcajasdefecto = productoVenta.camposEspecificosBala.cajasPale
# 1.- Creo el palé.
numpale, codigo = pclases.Pale.get_next_numpale(numbolsas)
ahora = mx.DateTime.localtime()
pale = pclases.Pale(partidaCem=partidaCem,
numpale=numpale,
codigo=codigo,
fechahora=None,
numbolsas=numbolsas,
numcajas=numcajasdefecto)
try:
pale.fechahora = ahora
except: # noqa
pale.fechahora = datetime.datetime.now()
pclases.Auditoria.nuevo(pale, self.usuario, __file__)
# 2.- Creo las cajas.
tot = pale.numcajas
for i in range(pale.numcajas): # @UnusedVariable
numcaja, codigo = pclases.Caja.get_next_numcaja()
vpro.set_valor(icont / tot, "Creando caja %s..." % codigo)
try:
gramos = productoVenta.camposEspecificosBala.gramosBolsa
except AttributeError:
gramos = 0
peso_neto = (gramos * numbolsas) / 1000.0
# peso = peso_bruto = peso_neto + 0.150 + 0.100 # Palé+cartón
caja = pclases.Caja(pale=pale,
numcaja=numcaja,
codigo=codigo,
fechahora=None,
peso=peso_neto,
numbolsas=numbolsas)
try:
caja.fechahora = mx.DateTime.localtime()
except: # noqa
caja.fechahora = datetime.datetime.now()
pclases.Auditoria.nuevo(caja, self.usuario, __file__)
articulo = pclases.Articulo(
parteDeProduccion=self.objeto,
caja=caja,
rolloDefectuoso=None,
albaranSalida=None,
productoVenta=self.producto,
bala=None,
rollo=None,
bigbag=None,
almacen=pclases.Almacen.get_almacen_principal(),
rolloC=None,
balaCable=None)
pclases.Auditoria.nuevo(articulo, self.usuario, __file__)
# DONE: Al final sí que está volcando. Pero va tan sumamente
# lento caja a caja que parece que se cuelga. Además la salida
# por consola no está habilitada, con lo que al final el error
# que da es por algún print de depuración que hay por ahí.
# try:
# murano.ops.create_articulo(articulo)
# except IOError:
# pass # Alguna movida con la salida por consola de
# # depuración y no está disponible.
icont += 1
# 3.- Creo el palé en Murano
vpro.set_valor(icont / tot,
"Creando palé {}...".format(pale.codigo))
# murano.ops.create_pale(pale, observaciones="")
# OJO: Le paso el último artículo porque la formulación de esta
# línea será por PALÉS COMPLETOS.
pales_a_etiquetar.append(pale)
vpro.ocultar()
descontar_material_adicional(self, articulo)
self.objeto.buscar_o_crear_albaran_interno(
incluir_consumos_auto=True) # Normalmente no, pero
# aquí sí quiero que aparezcan en el alb. interno.
imprimir_etiquetas_pales(pales_a_etiquetar, self.wids['ventana'],
mostrar_dialogo=False)
self.rellenar_tabla_consumos()
self.rellenar_tabla_bolsas()
self.rellenar_estadisticas()
def seleccionar_partida(self, wid):
"""
Wrapper para cambiar_partida.
"""
self.cambiar_partida(wid)
def _salir(self, wid, event=None):
if (self.__permisos.escritura
and self.objeto
and not self.objeto.bloqueado
and self.objeto.fecha < (mx.DateTime.localtime()
- mx.DateTime.oneDay)
and (not self.usuario or self.usuario.nivel <= 2)):
# Tiene permiso para bloquear el parte
res = utils.dialogo(titulo="DEBE VERIFICAR EL PARTE",
texto="Antes de cerrar el parte debe verifi"
"carlo.\n¿Marcar como verificado?",
padre=self.wids['ventana'],
bloq_temp=["Sí"])
self.objeto.bloqueado = res
self.wids['ch_bloqueado'].set_active(self.objeto.bloqueado)
# return True
if not self.salir(wid, mostrar_ventana=event is None):
# Devuelve True cuando se cancela el cierre de la ventana (por
# temas de event-chain).
try:
padre = self.wids['ventana']
except KeyError:
padre = None
vpro = VentanaActividad(texto="Comprobando disparo de alertas...",
padre=padre)
vpro.mostrar()
if not self.linea:
linea = pclases.LineaDeProduccion.select(
pclases.LineaDeProduccion.q.nombre.contains('de embolsado'))
self.linea = linea
vpro.mover()
if self.linea is None:
txt = "WARNING: La línea de embolsado no está correctamente "\
"dada de alta."
print(txt)
self.logger.warning(txt)
else:
vpro.mover()
formulacion = self.linea.formulacion
if not formulacion:
# TODO: Dar mensaje de error por logger
pass
else:
for ca in [ca_con_p for ca_con_p
in formulacion.consumosAdicionales
if ca_con_p.productoCompra is not None]:
vpro.mover()
# Verifico que no haya productos bajo mínimos:
if (ca.productoCompra.existencias
< ca.productoCompra.minimo):
vpro.mover()
try:
v = pclases.Ventana.select(
pclases.Ventana.q.fichero
== "pedidos_de_compra.py")[0]
except IndexError:
txt = "WARNING: ¡La ventana de pedidos de "\
"compra SE HA PERDIDO!"
print(txt)
self.logger.warning(txt)
mensaje = "El producto %s tiene las existencias "\
"bajo mínimos. Considere hacer un "\
"pedido de compra." % (
ca.productoCompra.descripcion)
for u in [p.usuario
for p in v.permisos if p.nuevo]:
vpro.mover()
u.enviar_mensaje(mensaje)
# Y Verifico que no haya existencias negativas:
if ca.productoCompra.existencias < 0:
vpro.mover()
try:
v = pclases.Ventana.select(
pclases.Ventana.q.fichero
== "pedidos_de_compra.py")[0]
except IndexError:
print("WARNING: ¡La ventana de pedidos de "
"compra SE HA PERDIDO!")
self.logger.error(
"partes_de_fabricacion_rollos: ¡La "
"ventana de pedidos de compra "
"SE HA PERDIDO!")
vpro.mover()
mensaje = "El producto %s tiene existencias "
mensaje += "NEGATIVAS. Corrija el error lo antes"
mensaje += " posible." % (
ca.productoCompra.descripcion)
for u in [p.usuario
for p in v.permisos if p.nuevo]:
vpro.mover()
u.enviar_mensaje(mensaje)
vpro.mover()
vpro.ocultar()
def cambiar_observaciones(self, cell, path, newtext):
"""
Solo cambia las observaciones del objeto. NO PASA A PRODUCTO B.
"""
model = self.wids['tv_produccion'].get_model()
puid = model[path][-1]
clase, aidi = puid.split(":")
objeto = getattr(pclases, clase).get(int(aidi))
objeto.observaciones = newtext
model[path][5] = newtext
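    # --- Sketch (illustrative only) --------------------------------------
    # The hidden column stores PUIDs as "Clase:id"; resolving one back to
    # its ORM object is a getattr lookup on pclases, exactly as done in
    # cambiar_observaciones() above:
    @staticmethod
    def _sketch_objeto_desde_puid(puid):
        clase, aidi = puid.split(":")
        return getattr(pclases, clase).get(int(aidi))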
def crear_nuevo_partedeproduccion(self, widget):
"""
Función callback del botón b_nuevo.
Pide los datos básicos para crear un nuevo objeto.
Una vez insertado en la BD hay que hacerlo activo
en la ventana para que puedan ser editados el resto
de campos que no se hayan pedido aquí.
"""
partedeproduccion = self.objeto
if not utils.dialogo('Se creará un nuevo parte de producción vacío.',
'NUEVO PARTE',
padre=self.wids['ventana']):
return
if partedeproduccion is not None:
partedeproduccion.notificador.desactivar()
partedeproduccion = pclases.ParteDeProduccion(
fecha=mx.DateTime.localtime(),
horainicio=time.struct_time(
time.localtime()[:4] + (0, 0) + time.localtime()[6:]),
horafin=time.struct_time(
time.localtime()[:3] + ((time.localtime()[3]+8) % 24, 0, 0)
+ time.localtime()[6:]),
prodestandar=0,
observaciones='',
bloqueado=False,
partidaCem=pclases.PartidaCem.get_nueva_o_ultima_vacia(),
merma=0.0)
pclases.Auditoria.nuevo(partedeproduccion, self.usuario, __file__)
# pylint: disable=protected-access
partedeproduccion._corregir_campos_fechahora()
self.objeto = partedeproduccion
self.wids['e_partida'].set_text(self.objeto.partidaCem.codigo)
self.add_empleados_calendario()
self.__lecturaescritura = self.objeto.id
self.actualizar_ventana()
self.objeto.notificador.activar(self.aviso_actualizacion)
verificar_solapamiento(partedeproduccion, self.wids['ventana'])
def refinar_resultados_busqueda_producto(self, resultados):
"""
Muestra en una ventana de resultados todos los
registros de "resultados".
Devuelve el id (primera columna de la ventana
de resultados) de la fila seleccionada o None
si se canceló.
"""
filas_res = []
for result in resultados:
filas_res.append((result.id, result.codigo, result.nombre,
result.descripcion))
idproducto = utils.dialogo_resultado(
filas_res,
titulo='Seleccione producto',
cabeceras=('ID Interno', 'Código', 'Nombre', 'Descripción'),
padre=self.wids['ventana'])
if idproducto < 0:
res = None
else:
res = idproducto
return res
# pylint: disable=too-many-branches,too-many-statements
def buscar_partedeproduccion(self, widget):
"""
Muestra una ventana de búsqueda y a continuación los
resultados. El objeto seleccionado se hará activo
en la ventana a no ser que se pulse en Cancelar en
la ventana de resultados.
PRECONDICION: Los partes de embolsado SIEMPRE deben tener una
partida de cemento relacionada.
"""
partedeproduccion = self.objeto
a_buscar = utils.dialogo_entrada(
titulo="BUSCAR PARTE",
texto="Introduzca fecha del parte o nombre del producto:",
padre=self.wids['ventana'])
if a_buscar is not None:
try:
if a_buscar != '':
a_buscar = a_buscar.replace("-", "/")
if a_buscar.count('/') == 1:
a_buscar = "%s/%d" % (a_buscar,
mx.DateTime.localtime().year)
if len(a_buscar.split('/')[-1]) == 2:
fecha = time.strptime(a_buscar, '%d/%m/%y')
else:
fecha = time.strptime(a_buscar, '%d/%m/%Y')
# pylint: disable=singleton-comparison
resultados = pclases.ParteDeProduccion.select(
pclases.AND(
pclases.ParteDeProduccion.q.fecha == fecha,
pclases.ParteDeProduccion.q.partidaCemID
!= None)) # noqa
else:
resultados = pclases.ParteDeProduccion.select(
# pylint: disable=singleton-comparison
pclases.ParteDeProduccion.q.partidaCemID
!= None) # noqa
except (TypeError, ValueError):
# pylint: disable=singleton-comparison
producto = pclases.ProductoVenta.select(pclases.AND(
pclases.ProductoVenta.q.nombre.contains(a_buscar),
pclases.ProductoVenta.q.camposEspecificosBalaID
!= None)) # noqa
producto = pclases.SQLtuple(
[p for p in producto if p.es_bolsa()])
resultados = pclases.ParteDeProduccion.select()
# Pongo la barra porque con muchos partes esto tarda
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
i = 0.0
tot = resultados.count()
partes = []
if producto.count() > 1:
idproducto = self.refinar_resultados_busqueda_producto(
producto)
if idproducto is not None:
for p in resultados:
if (p.articulos != []
and p.articulos[0].productoVentaID
== idproducto):
partes.append(p)
vpro.set_valor(i/tot, 'Buscando partes')
i += 1
else:
vpro.ocultar()
return
elif producto.count() == 1:
for p in resultados:
if (p.articulos != []
and p.articulos[0].productoVentaID
== producto[0].id):
partes.append(p)
vpro.set_valor(i/tot, 'Buscando partes')
i += 1
else:
for p in resultados:
if p.es_de_bolsas():
partes.append(p)
vpro.set_valor(i/tot, 'Buscando partes')
i += 1
vpro.ocultar()
resultados = partes
try:
len_resultados = len(resultados)
except TypeError:
len_resultados = resultados.count()
if len_resultados > 1:
# Refinar los resultados
idpartedeproduccion = self.refinar_resultados_busqueda(
resultados)
if idpartedeproduccion is None:
return
resultados = [
pclases.ParteDeProduccion.get(idpartedeproduccion)]
elif len_resultados < 1:
# Sin resultados de búsqueda
utils.dialogo_info(
'SIN RESULTADOS',
'La búsqueda no produjo resultados.\nPruebe a cambiar'
' el texto buscado o déjelo en blanco para ver una '
'lista completa.\n(Atención: Ver la lista completa '
'puede resultar lento si el número de elementos es '
'muy alto)',
padre=self.wids['ventana'])
return
# # Un único resultado
# Primero anulo la función de actualización
if partedeproduccion is not None:
partedeproduccion.notificador.desactivar()
# Pongo el objeto como actual
try:
partedeproduccion = resultados[0]
except IndexError:
utils.dialogo_info(
titulo="ERROR",
texto="Se produjo un error al recuperar la "
"información.\nCierre y vuelva a abrir la "
"aplicación antes de volver a intentarlo.",
padre=self.wids['ventana'])
return
# Y activo la función de notificación:
partedeproduccion.notificador.activar(self.aviso_actualizacion)
self.objeto = partedeproduccion
# Reinicio preferencias de etiqueta.
global MEMENTO_MORI
MEMENTO_MORI = {'que_imprimir': None, 'tipo': None}
self.actualizar_ventana()
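    # --- Sketch (illustrative only) --------------------------------------
    # buscar_partedeproduccion() accepts "dd/mm", "dd/mm/yy" and
    # "dd/mm/yyyy", with "-" as an alternative separator. The date-parsing
    # step alone, using the stdlib instead of mx.DateTime:
    @staticmethod
    def _sketch_parsear_fecha(texto):
        import time
        texto = texto.replace("-", "/")
        if texto.count("/") == 1:                     # "dd/mm"
            texto = "%s/%d" % (texto, time.localtime().tm_year)
        formato = (len(texto.split("/")[-1]) == 2
                   and "%d/%m/%y" or "%d/%m/%Y")
        return time.strptime(texto, formato)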
def guardar(self, widget):
"""
Guarda el contenido de los entry y demás widgets de entrada
de datos en el objeto y lo sincroniza con la BD.
"""
partedeproduccion = self.objeto
valores = self.leer_valores_ventana()
if valores["fechahorainicio"] > valores["fechahorafin"]:
self.wids['e_fechafin'].set_text(
self.wids['e_fechaini'].get_text())
self.wids['e_horafin'].set_text(
self.wids['e_horaini'].get_text())
valores = self.leer_valores_ventana()
ye_olde_fecha = partedeproduccion.fecha
ye_olde_horainicio = utils.str_hora_corta(partedeproduccion.horainicio)
ye_olde_horafin = utils.str_hora_corta(partedeproduccion.horafin)
# Desactivo el notificador momentáneamente
partedeproduccion.notificador.activar(lambda: None)
# Actualizo los datos del objeto
for campo in valores:
try:
if (isinstance(valores[campo],
type(mx.DateTime.DateTimeDelta(0))) and
isinstance(getattr(self.objeto, campo),
type(datetime.time()))):
# Hay un bug con el mx de Python 2.7 en Windows y tengo
# que hacer esta conversión a mano:
valores[campo] = datetime.time(valores[campo].hour,
valores[campo].minute)
setattr(self.objeto, campo, valores[campo])
except ValueError:
if isinstance(valores[campo], mx.DateTime.DateTimeDeltaType):
setattr(self.objeto, campo,
valores[campo].strftime("%H:%M"))
# partedeproduccion._corregir_campos_fechahora() <-- Aquí no hace falta
# Verificación de que no se solapa con otros partes:
verificar_solapamiento(partedeproduccion,
self.wids['ventana'], # <- Esto es horrible.
ye_olde_fecha,
ye_olde_horainicio,
ye_olde_horafin)
# Fuerzo la actualización de la BD y no espero a que SQLObject lo haga
# por mí:
partedeproduccion.sync()
# Vuelvo a activar el notificador
partedeproduccion.notificador.activar(self.aviso_actualizacion)
self.actualizar_ventana()
self.wids['b_guardar'].set_sensitive(False)
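    # --- Sketch (illustrative only) --------------------------------------
    # guardar() works around an mx.DateTime/Python 2.7 issue on Windows by
    # converting a DateTimeDelta into datetime.time by hand. The conversion
    # with a stdlib timedelta (assumption: the delta is positive, < 24 h):
    @staticmethod
    def _sketch_delta_a_time(delta):
        import datetime
        segundos = int(delta.total_seconds())
        return datetime.time(segundos // 3600, (segundos % 3600) // 60)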
def borrar_parte(self, boton):
if not self.objeto:
return
if not utils.dialogo('Se va a intentar eliminar el parte actual.\nSi '
'hay operaciones complejas implicadas se cancela'
'rá el borrado.\nDe cualquier forma, no se acons'
'eja eliminar ningún parte que ya tenga producci'
'ón relacionada.\n¿Está seguro de borrar el part'
'e actual?',
'ELIMINAR PARTE',
padre=self.wids['ventana']):
return
partedeproduccion = self.objeto
partedeproduccion.notificador.desactivar()
try:
partedeproduccion.destroy(ventana=__file__)
except Exception as msgexception:
utils.dialogo_info(
'PARTE NO BORRADO',
'El parte no se eliminó.\nSi tiene bolsas o empleados '
'asociados, trate primero de eliminarlos y vuelva a '
'intentarlo.\n\nExcepción capturada: {}'.format(
msgexception),
padre=self.wids['ventana'])
return
self.ir_a_primero()
def add_incidencia(self, boton):
ii = pclases.TipoDeIncidencia.select()
idincidencia = utils.dialogo_combo(
'SELECCIONE UN TIPO DE INCIDENCIA',
'Seleccione un tipo de incidencia del desplegable inferior',
[(i.id, i.descripcion) for i in ii],
padre=self.wids['ventana'])
if idincidencia is None:
return
utils.dialogo_info(
'HORA INICIO',
'A continuación seleccione la hora de inicio de la '
'incidencia.',
padre=self.wids['ventana'])
horaini = utils.mostrar_hora(time.localtime()[3], 0, 0, 'HORA INICIO')
if not horaini:
return
utils.dialogo_info(
'HORA FIN',
'A continuación seleccione la hora de finalización de la '
'incidencia.',
padre=self.wids['ventana'])
horafin = utils.mostrar_hora(time.localtime()[3], 0, 0, 'HORA FIN')
if not horafin:
return
self.objeto.sync()
horaini = mx.DateTime.DateTimeFrom(year=self.objeto.fecha.year,
month=self.objeto.fecha.month,
day=self.objeto.fecha.day,
hour=int(horaini.split(":")[0]),
minute=int(horaini.split(":")[1]))
horafin = mx.DateTime.DateTimeFrom(year=self.objeto.fecha.year,
month=self.objeto.fecha.month,
day=self.objeto.fecha.day,
hour=int(horafin.split(":")[0]),
minute=int(horafin.split(":")[1]))
if horaini > horafin:
horafin += mx.DateTime.oneDay
while horaini < self.objeto.fechahorainicio: # El parte está en la
# franja de medianoche y la incidencia comienza después de las 12.
horaini += mx.DateTime.oneDay # Debe llevar la fecha del día
# siguiente.
horafin += mx.DateTime.oneDay
if entran_en_turno(self.objeto, horaini, horafin):
observaciones = utils.dialogo_entrada(
titulo='OBSERVACIONES',
texto='Introduzca observaciones sobre la incidencia:',
padre=self.wids['ventana'])
if observaciones is None:
return
incidencia = pclases.Incidencia(
tipoDeIncidencia=pclases.TipoDeIncidencia.get(idincidencia),
horainicio=horaini,
horafin=horafin,
parteDeProduccion=self.objeto,
observaciones=observaciones)
pclases.Auditoria.nuevo(incidencia, self.usuario, __file__)
# self.actualizar_ventana()
self.rellenar_tabla_incidencias()
self.rellenar_estadisticas()
else:
utils.dialogo_info(
titulo='ERROR HORARIO',
texto='La franja horaria que ha seleccionado no entra en '
'el turno del parte.',
padre=self.wids['ventana'])
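    # --- Sketch (illustrative only) --------------------------------------
    # add_incidencia() unwraps time ranges that cross midnight: the end is
    # pushed a day forward, and while the incidencia still starts before
    # the parte both ends advance together. The normalisation with
    # datetime objects (illustrative; mirrors the mx logic above):
    @staticmethod
    def _sketch_normalizar_franja(inicio, fin, inicio_parte):
        import datetime
        un_dia = datetime.timedelta(days=1)
        if inicio > fin:
            fin += un_dia
        while inicio < inicio_parte:
            inicio += un_dia
            fin += un_dia
        return inicio, fin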
def drop_pale(self, boton):
"""
Elimina el palé, sus cajas, bolsas y consumos relacionados.
"""
if not self.usuario or self.usuario.nivel > 1:
utils.dialogo_info(
titulo="PERMISOS INSUFICIENTES",
texto="No puede borrar artículos fabricados.\n\n"
"Solicite su eliminación por escrito indicando\n"
"claramente los motivos y el código de\n"
"trazabilidad del artículo en cuestión.",
padre=self.wids['ventana'])
return
if not MURANO:
utils.dialogo_info(
titulo="ERROR DE CONEXIÓN CON MURANO",
texto="No puede eliminar cajas. Solo consultas.",
padre=self.wids['ventana'])
return
model, paths = self.wids['tv_produccion'].get_selection(
).get_selected_rows()
if (not paths or
not utils.dialogo(
titulo="¿ESTÁ SEGURO?",
texto="Se van a eliminar %d líneas. ¿Desea continuar?" % (
len(paths)),
padre=self.wids['ventana'])):
return
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
icont = 0
tot = len(paths)
error = False
for path in paths:
puid = model[path][-1]
vpro.set_valor(icont / tot, "Eliminando %s..." % puid)
clase, aidi = puid.split(":")
objeto = getattr(pclases, clase).get(int(aidi))
if isinstance(objeto, pclases.Pale):
try:
articulo = objeto.cajas[0].articulo
except IndexError:
# Si el palé está vacío, artículo será None
articulo = None
elif isinstance(objeto, pclases.Caja):
articulo = objeto.articulo
if articulo:
# OJO: Le paso el último artículo porque la formulación de
# esta línea es por PALÉS COMPLETOS.
descontar_material_adicional(self, articulo, restar=False)
try:
# murano.ops.delete_articulo(objeto)
objeto.destroy_en_cascada(ventana=__file__)
except IOError:
pass # No tenemos consola para sacar los mensajes de debug.
except Exception as msg:
vpro.ocultar()
error = True
utils.dialogo_info(
titulo="ERROR AL ELIMINAR",
texto="Ocurrió un error al eliminar la producción.\n"
"\n\nInformación de depuración:\n"
"PUID: %s\nMensaje de la excepción:\n"
"%s" % (objeto.get_puid(), msg),
padre=self.wids['ventana'])
break # Paso de seguir con los demás paths (si los hubiera)
icont += 1
if not error:
vpro.ocultar()
if paths:
self.rellenar_tabla_consumos()
self.rellenar_tabla_bolsas()
self.rellenar_estadisticas()
def drop_incidencia(self, boton):
model, paths = self.wids['tv_incidencias'].get_selection(
).get_selected_rows()
if paths is None or paths == []:
utils.dialogo_info(
'INCIDENCIA NO SELECCIONADA',
'Debe seleccionar la incidencia que desee eliminar del '
'parte.',
padre=self.wids['ventana'])
else:
if not utils.dialogo('¿Eliminar del parte?',
'BORRAR INCIDENCIAS DE CONTROL DE PRODUCCIÓN',
padre=self.wids['ventana']):
return
for path in paths:
aidi = model[path][-1]
incidencia = pclases.Incidencia.get(aidi)
incidencia.parteDeProduccion = None
try:
incidencia.destroy(ventana=__file__)
except Exception as msgexception:
utils.dialogo_info(titulo='INCIDENCIA NO ELIMINADA',
texto='Ocurrió un error al intentar '
'eliminar la incidencia.\n\n'
'Excepción capturada: {}'.format(
msgexception),
padre=self.wids['ventana'])
self.actualizar_ventana()
def add_empleado(self, wid):
empleados = pclases.Empleado.select(pclases.AND(
pclases.Empleado.q.activo == True, # noqa
pclases.Empleado.q.planta == True), # noqa
orderBy='apellidos')
empleados = [(e.id, e.nombre, e.apellidos) for e in empleados
if e.planta and e.activo and e.categoriaLaboral
and e.categoriaLaboral.planta]
# e.categoriaLaboral.planta and \
# e.categoriaLaboral.lineaDeProduccion == self.linea)]
ids = utils.dialogo_resultado(filas=empleados,
titulo='SELECCIONE EMPLEADOS',
cabeceras=('ID', 'Nombre', 'Apellidos'),
multi=True,
padre=self.wids['ventana'])
if ids == [-1]:
return
for ide in ids:
try:
e = pclases.Empleado.get(ide)
self.objeto.addEmpleado(e)
except Exception as msg:
utils.dialogo_info(
titulo='NÚMERO INCORRECTO',
texto='El empleado con código identificador %s no '
'existe o no se pudo agregar.\n\n'
'Información de depuración:\n'
'\t%s' % (ide, msg),
padre=self.wids['ventana'])
self.rellenar_tabla_empleados()
def drop_empleado(self, wid):
"""Quita un empleado del parte de producción."""
if self.wids['tv_empleados'].get_selection(
).count_selected_rows() == 0:
return
model, path = self.wids['tv_empleados'].get_selection().get_selected()
ide = model[path][0] # El ide del empleado es la columna 0
e = pclases.Empleado.get(ide)
self.objeto.removeEmpleado(e)
self.rellenar_tabla_empleados()
def rellenar_tabla_empleados(self):
"""Rellena la tabla de empleados."""
model = self.wids['tv_empleados'].get_model()
model.clear()
horas_parte = self.objeto.get_duracion()
for horastrab in self.objeto.horasTrabajadas:
try:
supera_duracion_parte = horastrab.horas > horas_parte
except TypeError:
supera_duracion_parte = (
utils.DateTime2DateTimeDelta(horastrab.horas)
> horas_parte)
if supera_duracion_parte:
horastrab.horas = horas_parte.strftime('%H:%M')
horastrab.sync()
model.append((horastrab.empleado.id,
horastrab.empleado.nombre,
horastrab.empleado.apellidos,
horastrab.horas.strftime('%H:%M'),
horastrab.id))
# pylint: disable=too-many-branches
def cambiar_partida(self, wid):
"""
Pide un número de partida por teclado y cambia a él.
"""
texto = """
Al cambiar la partida del parte, se cambiará la partida de
todos los productos relacionados con él, así como el artículo
        al que pertenecen los productos.
Si quiere comenzar la producción de una nueva partida sin afectar
a los ya existentes, cree un nuevo parte."""
if (self.objeto.articulos != []
and not utils.dialogo(titulo='¿ESTÁ SEGURO?',
texto=texto,
padre=self.wids['ventana'])):
return
codigo = utils.dialogo_entrada(
titulo='¿NÚMERO DE PARTIDA?',
texto='Introduzca el número de partida de embolsado a '
'producir:',
padre=self.wids['ventana'])
if codigo is None: # Cancel
return
ultima_partida = pclases.PartidaCem.get_nueva_o_ultima_vacia()
try:
codigo = utils.parse_numero(codigo.upper().replace(
pclases.PREFIJO_PARTIDACEM, ""))
partida = pclases.PartidaCem.select(
pclases.PartidaCem.q.numpartida == codigo)[0]
            if (self.usuario and self.usuario.nivel > 2
                    and partida.numpartida > ultima_partida.numpartida):
                utils.dialogo_info(
                    titulo="NÚMERO DE PARTIDA INCORRECTO",
                    texto="El número de partida %d es superior al de la "
                          "última partida válida para producir: %d\n"
                          "Vuelva a seleccionar partida." % (
                              codigo, ultima_partida.numpartida),
                    padre=self.wids['ventana'])
return
except (TypeError, ValueError) as msg:
self.logger.error("partes_de_fabricacion_bolsas::cambiar_partida "
"-> Código partida: %s. Excepción capturada: %s"
% (codigo, msg))
return
except IndexError:
if not self.usuario or self.usuario.nivel <= 2:
partida = pclases.PartidaCem(numpartida=codigo,
codigo="M-%d" % codigo)
pclases.Auditoria.nuevo(partida, self.usuario, __file__)
else:
danextone = ultima_partida
if danextone:
danextone = danextone.codigo
else:
danextone = "¡no encontrada!"
if utils.dialogo(
titulo="PARTIDA NO ENCONTRADA",
texto="No se encontró la partida.\n¿Continuar con la"
" siguiente partida de embolsado de cemento sin"
" \nproducción no asignada a ningún otro parte"
" (%s)?" % danextone,
padre=self.wids['ventana'],
defecto=True,
tiempo=15):
partida = ultima_partida
else:
return
# Pongo la partida como actual.
self.objeto.partidaCem = partida
self.wids['e_partida'].set_text(partida.codigo)
if partida.pales: # Ya tiene algún palé asociado de un parte anterior.
# Para no mezclar productos, cambio el del parte actual.
productoVenta = partida.pales[0].cajas[0].articulo.productoVenta
self.producto = productoVenta
self.rellenar_datos_producto(self.producto)
# Y cambio de partida los artículos y de producto de venta.
pales = []
for a in self.objeto.articulos:
a.productoVenta = self.producto
pale = a.caja.pale
if pale not in pales:
pales.append(a.caja.pale)
pale.partidaCem = partida
self.actualizar_ventana()
def get_partida(self):
"""
Devuelve la partida relacionada con el parte actual.
Si no hay partida definida devuelve None.
"""
        numpartida = self.wids['e_partida'].get_text()
numpartida = numpartida.upper().replace(pclases.PREFIJO_PARTIDACEM, "")
numpartida = int(numpartida)
return pclases.PartidaCem.select(
pclases.PartidaCem.q.numpartida == numpartida)[0]
# Debe existir en la BD por fuerza, "óyenme", por fuerza.
def _DEPRECATED_bloquear(self, ch, mostrar_alerta=True):
# Si el parte tiene menos de un día y se encuentra bloqueado, dejo
# que lo pueda desbloquear cualquiera.
if (mx.DateTime.localtime() - self.objeto.fecha <= mx.DateTime.oneDay
and (self.objeto.bloqueado or ch.get_active())):
self.objeto.bloqueado = False
elif ch.get_active() != self.objeto.bloqueado:
# NEW!: Los partes bloqueados solo los pueden desbloquear
# usuarios con nivel <= 1.
if self.objeto.bloqueado:
if self.usuario and self.usuario.nivel <= 2:
# and self.objeto.bloqueado and not ch.get_active():
self.objeto.bloqueado = False
else:
if self.__permisos.escritura: # Tiene permiso para bloquear
# el parte
self.objeto.bloqueado = True
else:
if mostrar_alerta:
utils.dialogo_info(
titulo="USUARIO SIN PRIVILEGIOS",
texto="No tiene permisos suficientes para bloq"
"uear y verificar partes de producción.",
padre=self.wids['ventana'])
self.objeto.sync()
self.objeto.make_swap()
ch.set_active(self.objeto.bloqueado)
def bloquear(self, ch, mostrar_alerta=True):
"""
- Si el usuario no tiene permisos y mostrar_alerta, avisa de que no
puede modificar la verificación del parte.
- Si el usuario tiene permisos,
- Si el parte está verificado y mostrar_alerta, informa de que no
se puede desbloquear un parte ya volcado a Murano.
- Si el parte no está verificado, lo bloquea y vuelca tanto la
producción como los consumos. Si mostrar_alerta, avisa de que
es una operación que no se puede deshacer.
El usuario debe tener nivel 2 o inferior.
"""
if self.objeto and ch.get_active() != self.objeto.bloqueado:
# No es la propia ventana la que está marcando la casilla al
# mostrar un parte bloqueado. El usuario el que ha hecho clic.
if (self.usuario and self.usuario.nivel <= 3
and self.__permisos.escritura):
if self.objeto.bloqueado:
# Ya está bloqueado. **No se puede desbloquear.** Los
# rollos puede que incluso ya se hayan vendido en Murano.
utils.dialogo_info(
titulo="OPERACIÓN NO PERMITIDA",
texto="No se pueden desbloquear partes ya "
"volcados a Murano.",
padre=self.wids['ventana'])
else:
if mostrar_alerta:
seguro = utils.dialogo(
titulo="¿VERIFICAR PARTE?",
texto="Se verificará el parte y se bloqueará."
"\nToda la producción y consumos se "
"volcarán a Murano.\n\n"
"¿Está completamente seguro?\n\n"
"(Esta operación no se puede deshacer)",
padre=self.wids['ventana'])
else:
seguro = True
if seguro:
# Porque Mr. Soy-demasiado-listo-para-esperar me tiene
# hasta los...
finparte = utils.convertir_a_fechahora(
self.objeto.fechahorafin)
ahora = mx.DateTime.now()
parte_terminado = ahora - finparte > 0
sensitive = self.wids['ch_bloqueado'].get_sensitive()
activo = sensitive and parte_terminado
# Impido verificar si el parte está abierto en
# producción todavía. Tiene que pasar al menos 1
# segundo desde la hora de fin de parte.
if not activo:
utils.dialogo_info(
titulo="HOLA, MARTY",
texto="No se puede cerrar un parte que "
"todavía no ha terminado de fabricarse.\n"
"\n\n(Y, por favor, si se te pregunta si "
"estás seguro, mejor que estés seguro "
"de verdad)",
padre=self.wids['ventana'])
else:
res = self.volcar_produccion()
if res:
self.objeto.bloqueado = True
self.objeto.sync()
self.objeto.make_swap()
else:
if mostrar_alerta:
str_error = "No se pudo volcar toda la "
str_error += "producción y consumos a "
str_error += "Murano.\n\nLos artículos no "
str_error += "volcados se han marcado con"
str_error += " el símbolo «✘».\nInténtelo"
str_error += " más tarde o contacte con el"
str_error += " administrador.\nEl parte "
str_error += "quedará pendiente de "
str_error += "verificar mientras tanto."
utils.dialogo_info(
titulo="ERROR VOLCADO",
texto=str_error,
padre=self.wids['ventana'])
self.rellenar_widgets()
else:
if mostrar_alerta:
utils.dialogo_info(
titulo="USUARIO SIN PRIVILEGIOS",
texto="No tiene permisos suficientes para "
"bloquear y verificar partes de "
"producción.\nPruebe a hacerlo desde "
"la ventana de partes pendientes de "
"verificar.",
padre=self.wids['ventana'])
ch.set_active(self.objeto.bloqueado)
def volcar_produccion(self):
"""
Vuelca todos los artículos del parte y consumos relacionados a Murano.
Devuelve True si todo ha ido bien o False si ocurrió algún error.
Vuelca también los consumos del parte.
"""
res = True
if not MURANO:
utils.dialogo_info(
titulo="ERROR CONEXIÓN MURANO",
texto="No hay conexión con Murano. Se aborta operación.",
padre=self.wids['ventana'])
else:
# Producción ===
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
i = 0.0
no_volcados = list(set([a.caja.pale for a in self.objeto.articulos
if not a.api]))
tot = len(no_volcados)
for pale in no_volcados:
i += 1
vpro.set_valor(i/tot, 'Volcando palé {} ({}/{})'.format(
pale.codigo, int(i), tot))
try:
volcado = murano.ops.create_pale(pale, observaciones="")
res = res and volcado
except Exception as errpale:
print("Error creando palé en Murano: ".format(errpale))
res = False
vpro.ocultar()
# Consumos ===
vpro = VentanaProgreso(padre=self.wids['ventana'])
vpro.mostrar()
consumos = [c for c in self.objeto.consumos
if not c.api and c.actualizado]
i = 0.0
tot = len(consumos) + len(self.objeto.bigbags)
# # consumos materiales
for consumo in consumos:
i += 1
vpro.set_valor(i/tot, 'Consumiendo {} ({}/{})'.format(
consumo.productoCompra.descripcion, int(i), tot))
try:
consumido = murano.ops.consumir(consumo.productoCompra,
consumo.cantidad,
consumo=consumo)
res = res and consumido
except Exception as errconsum:
print("Error consumiendo en Murano: {}".format(errconsum))
res = False
# # consumos materia prima (bigbags)
for bigbag in self.objeto.bigbags:
i += 1
vpro.set_valor(i/tot, 'Consumiendo materia prima ({})'.format(
bigbag.codigo))
try:
consumido = murano.ops.consume_bigbag(bigbag)
res = res and consumido
except Exception as errconsbb:
print("Error consumiendo bigbag en Murano: {}".format(
errconsbb))
res = False
vpro.ocultar()
return res
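    # --- Sketch (illustrative only) --------------------------------------
    # volcar_produccion() accumulates success with `res = res and paso`,
    # so a single failed palé or consumo leaves the parte unverified while
    # the remaining items are still dumped. The pattern in isolation
    # (`pasos`: callables returning a bool):
    @staticmethod
    def _sketch_volcar_todo(pasos):
        res = True
        for paso in pasos:
            try:
                res = paso() and res   # call first, then fold into the flag
            except Exception:
                res = False            # keep going; report failure at the end
        return res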
def add_empleados_calendario(self):
"""
Añade los empleados planificados según el calendario laboral
para la línea de producción.
1.- Obtener el calendario para self.linea.
2.- Obtener los laborables del calendario correspondiente a la fecha
del objeto.
3.- Filtrar los laborables en función del turno correspondiente a la
hora del objeto.
4.- Obtener los empleados del laborable resultante.
5.- Eliminar los empleados actuales. (PREGUNTA ANTES DE HACERLO)
6.- Insertarlos los nuevos en el parte.
"""
if self.linea is not None:
idldp = self.linea.id
CAL = pclases.CalendarioLaboral
calendarios = CAL.select("""linea_de_produccion_id = %d AND
date_part('month', mes_anno) = %d AND
date_part('year', mes_anno) = %d"""
% (idldp, self.objeto.fecha.month,
self.objeto.fecha.year))
if calendarios.count() == 1:
calendario = calendarios[0]
empleados = self.get_empleados_de_calendario(calendario)
# Si hay empleados
if self.objeto.horasTrabajadas != []:
# Si no son los mismos del calendario y los quiere borrar.
if ([horastrab.empleado
for horastrab
in self.objeto.horasTrabajadas] != empleados and
utils.dialogo(
titulo="¿ELIMINAR OPERARIOS?",
texto="El parte ya tiene empleados relacionado"
"s.\n¿Desea eliminarlos y asociar los de"
"finidos en el turno?",
padre=self.wids['ventana'])):
for horastrab in self.objeto.horasTrabajadas:
self.objeto.removeEmpleado(horastrab.empleado)
else:
# Si no los quiere borrar, cancelo todo.
return
# Si no había empleados o no eran los mismos y los ha borrado.
# Añado empleados de los laborables que cumplan el turno y
# sean de producción (no-recuperación).
for empleado in empleados:
self.objeto.addEmpleado(empleado)
elif calendarios.count() > 1:
self.logger.error(
"partes_de_fabricacion_bolsas.py -> Existe"
" más de un calendario laboral para el mes, año y "
"línea de producción: fecha %s - idldp %d - idparte "
"%s." % (self.objeto.fecha, idldp, self.objeto.id))
# pylint: disable=too-many-locals
def get_empleados_de_calendario(self, calendario):
"""
Devuelve los empleados programados para trabajar en el parte
actual según el calendario recibido.
"""
res = []
lab = pclases.Laborable
dia_lab_parte = self.objeto.fecha
seis_am = mx.DateTime.DateTimeDeltaFrom(hours=6)
medianoche = mx.DateTime.DateTimeDeltaFrom(hours=0)
if (self.objeto.horainicio >= medianoche and
self.objeto.horainicio <= seis_am and
self.objeto.horafin <= seis_am): # No se mezclan turnos, esta
# última comprobación podría no hacer falta.
dia_lab_parte -= mx.DateTime.oneDay
laborables = lab.select(
"calendario_laboral_id = %d AND date_part('day', fecha) = %d"
% (calendario.id, dia_lab_parte.day))
for laborable in laborables:
turno = laborable.turno
if turno is None:
mensaje = "partes_de_fabricacion_bolsas.py::"\
"get_empleados_de_calendario -> Laborable ID %d no"\
" tiene turno relacionado. Intento eliminarlo de la"\
" BD." % (laborable.id)
print("ERROR: %s" % (mensaje))
self.logger.error(mensaje)
try:
laborable.destroy(ventana=__file__)
idlaborable = laborable.id
self.logger.warning(
"partes_de_fabricacion_bolsas.py::"
"get_empleados_de_calendario -> Registro "
"laborable ID %d ELIMINADO "
"SATISFACTORIAMENTE." % (idlaborable))
except Exception as msg: # pylint: disable=bare-exception
self.logger.error(
"partes_de_fabricacion_bolsas.py::"
"get_empleados_de_calendario -> Registro "
"laborable ID %d NO ELIMINADO.\n\n"
"Error: %s" % (laborable.id, msg))
continue
turnohorainicio = utils.DateTime2DateTimeDelta(turno.horainicio)
turnohorafin = utils.DateTime2DateTimeDelta(turno.horafin)
objetohorainicio = utils.DateTime2DateTimeDelta(
self.objeto.horainicio)
objetohorafin = utils.DateTime2DateTimeDelta(self.objeto.horafin)
if not turno.recuperacion:
ohi = objetohorainicio
ohf = objetohorafin
thi = turnohorainicio
thf = turnohorafin
if thi > thf:
thf += mx.DateTime.oneDay
if ohi > ohf:
ohf += mx.DateTime.oneDay
if seis_am > ohi >= medianoche:
ohi += mx.DateTime.oneDay
if seis_am >= ohf >= medianoche:
ohf += mx.DateTime.oneDay
if thi <= ohi <= thf and thi <= ohf <= thf:
for empleado in laborable.empleados:
res.append(empleado)
return res
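    # --- Sketch (illustrative only) --------------------------------------
    # Deciding whether a parte falls inside a turno needs both ranges
    # unwrapped past midnight before the containment test, as done above.
    # Reduced form (works with datetime or timedelta values):
    @staticmethod
    def _sketch_entra_en_turno(ohi, ohf, thi, thf):
        import datetime
        un_dia = datetime.timedelta(days=1)
        if thi > thf:                  # turno wraps past midnight
            thf += un_dia
        if ohi > ohf:                  # parte wraps past midnight
            ohf += un_dia
        return thi <= ohi <= thf and thi <= ohf <= thf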
def add_consumo(self, boton):
"""Agrega un consumo al parte actual."""
self.consumir_manual(boton)
def add_bigbag(self, boton):
"""
Consume un bigbag buscándolo por su código de trazabilidad.
"""
codigo = utils.dialogo_entrada(
titulo="BUSCAR BIGBAG",
texto="Introduzca el código de trazabilidad del bigbag\n"
"de fibra de cemento:",
padre=self.wids['ventana'])
if codigo:
codigo = codigo.replace(" ", "").replace("-", "").upper().strip()
if not codigo.startswith("C"):
try:
codigo = "C%d" % utils.parse_numero(codigo)
except TypeError:
utils.dialogo_info(
titulo="ERROR",
texto="El texto introducido «%s» no es un número."
% (codigo),
padre=self.wids['ventana'])
codigo = "erróneo"
try:
bigbag = pclases.Bigbag.selectBy(codigo=codigo)[0]
except IndexError:
utils.dialogo_info(
titulo="CÓDIGO NO ENCONTRADO",
texto="El código %s no se encontró." % codigo,
padre=self.wids['ventana'])
else:
albint = self.objeto.buscar_o_crear_albaran_interno(
incluir_consumos_auto=True) # Normalmente no, pero
# aquí sí quiero que aparezcan en el alb. interno.
bigbag.articulo.sync()
if bigbag.articulo.almacen != albint.almacenOrigen:
utils.dialogo_info(
titulo="BIGBAG NO ESTÁ EN ALMACÉN",
texto="El bigbag %s no se encuentra en el almacén"
" %s" % (codigo,
albint.almacenOrigen.nombre),
padre=self.wids['ventana'])
else:
# Para consumir lo sacamos del almacén.
bigbag.parteDeProduccion = self.objeto
bigbag.articulo.almacen = None
bigbag.articulo.syncUpdate()
# Y lo metemos en el albarán interno.
lineas_albaran = {}
for ldv in albint.lineasDeVenta:
pv = ldv.productoVenta
if pv not in lineas_albaran:
lineas_albaran[pv] = [ldv]
else:
lineas_albaran[pv].append(ldv)
pv_bb = bigbag.articulo.productoVenta
if pv_bb not in lineas_albaran:
linea_albaran = pclases.LineaDeVenta(
ticket=None,
pedidoVenta=None,
facturaVenta=None,
productoVenta=pv_bb,
albaranSalida=albint,
prefactura=None,
productoCompra=None,
fechahora=mx.DateTime.localtime(),
cantidad=0.0,
precio=pv_bb.precioDefecto,
descuento=0.0,
notas="",
descripcionComplementaria="Reembolsado")
lineas_albaran[pv_bb] = [linea_albaran]
pclases.Auditoria.nuevo(linea_albaran,
self.usuario, __file__)
bigbag.articulo.albaranSalida = albint
lineas_albaran[pv_bb][-1].cantidad += bigbag.pesobigbag
lineas_albaran[pv_bb][-1].syncUpdate()
self.rellenar_tabla_consumos()
self.rellenar_estadisticas()
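    # --- Sketch (illustrative only) --------------------------------------
    # add_bigbag() groups albarán lines per producto with a find-or-create
    # dict so repeated consumos accumulate on a single line. The grouping
    # idiom alone (hypothetical `lineas` with a productoVenta attribute):
    @staticmethod
    def _sketch_agrupar_lineas(lineas):
        por_producto = {}
        for linea in lineas:
            por_producto.setdefault(linea.productoVenta, []).append(linea)
        return por_producto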
def drop_consumo(self, boton):
"""
Elimina los consumos seleccionados.
"""
model, paths = self.wids['tv_consumos'].get_selection(
).get_selected_rows()
if paths is None or paths == []:
utils.dialogo_info(
'CONSUMOS NO SELECCIONADOS',
'Debe seleccionar uno o varios consumos a eliminar del '
'parte.',
padre=self.wids['ventana'])
else:
if not utils.dialogo('¿Eliminar del parte?',
'BORRAR CONSUMOS DEL CONTROL DE ENVASADO',
padre=self.wids['ventana']):
return
for path in paths:
ide = model[path][-1]
if ide > 0: # Es consumo
consumo = pclases.Consumo.get(ide)
consumo.parteDeProduccion = None
try:
consumo.anular_consumo()
# consumo.destroy(ventana = __file__)
except Exception as msg:
utils.dialogo_info(
titulo='INCIDENCIA NO ELIMINADA',
texto='Ocurrió un error al intentar '
'eliminar el consumo.\n\n\n'
'Error: {}'.format(msg),
padre=self.wids['ventana'])
elif ide < 0: # Es bigbag
ide = -ide
bigbag = pclases.Bigbag.get(ide)
albint = self.objeto.get_albaran_interno() # DEBE existir
assert albint == bigbag.articulo.albaranSalida
# Devuelvo al almacén
bigbag.parteDeProduccion = None
bigbag.articulo.almacen = albint.almacenOrigen
bigbag.articulo.albaranSalida = None
bigbag.articulo.sync()
# Y saco del albarán
idldv = albint._buscar_ldv(
albint.agrupar_articulos(),
bigbag.articulo.productoVenta.codigo,
0.0) # La cantidad no me importa.
ldv = pclases.LineaDeVenta.get(idldv)
ldv.cantidad -= bigbag.pesobigbag
ldv.syncUpdate()
# self.actualizar_ventana()
self.objeto.buscar_o_crear_albaran_interno(
incluir_consumos_auto=True) # Normalmente no, pero
# aquí sí quiero que aparezcan en el alb. interno.
self.rellenar_tabla_consumos()
def cambiar_numbolsas(self, cell, path, newtext):
"""
Comprueba que se ha escrito un número y ajusta el número de bolsas por
caja del palé creando o eliminando bolsas de cada caja hasta llegar
al número tecleado.
"""
# TODO:
pass
def pasar_pale_a_B(self, cell, path):
"""
Si la fila que ha marcado era B cambia todo el palé a A. Si no, hace
lo contrario y lo cambia entero a A.
"""
        # TODO: This involves changing bags per box and more. It is not
        # just a matter of changing one attribute on the object.
pass
def consumir_manual(self, boton):
"""
        Manually creates a consumption record and then merges the
        consumptions.
        If any consumption ends up with quantity 0 (because a negative
        consumption was added that subtracted from another one), it is
        deleted before leaving the routine.
"""
        # Ask for the product(s) to consume.
producto, texto_buscado = utils.pedir_producto_compra(
padre=self.wids['ventana'])
        # Ask for the quantity.
if producto is not None:
unidad = ""
try:
producto_unidad = producto.unidad
if producto_unidad != "":
unidad = " en %s" % (producto_unidad)
except AttributeError as msg:
self.logger.error(
"%sEl producto tipo %s ID %d no tiene "
"atributo unidad. Excepción AttributeError: %s." % (
self.usuario and self.usuario.usuario + ": " or "",
type(producto),
producto is not None and producto.id or "NONE",
msg))
descripcion = producto.descripcion
cantidad = utils.dialogo_entrada(
titulo="CANTIDAD",
texto="Introduzca la cantidad a consumir de %s%s."
"\n<small><i>%s</i></small>" % (
descripcion, unidad, producto.observaciones),
padre=self.wids['ventana'])
if cantidad is not None:
try:
cantidad_a_consumir = utils._float(cantidad)
except (TypeError, ValueError):
utils.dialogo_info(
titulo="ERROR DE FORMATO",
texto='El texto introducido "%s" no es un número'
'.' % (cantidad),
padre=self.wids['ventana'])
else:
                    # Create the consumption record.
producto.sync()
consumo = pclases.Consumo(
silo=None,
parteDeProduccion=self.objeto,
productoCompra=producto,
actualizado=True,
antes=producto.existencias,
despues=producto.existencias - cantidad_a_consumir,
cantidad=cantidad_a_consumir)
pclases.Auditoria.nuevo(consumo, self.usuario, __file__)
                    # Update stock levels.
producto.existencias -= cantidad_a_consumir
producto.add_existencias(-cantidad_a_consumir)
producto.syncUpdate()
self.logger.warning(
"%sCONSUMO LÍNEA EMBOLSADO -> PARTE %d -> "
"Consumiendo manualmente %f %s de %s (ID %d). "
"Existencias: %f." % (
self.usuario
and self.usuario.usuario + ": " or "",
self.objeto.id,
cantidad_a_consumir,
producto.unidad,
producto.descripcion,
producto.id,
producto.existencias))
                    # Merge consumptions.
self.objeto.unificar_consumos()
                    # Delete consumptions with zero quantity.
for c in self.objeto.consumos:
if round(c.cantidad, 3) == 0:
                            # Things as small as needles are consumed in
                            # such tiny quantities that, after several
                            # insertions and deletions, a consumption may
                            # be left with quantity 0.0000...1, which must
                            # be deleted.
try:
c.destroy(ventana=__file__)
except Exception as msg:
self.logger.error(
"%sConsumo ID %d no se pudo eliminar."
" Excepción: %s" % (
self.usuario
and self.usuario.usuario + ": "
or "",
c.id,
msg))
self.rellenar_tabla_consumos()
                    # Find and create (if it does not exist) the internal
                    # delivery note for the consumptions.
                    self.objeto.buscar_o_crear_albaran_interno(
                        incluir_consumos_auto=True)  # Normally no, but
                    # here I do want them to show up in the internal note.
def etiquetar(self, boton):
"""
        Generates the labels for the selected pallets.
"""
treev = self.wids['tv_produccion']
model, paths = treev.get_selection().get_selected_rows()
if paths is None or paths == []:
utils.dialogo_info(
'SELECCIONE PALÉS',
'Debe seleccionar uno o más palés a etiquetar.',
padre=self.wids['ventana'])
else:
if not utils.dialogo(
'A continuación se generarán las etiquetas para {} palé{}.'
'\n\n¿Continuar?'.format(len(paths),
len(paths) != 1 and "s" or ""),
'¿GENERAR ETIQUETAS?',
padre=self.wids['ventana']):
return
ids = []
for path in paths:
puid = model[path][-1]
clase, ide = puid.split(":")
ids.append(ide)
pales = [pclases.Pale.get(ide) for ide in ids]
imprimir_etiquetas_pales(pales, self.wids['ventana'])
def imprimir_etiquetas_pales(pales, padre=None, mostrar_dialogo=True):
"""
    Shows a window to select the type of labels to print.
    The neutral one (tipo = 1) is selected by default.
    If mostrar_dialogo is False, the stored global option is used. If there
    is no stored global option, the parameter is ignored and the dialog is
    shown anyway.
"""
global MEMENTO_MORI
    # For the regulation of July 1, 2013 I force labels to always be
    # printed with the new format. But since there may be a rollback, I
    # will allow the possibility (although not in the GUI, only
    # programmatically) of still printing the old labels.
    MEMENTO_MORI['que_imprimir'] = 0  # No more box labels. Under the 2013
    # rule, only pallet labels are printed.
    if MEMENTO_MORI['que_imprimir'] is None:  # No option ever chosen yet:
mostrar_dialogo = True
else:
que_imprimir = MEMENTO_MORI['que_imprimir']
mostrar_dialogo = False
if mostrar_dialogo:
que_imprimir = utils.dialogo_radio(
titulo="SELECCIONAR TIPO IMPRESIÓN",
texto="Seleccione qué imprimir:",
ops=[(0, "Etiqueta de palé"),
(1, "Etiquetas de caja"),
(2, "Etiquetas de palé y cajas")],
padre=padre,
valor_por_defecto=2)
if que_imprimir is not None:
MEMENTO_MORI['que_imprimir'] = que_imprimir
if que_imprimir in (0, 2):
        tipo = 3  # Option not present in the dialog but recognized
        # by the function that will generate the labels.
        # BACKTRACKING to the old label until we fix the labelling
        # machine on the cement line.
        # tipo = MEMENTO_MORI['tipo']  # <- 18/09/2013: switched to the
        # new one. I no longer allow choosing another one.
if tipo is None:
tipo = utils.dialogo_radio(
titulo="SELECCIONAR ETIQUETA",
texto="Seleccione el tipo de etiqueta a generar:",
ops=[(0, "Mínima (solo código de palé, partida y "
"marcado CE)"),
(1, "Neutra (incluye datos de producto)"),
(2, "Completa (incluye el nombre de la empresa"
")")],
padre=padre,
valor_por_defecto=1)
if tipo is not None:
MEMENTO_MORI['tipo'] = tipo
else:
return
        # EOBACKTRACK: Uncomment the rotate=True when we go back to using
        # the new labels.
try:
ceb = pales[0].productoVenta.camposEspecificosBala
func_etiqueta = ceb.modeloEtiqueta.get_func()
filetiqpale = func_etiqueta(pales)
        except (AttributeError, IndexError, ValueError):  # Fall back to
            # the default label.
filetiqpale = geninformes.generar_etiqueta_pale(pales, tipo)
for pale in pales:
pclases.Auditoria.modificado(
pale,
# self.usuario,
None,
__file__,
"Impresión de etiqueta para palé %s" % (
pale.codigo))
mandar_a_imprimir_con_ghostscript(filetiqpale, rotate=True)
if que_imprimir == 1 or que_imprimir == 2:
tipo = MEMENTO_MORI['tipo']
if tipo is None:
tipo = utils.dialogo_radio(
titulo="SELECCIONAR ETIQUETA",
texto="Seleccione el tipo de etiqueta a generar:",
ops=[(0, "Mínima (solo código de palé, partida y "
"marcado CE)"),
(1, "Neutra (incluye datos de producto)"),
(2, "Completa (incluye el nombre de la empresa"
")")],
padre=padre,
valor_por_defecto=1)
if tipo is not None:
MEMENTO_MORI['tipo'] = tipo
cajas = []
for p in pales:
cajas += p.cajas[:]
for caja in cajas:
pclases.Auditoria.modificado(
caja.articulo,
# self.usuario,
None,
__file__,
"Impresión de etiqueta para caja %s" % (
caja.articulo.get_info()))
filetiqcaja = geninformes.generar_etiqueta_caja(cajas, tipo)
mandar_a_imprimir_con_ghostscript(filetiqcaja)
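# Typical call site, as a sketch (assumed; "pales" would be a list of
# pclases.Pale instances and "ventana" an existing gtk window):
#   imprimir_etiquetas_pales(pales, padre=ventana, mostrar_dialogo=False)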
if __name__ == "__main__":
    # TODO: Why does it not pick up the username and password from the parameters? I need to be able to run it as python formularios/partes...
p = PartesDeFabricacionBolsas()
| gpl-2.0 | -6,043,880,975,479,553,000 | 46.973039 | 134 | 0.493912 | false |
hackerspace-silesia/cebulany-manager | cebulany/resources/payment_summary.py | 1 | 1554 |
from flask_restful import fields, marshal_with
from flask_restful.reqparse import RequestParser
from cebulany.auth import token_required
from cebulany.queries.payment_summary import PaymentSummaryQuery
from cebulany.resources.model import ModelListResource
resource_fields = {
'payments': fields.List(fields.Nested({
'cost': fields.Price(decimals=2),
'is_positive': fields.Boolean(),
'payment_type_id': fields.Integer(),
'budget_id': fields.Integer(),
})),
'balances': fields.Nested({
'curr_start_year': fields.Price(decimals=2),
'curr_end_year': fields.Price(decimals=2),
'prev_start_year': fields.Price(decimals=2),
'prev_end_year': fields.Price(decimals=2),
'diff_start_year': fields.Price(decimals=2),
'diff_end_year': fields.Price(decimals=2),
'diff_prev_start_year': fields.Price(decimals=2),
'diff_prev_end_year': fields.Price(decimals=2),
}),
'outstanding_cost': fields.Price(decimals=2),
}
query_summary_parser = RequestParser()
query_summary_parser.add_argument('year', type=int)
class PaymentSummaryResource(ModelListResource):
@token_required
@marshal_with(resource_fields)
def get(self):
args = query_summary_parser.parse_args()
year = args['year']
return {
'payments': PaymentSummaryQuery.get_payment_data(year),
'balances': PaymentSummaryQuery.get_balances(year),
'outstanding_cost': PaymentSummaryQuery.get_outstanding_cost(year),
}
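# Wiring sketch (an assumption, not part of this module): the resource is
# typically registered on a flask_restful Api instance, e.g.
#   api.add_resource(PaymentSummaryResource, '/payment_summary')
# and queried as GET /payment_summary?year=2020.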
| mit | 7,752,611,034,030,863,000 | 32.782609 | 79 | 0.666667 | false |
vprnet/iframe | app/views.py | 1 | 2454 | from app import app
from flask import render_template, request
from config import BASE_URL
import csv
# Not being used anywhere
@app.route('/connect-choropleth')
def connect():
page_url = BASE_URL + request.path
return render_template('health-connect-choropleth.html', page_url=page_url)
@app.route('/sewage')
def sewage():
page_url = BASE_URL + request.path
return render_template('sewage.html', page_url=page_url)
@app.route('/doc-cloud')
def cloud():
return render_template('doc-cloud.html')
@app.route('/license-plates')
def license():
return render_template('license-plates.html')
@app.route('/broadband')
def broadband():
return render_template('broadband.html')
@app.route('/town-meeting/school-budget-revote')
def school_budget_revote():
return render_template('school-budget-revote.html')
@app.route('/town-meeting/school-budget')
def school_budget():
return render_template('school-budget.html')
@app.route('/town-meeting/town-budget')
def town_budget():
return render_template('town-budget.html')
@app.route('/town-meeting/public-bank')
def public_bank():
return render_template('public-bank.html')
@app.route('/town-meeting/results')
def results():
return render_template('results.html')
@app.route('/town-meeting/tar-sands')
def tar_sands():
return render_template('tar-sands.html')
@app.route('/code/tmd-index')
def tmd_index():
return render_template('tmdindex.html')
@app.route('/code/tmd-script')
def tmd_script():
return render_template('tmdscript.html')
@app.route('/code/tmd-structure')
def tmd_structure():
return render_template('tmdstructure.html')
@app.route('/winter')
def winter_length():
    with open('app/data/winters.csv', 'rU') as f:
        winters = [l for l in csv.reader(f)]
temperatures = [[20, 30, 40, 50, 60, 70, 80, 90],
[20, 22, 24, 26, 28, 30, 32, 34],
[25, 26, 27, 28, 29, 30, 31, 32],
[30, 32, 34, 36, 38, 40, 42, 44],
[35, 36, 37, 38, 39, 40, 41, 42],
[40, 42, 44, 46, 48, 50, 52, 54],
[45, 46, 47, 48, 49, 50, 51, 52],
[50, 52, 54, 56, 58, 60, 62, 64],
[55, 56, 57, 58, 59, 60, 61, 62],
[60, 62, 64, 66, 68, 70, 72, 74],
[65, 66, 67, 68, 69, 70, 71, 72],
[70, 72, 74, 76, 78, 80, 82, 84],
[80, 82, 84, 86, 88, 90, 92, 94]]
return render_template('winter.html',
temperatures=temperatures,
winters=winters)
| apache-2.0 | -876,918,957,947,200,600 | 23.54 | 79 | 0.628769 | false |
foursquare/pants | src/python/pants/backend/jvm/tasks/analysis_extraction.py | 1 | 6308 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import os
from collections import defaultdict
from pants.backend.jvm.subsystems.dependency_context import DependencyContext
from pants.backend.jvm.subsystems.zinc import Zinc
from pants.backend.jvm.tasks.nailgun_task import NailgunTask
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.workunit import WorkUnitLabel
from pants.goal.products import MultipleRootedProducts
from pants.util.memo import memoized_property
class AnalysisExtraction(NailgunTask):
"""A task that handles extracting product and dependency information from zinc analysis."""
# The output JSON created by this task is not localized, but is used infrequently enough
# that re-computing it from the zinc analysis (which _is_ cached) when necessary is fine.
create_target_dirs = True
@classmethod
def subsystem_dependencies(cls):
return super(AnalysisExtraction, cls).subsystem_dependencies() + (DependencyContext, Zinc.Factory)
@classmethod
def register_options(cls, register):
super(AnalysisExtraction, cls).register_options(register)
@classmethod
def prepare(cls, options, round_manager):
super(AnalysisExtraction, cls).prepare(options, round_manager)
round_manager.require_data('zinc_analysis')
round_manager.require_data('runtime_classpath')
@classmethod
def product_types(cls):
return ['classes_by_source', 'product_deps_by_src']
def _create_products_if_should_run(self):
"""If this task should run, initialize empty products that it will populate.
Returns true if the task should run.
"""
should_run = False
if self.context.products.is_required_data('classes_by_source'):
should_run = True
make_products = lambda: defaultdict(MultipleRootedProducts)
self.context.products.safe_create_data('classes_by_source', make_products)
if self.context.products.is_required_data('product_deps_by_src'):
should_run = True
self.context.products.safe_create_data('product_deps_by_src', dict)
return should_run
@memoized_property
def _zinc(self):
return Zinc.Factory.global_instance().create(self.context.products)
def _summary_json_file(self, vt):
return os.path.join(vt.results_dir, 'summary.json')
@memoized_property
def _analysis_by_runtime_entry(self):
zinc_analysis = self.context.products.get_data('zinc_analysis')
return {cp_entry: analysis_file for _, cp_entry, analysis_file in zinc_analysis.values()}
def execute(self):
# If none of our computed products are necessary, return immediately.
if not self._create_products_if_should_run():
return
zinc_analysis = self.context.products.get_data('zinc_analysis')
classpath_product = self.context.products.get_data('runtime_classpath')
classes_by_source = self.context.products.get_data('classes_by_source')
product_deps_by_src = self.context.products.get_data('product_deps_by_src')
fingerprint_strategy = DependencyContext.global_instance().create_fingerprint_strategy(
classpath_product)
targets = list(zinc_analysis.keys())
with self.invalidated(targets,
fingerprint_strategy=fingerprint_strategy,
invalidate_dependents=True) as invalidation_check:
# Extract and parse products for any relevant targets.
for vt in invalidation_check.all_vts:
summary_json_file = self._summary_json_file(vt)
cp_entry, _, analysis_file = zinc_analysis[vt.target]
if not vt.valid:
self._extract_analysis(vt.target, analysis_file, summary_json_file)
self._register_products(vt.target,
cp_entry,
summary_json_file,
classes_by_source,
product_deps_by_src)
def _extract_analysis(self, target, analysis_file, summary_json_file):
target_classpath = self._zinc.compile_classpath('runtime_classpath', target)
analysis_by_cp_entry = self._analysis_by_runtime_entry
upstream_analysis = list(self._upstream_analysis(target_classpath, analysis_by_cp_entry))
args = [
'-summary-json', summary_json_file,
'-analysis-cache', analysis_file,
'-classpath', ':'.join(target_classpath),
'-analysis-map', ','.join('{}:{}'.format(k, v) for k, v in upstream_analysis),
]
args.extend(self._zinc.rebase_map_args)
result = self.runjava(classpath=self._zinc.extractor,
main=Zinc.ZINC_EXTRACT_MAIN,
args=args,
workunit_name=Zinc.ZINC_EXTRACTOR_TOOL_NAME,
workunit_labels=[WorkUnitLabel.MULTITOOL])
if result != 0:
raise TaskError('Failed to parse analysis for {}'.format(target.address.spec),
exit_code=result)
def _upstream_analysis(self, target_classpath, analysis_by_cp_entry):
for entry in target_classpath:
analysis_file = analysis_by_cp_entry.get(entry)
if analysis_file is not None:
yield entry, analysis_file
def _register_products(self,
target,
target_cp_entry,
summary_json_file,
classes_by_source,
product_deps_by_src):
summary_json = self._parse_summary_json(summary_json_file)
# Register a mapping between sources and classfiles (if requested).
if classes_by_source is not None:
buildroot = get_buildroot()
for abs_src, classes in summary_json['products'].items():
source = os.path.relpath(abs_src, buildroot)
classes_by_source[source].add_abs_paths(target_cp_entry, classes)
# Register classfile product dependencies (if requested).
if product_deps_by_src is not None:
product_deps_by_src[target] = summary_json['dependencies']
def _parse_summary_json(self, summary_json_file):
with open(summary_json_file) as f:
return json.load(f, encoding='utf-8')
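  # For reference, an assumed shape of the extractor's summary JSON (inferred
  # from the parsing in _register_products above, not a documented schema):
  #   {"products": {"/abs/src/Foo.scala": ["/abs/classes/com/Foo.class"]},
  #    "dependencies": {...}}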
| apache-2.0 | -4,502,153,119,825,968,600 | 40.5 | 102 | 0.677394 | false |
anarchih/SmallProject | project1-1/evaluator.py | 1 | 5529 | # import pandas as pd
import datetime as dt
# import matplotlib.pyplot as plt
from collections import Counter
import csv
class Cluster(object):
def __init__(self, capacity):
self.week = [0] * 60
class Evaluater(object):
def __init__(self, filename, k, dist, capacity, date_range):
self.data = self.read_tsv(filename)
self.k = k
self.dist = dist
self.capacity = capacity
self.date_range = dt.timedelta(date_range)
self.cluster = Cluster(capacity)
self.xmin = min(self.data['x'])
self.xmax = max(self.data['x'])
self.ymin = min(self.data['y'])
self.ymax = max(self.data['y'])
self.labels_ = [0 for i in range(len(self.data['x']))]
def read_tsv(self, filename):
data = {'x': [], 'y': [], 'date':[]}
f = open(filename, "r")
f.readline()
for row in csv.reader(f, delimiter='\t'):
data['x'].append(float(row[9]))
data['y'].append(float(row[10]))
data['date'].append(dt.datetime.strptime('2015/' + row[5], "%Y/%m/%d"))
return data
# data = pd.read_csv(filename, sep="\t")
# # rows = random.sample(list(data.index), 5000)
# # data = data.ix[rows]
# data = data.rename(columns = {'經度座標':'x'})
# data = data.rename(columns = {'緯度座標':'y'})
# dtime = pd.DataFrame([[dt.datetime.strptime('2015/' + i, "%Y/%m/%d")] for i in data['發病日期']], columns=['date'])
# data = data.join(dtime)
# del data['發病日期']
# data = data.sort(['date'])
# data = data.reset_index()
# return data
def evaluate(self, ind):
count = 0
for p1 in zip(self.data['x'], self.data['y']):
for j, p2 in enumerate(ind):
if self.distance(p1, p2) < self.dist:
count += 1
break
return count,
def eval(self, ind):
count = 0
latest_date = [dt.datetime(1990, 1, 1)] * self.k
sum_capacity = [0] * self.k
tmp = [[0] * self.date_range.days for i in range(self.k)]
for i, p1 in enumerate(zip(self.data['x'], self.data['y'])):
c = self.find_data_belongs_to(p1, ind)
if c != 10000:
date_gap = self.data['date'][i] - latest_date[c]
latest_date[c] = self.data['date'][i]
if date_gap >= self.date_range:
sum_capacity[c] = 1
tmp[c] = [0] * self.date_range.days
tmp[c][0] = 1
count += 1
else:
t = [0] * date_gap.days + tmp[c][0:self.date_range.days - date_gap.days]
t[0] += 1
sum_c = sum(t)
if sum_c <= self.capacity:
tmp[c] = t
sum_capacity[c] = sum_c
count += 1
return count,
def find_data_belongs_to(self, p1, ind):
        current_cluster = 10000  # sentinel meaning "no cluster in range"
        Min = 10000  # larger than any plausible Manhattan distance here
for j, p2 in enumerate(ind):
dist = self.distance(p1, p2)
if dist < self.dist and dist < Min:
Min = dist
current_cluster = j
return current_cluster
def distance(self, p1, p2):
return abs(p1[0] - p2[0]) + abs(p1[1] - p2[1])
def calc_labels(self, ind):
count = 0
latest_date = [dt.datetime(1990, 1, 1)] * self.k
sum_capacity = [0] * self.k
tmp = [[0] * self.date_range.days for i in range(self.k)]
for i, p1 in enumerate(zip(self.data['x'], self.data['y'])):
c = self.find_data_belongs_to(p1, ind)
if c != 10000:
date_gap = self.data['date'][i] - latest_date[c]
latest_date[c] = self.data['date'][i]
if date_gap >= self.date_range:
sum_capacity[c] = 1
tmp[c] = [0] * self.date_range.days
tmp[c][0] = 1
count += 1
else:
t = [0] * date_gap.days + tmp[c][0:self.date_range.days - date_gap.days]
t[0] += 1
sum_c = sum(t)
if sum_c <= self.capacity:
tmp[c] = t
sum_capacity[c] = sum_c
count += 1
self.labels_[i] = c + 1
return count,
def draw_result(self):
self.draw_data()
# self.draw_range()
print(Counter(self.labels_))
# plt.show()
def draw_range(self):
pass
# plt.scatter(self.cluster_centers_[:, 0], self.cluster_centers_[:, 1], s=50)
def draw_data(self):
tmp = [20 if self.labels_[i] != 0 else 1 for i in range(len(self.labels_))]
# plt.scatter(self.data['x'], self.data['y'], s = tmp, c = self.labels_)
# plt.scatter(self.data['x'], self.data['y'], s=tmp, c=self.result)
def draw_raw_data(self):
pass
# plt.scatter(self.data['x'],self.data['y'],s=1)
# plt.show()
def save_result(self):
pass
# data = pd.DataFrame({'id': self.data['傳染病報告單電腦編號'],
# 'x': self.data['x'],
# 'y': self.data['y'],
# 'class':self.labels_})
# data.to_csv("result.csv")
e = Evaluater("data.tsv", 5, 0.02, 200000, 3)
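# Example driver (a sketch; the five (x, y) centers below are made up --
# real candidates would normally come from an optimizer):
# centers = [(121.50, 25.03), (121.52, 25.05), (121.48, 25.01),
#            (121.55, 25.07), (121.45, 24.99)]
# print(e.eval(centers))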
| gpl-3.0 | 891,766,790,229,226,900 | 35.513333 | 121 | 0.466314 | false |
google/digitalbuildings | tools/validators/instance_validator/validate/handler.py | 1 | 8569 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an AS IS BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Validation Helper."""
from __future__ import print_function
from datetime import datetime
import sys
from typing import Callable, Dict, List, Optional, Tuple
from validate import entity_instance
from validate import generate_universe
from validate import instance_parser
from validate import subscriber
from validate import telemetry_validator
from yamlformat.validator import presubmit_validate_types_lib as pvt
def Deserialize(
    yaml_files: List[str]
) -> Tuple[Dict[str, entity_instance.EntityInstance],
           instance_parser.ConfigMode]:
"""Parses a yaml configuration file and deserializes it.
Args:
yaml_files: list of building configuration files.
Returns:
    A tuple of (map of entity name to EntityInstance, the config mode).
"""
  print('Validating syntax, please wait ...')
parser = instance_parser.InstanceParser()
for yaml_file in yaml_files:
print('Opening file: {0}, please wait ...'.format(yaml_file))
parser.AddFile(yaml_file)
parser.Finalize()
default_entity_operation = instance_parser.EntityOperation.ADD
if parser.GetConfigMode() == instance_parser.ConfigMode.UPDATE:
default_entity_operation = instance_parser.EntityOperation.UPDATE
entities = {}
for entity_name, entity_yaml in parser.GetEntities().items():
try:
entities[entity_name] = entity_instance.EntityInstance.FromYaml(
entity_yaml, default_entity_operation)
except ValueError:
print('Invalid Entity ' + entity_name)
raise
return entities, parser.GetConfigMode()
def _ValidateConfig(
    filenames: List[str],
    universe: pvt.ConfigUniverse
) -> Dict[str, entity_instance.EntityInstance]:
"""Runs all config validation checks."""
print('\nLoading config files...\n')
entities, config_mode = Deserialize(filenames)
print('\nStarting config validation...\n')
helper = EntityHelper(universe)
return helper.Validate(entities, config_mode)
def _ValidateTelemetry(subscription: str, service_account: str,
entities: Dict[str, entity_instance.EntityInstance],
report_filename: str, timeout: int) -> None:
"""Runs all telemetry validation checks."""
helper = TelemetryHelper(subscription, service_account, report_filename)
helper.Validate(entities, report_filename, timeout)
def RunValidation(filenames: List[str],
modified_types_filepath: str = None,
subscription: str = None,
service_account: str = None,
report_filename: str = None,
timeout: int = 60) -> None:
"""Master runner for all validations."""
if bool(subscription) != bool(service_account):
print('Subscription and a service account file are '
'both needed for the telemetry validation!')
sys.exit(0)
print('\nStarting validator...\n')
print('\nStarting universe generation...\n')
universe = generate_universe.BuildUniverse(modified_types_filepath)
if not universe:
print('\nError generating universe')
sys.exit(0)
print('\nStarting config validation...\n')
entities = _ValidateConfig(filenames, universe)
if subscription:
print('\nStarting telemetry validation...\n')
_ValidateTelemetry(subscription, service_account, entities, report_filename,
timeout)
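# Minimal invocation sketch (assumed; the file names here are hypothetical):
#   RunValidation(['building_config.yaml'],
#                 subscription='projects/p/subscriptions/s',
#                 service_account='service_account.json',
#                 report_filename='report.txt')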
class TelemetryHelper(object):
"""A validation helper to encapsulate telemetry validation.
Attributes:
subscription: resource string referencing the subscription to check
service_account_file: path to file with service account information
report_filename: a report filename provided by the user
"""
def __init__(self, subscription, service_account_file, report_filename=None):
super().__init__()
self.report_filename = report_filename
self.subscription = subscription
self.service_account_file = service_account_file
def Validate(self, entities: Dict[str, entity_instance.EntityInstance],
report_filename: str, timeout: int) -> None:
"""Validates telemetry payload received from the subscription.
Args:
entities: EntityInstance dictionary keyed by entity name
report_filename: path to write results to
timeout: number of seconds to wait for telemetry
"""
print('Connecting to pubsub subscription: ', self.subscription)
sub = subscriber.Subscriber(self.subscription, self.service_account_file)
validator = telemetry_validator.TelemetryValidator(
entities, timeout,
self.BuildTelemetryValidationCallback(report_filename))
validator.StartTimer()
sub.Listen(validator.ValidateMessage)
def BuildTelemetryValidationCallback(
self,
report_filename: Optional[str] = None
) -> Callable[[telemetry_validator.TelemetryValidator], None]:
"""Returns a callback to be called when a telemetry message is received.
Args:
report_filename: path to write results to
"""
def TelemetryValidationCallback(
validator: telemetry_validator.TelemetryValidator) -> None:
"""Callback when the telemetry validator finishes.
This could be called due to a timeout or because telemetry messages were
received and validated for every expected entity.
Args:
validator: the telemetry validator that triggered the callback.
"""
print('Generating validation report ...')
current_time = datetime.now()
timestamp = current_time.strftime('%d-%b-%Y (%H:%M:%S)')
report = '\nReport Generated at: {0}\n'.format(timestamp)
if not validator.AllEntitiesValidated():
report += ('No telemetry message was received for the following '
'entities:')
report += '\n'
for entity_name in validator.GetUnvalidatedEntityNames():
report += ' {0}\n'.format(entity_name)
report += '\nTelemetry validation errors:\n'
for error in validator.GetErrors():
report += error.GetPrintableMessage()
report += '\nTelemetry validation warnings:\n'
for warnings in validator.GetWarnings():
report += warnings.GetPrintableMessage()
if report_filename:
        with open(report_filename, 'w') as f:
          f.write(report)
else:
print('\n')
print(report)
print('Report Generated')
sys.exit(0)
return TelemetryValidationCallback
class EntityHelper(object):
"""A validation helper to coordinate the various steps of the validation.
Attributes:
universe: ConfigUniverse to validate against
"""
def __init__(self, universe: pvt.ConfigUniverse):
super().__init__()
self.universe = universe
def Validate(
self, entities: Dict[str, entity_instance.EntityInstance],
config_mode: instance_parser.ConfigMode
) -> Dict[str, entity_instance.EntityInstance]:
"""Validates entity instances that are already deserialized.
Args:
entities: a dict of entity instances
config_mode: processing mode of the configuration
Returns:
A dictionary containing valid entities by name
Raises:
SyntaxError: If no building is found in the config
"""
print('Validating entities ...')
building_found = False
valid_entities = {}
validator = entity_instance.CombinationValidator(self.universe, config_mode,
entities)
for entity_name, current_entity in entities.items():
if (current_entity.operation is not instance_parser.EntityOperation.DELETE
and current_entity.type_name.lower() == 'building'):
building_found = True
if not validator.Validate(current_entity):
print(entity_name, 'is not a valid instance')
continue
valid_entities[entity_name] = current_entity
if not building_found:
print('Config must contain a non-deleted entity with a building type')
raise SyntaxError('Building Config must contain an '
'entity with a building type')
print('All entities validated')
return valid_entities
| apache-2.0 | 6,749,910,311,691,980,000 | 34.704167 | 80 | 0.690629 | false |
salv-orlando/MyRepo | nova/api/openstack/common.py | 1 | 13765 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import re
import urlparse
from lxml import etree
import webob
from xml.dom import minidom
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova.compute import vm_states
from nova.compute import task_states
from nova import exception
from nova import flags
from nova import ipv6
from nova import log as logging
import nova.network
from nova import quota
LOG = logging.getLogger('nova.api.openstack.common')
FLAGS = flags.FLAGS
XML_NS_V11 = 'http://docs.openstack.org/compute/api/v1.1'
_STATE_MAP = {
vm_states.ACTIVE: {
'default': 'ACTIVE',
task_states.REBOOTING: 'REBOOT',
task_states.REBOOTING_HARD: 'HARD_REBOOT',
task_states.UPDATING_PASSWORD: 'PASSWORD',
task_states.RESIZE_VERIFY: 'VERIFY_RESIZE',
},
vm_states.BUILDING: {
'default': 'BUILD',
},
vm_states.REBUILDING: {
'default': 'REBUILD',
},
vm_states.STOPPED: {
'default': 'STOPPED',
},
vm_states.MIGRATING: {
'default': 'MIGRATING',
},
vm_states.RESIZING: {
'default': 'RESIZE',
},
vm_states.PAUSED: {
'default': 'PAUSED',
},
vm_states.SUSPENDED: {
'default': 'SUSPENDED',
},
vm_states.RESCUED: {
'default': 'RESCUE',
},
vm_states.ERROR: {
'default': 'ERROR',
},
vm_states.DELETED: {
'default': 'DELETED',
},
vm_states.SOFT_DELETE: {
'default': 'DELETED',
},
}
def status_from_state(vm_state, task_state='default'):
"""Given vm_state and task_state, return a status string."""
task_map = _STATE_MAP.get(vm_state, dict(default='UNKNOWN_STATE'))
status = task_map.get(task_state, task_map['default'])
LOG.debug("Generated %(status)s from vm_state=%(vm_state)s "
"task_state=%(task_state)s." % locals())
return status
def vm_state_from_status(status):
"""Map the server status string to a vm state."""
for state, task_map in _STATE_MAP.iteritems():
status_string = task_map.get("default")
if status.lower() == status_string.lower():
return state
def get_pagination_params(request):
"""Return marker, limit tuple from request.
:param request: `wsgi.Request` possibly containing 'marker' and 'limit'
GET variables. 'marker' is the id of the last element
the client has seen, and 'limit' is the maximum number
of items to return. If 'limit' is not specified, 0, or
> max_limit, we default to max_limit. Negative values
for either marker or limit will cause
exc.HTTPBadRequest() exceptions to be raised.
"""
params = {}
if 'limit' in request.GET:
params['limit'] = _get_limit_param(request)
if 'marker' in request.GET:
params['marker'] = _get_marker_param(request)
return params
def _get_limit_param(request):
"""Extract integer limit from request or fail"""
try:
limit = int(request.GET['limit'])
except ValueError:
msg = _('limit param must be an integer')
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit < 0:
msg = _('limit param must be positive')
raise webob.exc.HTTPBadRequest(explanation=msg)
return limit
def _get_marker_param(request):
"""Extract marker id from request or fail"""
return request.GET['marker']
def limited(items, request, max_limit=FLAGS.osapi_max_limit):
"""
Return a slice of items according to requested offset and limit.
@param items: A sliceable entity
@param request: `wsgi.Request` possibly containing 'offset' and 'limit'
GET variables. 'offset' is where to start in the list,
and 'limit' is the maximum number of items to return. If
'limit' is not specified, 0, or > max_limit, we default
to max_limit. Negative values for either offset or limit
will cause exc.HTTPBadRequest() exceptions to be raised.
@kwarg max_limit: The maximum number of items to return from 'items'
"""
try:
offset = int(request.GET.get('offset', 0))
except ValueError:
msg = _('offset param must be an integer')
raise webob.exc.HTTPBadRequest(explanation=msg)
try:
limit = int(request.GET.get('limit', max_limit))
except ValueError:
msg = _('limit param must be an integer')
raise webob.exc.HTTPBadRequest(explanation=msg)
if limit < 0:
msg = _('limit param must be positive')
raise webob.exc.HTTPBadRequest(explanation=msg)
if offset < 0:
msg = _('offset param must be positive')
raise webob.exc.HTTPBadRequest(explanation=msg)
limit = min(max_limit, limit or max_limit)
range_end = offset + limit
return items[offset:range_end]
def limited_by_marker(items, request, max_limit=FLAGS.osapi_max_limit):
"""Return a slice of items according to the requested marker and limit."""
params = get_pagination_params(request)
limit = params.get('limit', max_limit)
marker = params.get('marker')
limit = min(max_limit, limit)
start_index = 0
if marker:
start_index = -1
for i, item in enumerate(items):
if item['id'] == marker or item.get('uuid') == marker:
start_index = i + 1
break
if start_index < 0:
msg = _('marker [%s] not found') % marker
raise webob.exc.HTTPBadRequest(explanation=msg)
range_end = start_index + limit
return items[start_index:range_end]
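# Illustrative behavior of the marker pagination above (a sketch, not taken
# from the original test suite): with items [{'id': 'a'}, {'id': 'b'},
# {'id': 'c'}], marker='a' and limit=1 yield [{'id': 'b'}] -- iteration
# resumes just past the marked item.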
def get_id_from_href(href):
"""Return the id or uuid portion of a url.
Given: 'http://www.foo.com/bar/123?q=4'
Returns: '123'
Given: 'http://www.foo.com/bar/abc123?q=4'
Returns: 'abc123'
"""
return urlparse.urlsplit("%s" % href).path.split('/')[-1]
def remove_version_from_href(href):
"""Removes the first api version from the href.
Given: 'http://www.nova.com/v1.1/123'
Returns: 'http://www.nova.com/123'
Given: 'http://www.nova.com/v1.1'
Returns: 'http://www.nova.com'
"""
parsed_url = urlparse.urlsplit(href)
new_path = re.sub(r'^/v[0-9]+\.[0-9]+(/|$)', r'\1', parsed_url.path,
count=1)
if new_path == parsed_url.path:
msg = _('href %s does not contain version') % href
LOG.debug(msg)
raise ValueError(msg)
parsed_url = list(parsed_url)
parsed_url[2] = new_path
return urlparse.urlunsplit(parsed_url)
def get_version_from_href(href):
"""Returns the api version in the href.
Returns the api version in the href.
If no version is found, 1.0 is returned
Given: 'http://www.nova.com/123'
Returns: '1.0'
Given: 'http://www.nova.com/v1.1'
Returns: '1.1'
"""
try:
#finds the first instance that matches /v#.#/
version = re.findall(r'[/][v][0-9]+\.[0-9]+[/]', href)
#if no version was found, try finding /v#.# at the end of the string
if not version:
version = re.findall(r'[/][v][0-9]+\.[0-9]+$', href)
version = re.findall(r'[0-9]+\.[0-9]', version[0])[0]
except IndexError:
version = '1.0'
return version
def check_img_metadata_quota_limit(context, metadata):
if metadata is None:
return
num_metadata = len(metadata)
quota_metadata = quota.allowed_metadata_items(context, num_metadata)
if quota_metadata < num_metadata:
expl = _("Image metadata limit exceeded")
raise webob.exc.HTTPRequestEntityTooLarge(explanation=expl,
headers={'Retry-After': 0})
def dict_to_query_str(params):
    # TODO: we should just use urllib.urlencode instead of this,
    # but currently we don't work with urlencoded URLs.
param_str = ""
for key, val in params.iteritems():
param_str = param_str + '='.join([str(key), str(val)]) + '&'
return param_str.rstrip('&')
def get_networks_for_instance(context, instance):
"""Returns a prepared nw_info list for passing into the view
builders
We end up with a data structure like:
{'public': {'ips': [{'addr': '10.0.0.1', 'version': 4},
{'addr': '2001::1', 'version': 6}],
'floating_ips': [{'addr': '172.16.0.1', 'version': 4},
{'addr': '172.16.2.1', 'version': 4}]},
...}
"""
def _emit_addr(ip, version):
return {'addr': ip, 'version': version}
networks = {}
fixed_ips = instance['fixed_ips']
ipv6_addrs_seen = {}
for fixed_ip in fixed_ips:
fixed_addr = fixed_ip['address']
network = fixed_ip['network']
vif = fixed_ip.get('virtual_interface')
if not network or not vif:
name = instance['name']
ip = fixed_ip['address']
LOG.warn(_("Instance %(name)s has stale IP "
"address: %(ip)s (no network or vif)") % locals())
continue
label = network.get('label', None)
if label is None:
continue
if label not in networks:
networks[label] = {'ips': [], 'floating_ips': []}
nw_dict = networks[label]
cidr_v6 = network.get('cidr_v6')
if FLAGS.use_ipv6 and cidr_v6:
ipv6_addr = ipv6.to_global(cidr_v6, vif['address'],
network['project_id'])
# Only add same IPv6 address once. It's possible we've
# seen it before if there was a previous fixed_ip with
# same network and vif as this one
if not ipv6_addrs_seen.get(ipv6_addr):
nw_dict['ips'].append(_emit_addr(ipv6_addr, 6))
ipv6_addrs_seen[ipv6_addr] = True
nw_dict['ips'].append(_emit_addr(fixed_addr, 4))
for floating_ip in fixed_ip.get('floating_ips', []):
float_addr = floating_ip['address']
nw_dict['floating_ips'].append(_emit_addr(float_addr, 4))
return networks
class MetadataXMLDeserializer(wsgi.XMLDeserializer):
def extract_metadata(self, metadata_node):
"""Marshal the metadata attribute of a parsed request"""
if metadata_node is None:
return {}
metadata = {}
for meta_node in self.find_children_named(metadata_node, "meta"):
key = meta_node.getAttribute("key")
metadata[key] = self.extract_text(meta_node)
return metadata
def _extract_metadata_container(self, datastring):
dom = minidom.parseString(datastring)
metadata_node = self.find_first_child_named(dom, "metadata")
metadata = self.extract_metadata(metadata_node)
return {'body': {'metadata': metadata}}
def create(self, datastring):
return self._extract_metadata_container(datastring)
def update_all(self, datastring):
return self._extract_metadata_container(datastring)
def update(self, datastring):
dom = minidom.parseString(datastring)
metadata_item = self.extract_metadata(dom)
return {'body': {'meta': metadata_item}}
class MetadataHeadersSerializer(wsgi.ResponseHeadersSerializer):
def delete(self, response, data):
response.status_int = 204
metadata_nsmap = {None: xmlutil.XMLNS_V11}
class MetaItemTemplate(xmlutil.TemplateBuilder):
def construct(self):
sel = xmlutil.Selector('meta', xmlutil.get_items, 0)
root = xmlutil.TemplateElement('meta', selector=sel)
root.set('key', 0)
root.text = 1
return xmlutil.MasterTemplate(root, 1, nsmap=metadata_nsmap)
class MetadataTemplateElement(xmlutil.TemplateElement):
def will_render(self, datum):
return True
class MetadataTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = MetadataTemplateElement('metadata', selector='metadata')
elem = xmlutil.SubTemplateElement(root, 'meta',
selector=xmlutil.get_items)
elem.set('key', 0)
elem.text = 1
return xmlutil.MasterTemplate(root, 1, nsmap=metadata_nsmap)
class MetadataXMLSerializer(xmlutil.XMLTemplateSerializer):
def index(self):
return MetadataTemplate()
def create(self):
return MetadataTemplate()
def update_all(self):
return MetadataTemplate()
def show(self):
return MetaItemTemplate()
def update(self):
return MetaItemTemplate()
def default(self):
return xmlutil.MasterTemplate(None, 1)
def check_snapshots_enabled(f):
@functools.wraps(f)
def inner(*args, **kwargs):
if not FLAGS.allow_instance_snapshots:
LOG.warn(_('Rejecting snapshot request, snapshots currently'
' disabled'))
msg = _("Instance snapshots are not permitted at this time.")
raise webob.exc.HTTPBadRequest(explanation=msg)
return f(*args, **kwargs)
return inner
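# Intended usage, as a sketch (the controller method below is illustrative,
# not taken from this file):
#   @check_snapshots_enabled
#   def _action_create_image(self, req, instance_id, body):
#       ...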
| apache-2.0 | -6,424,597,816,870,545,000 | 31.011628 | 78 | 0.609517 | false |
mariusbaumann/pyload | module/plugins/accounts/PremiumTo.py | 1 | 1286 | # -*- coding: utf-8 -*-
from module.plugins.Account import Account
class PremiumTo(Account):
__name__ = "PremiumTo"
__type__ = "account"
__version__ = "0.06"
__description__ = """Premium.to account plugin"""
__license__ = "GPLv3"
__authors__ = [("RaNaN", "[email protected]"),
("zoidberg", "[email protected]"),
("stickell", "[email protected]")]
def loadAccountInfo(self, user, req):
traffic = req.load("http://premium.to/api/traffic.php",
get={'username': self.username, 'password': self.password})
if "wrong username" not in traffic:
trafficleft = float(traffic.strip()) / 1024 #@TODO: Remove `/ 1024` in 0.4.10
return {'premium': True, 'trafficleft': trafficleft, 'validuntil': -1}
else:
return {'premium': False, 'trafficleft': None, 'validuntil': None}
def login(self, user, data, req):
self.username = user
self.password = data['password']
authcode = req.load("http://premium.to/api/getauthcode.php",
get={'username': user, 'password': self.password}).strip()
if "wrong username" in authcode:
self.wrongPassword()
| gpl-3.0 | 4,571,049,506,377,407,500 | 34.722222 | 90 | 0.547434 | false |
drewmoore/python-koans | python3/koans/about_generators.py | 1 | 4571 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Written in place of AboutBlocks in the Ruby Koans
#
# Note: Both blocks and generators use a yield keyword, but they behave
# a lot differently
#
from runner.koan import *
class AboutGenerators(Koan):
def test_generating_values_on_the_fly(self):
result = list()
bacon_generator = (n + ' bacon' for n in ['crunchy','veggie','danish'])
for bacon in bacon_generator:
result.append(bacon)
self.assertEqual(['crunchy bacon', 'veggie bacon', 'danish bacon'], result)
def test_generators_are_different_to_list_comprehensions(self):
num_list = [x*2 for x in range(1,3)]
num_generator = (x*2 for x in range(1,3))
self.assertEqual(2, num_list[0])
# A generator has to be iterated through.
with self.assertRaises(TypeError): num = num_generator[0]
self.assertEqual(2, list(num_generator)[0])
        # Both list comprehensions and generators can be iterated through.
        # However, a generator function is only called on the first
        # iteration. The values are generated on the fly instead of being
        # stored.
        #
        # Generators are more memory friendly, but less versatile.
def test_generator_expressions_are_a_one_shot_deal(self):
dynamite = ('Boom!' for n in range(3))
attempt1 = list(dynamite)
attempt2 = list(dynamite)
self.assertEqual(['Boom!', 'Boom!', 'Boom!'], list(attempt1))
self.assertEqual([], list(attempt2))
# ------------------------------------------------------------------
def simple_generator_method(self):
yield 'peanut'
yield 'butter'
yield 'and'
yield 'jelly'
def test_generator_method_will_yield_values_during_iteration(self):
result = list()
for item in self.simple_generator_method():
result.append(item)
self.assertEqual(['peanut', 'butter', 'and', 'jelly'], result)
def test_coroutines_can_take_arguments(self):
result = self.simple_generator_method()
self.assertEqual('peanut', next(result))
self.assertEqual('butter', next(result))
result.close()
# ------------------------------------------------------------------
def square_me(self, seq):
for x in seq:
yield x * x
def test_generator_method_with_parameter(self):
result = self.square_me(range(2,5))
self.assertEqual([4, 9, 16], list(result))
# ------------------------------------------------------------------
def sum_it(self, seq):
value = 0
for num in seq:
# The local state of 'value' will be retained between iterations
value += num
yield value
def test_generator_keeps_track_of_local_variables(self):
result = self.sum_it(range(2,5))
self.assertEqual([2, 5, 9], list(result))
# ------------------------------------------------------------------
def generator_with_coroutine(self):
result = yield
yield result
def test_generators_can_take_coroutines(self):
generator = self.generator_with_coroutine()
# THINK ABOUT IT:
# Why is this line necessary?
#
# Hint: Read the "Specification: Sending Values into Generators"
# section of http://www.python.org/dev/peps/pep-0342/
next(generator)
self.assertEqual(3, generator.send(1 + 2))
def test_before_sending_a_value_to_a_generator_next_must_be_called(self):
generator = self.generator_with_coroutine()
try:
generator.send(1+2)
except TypeError as ex:
ex2 = ex
self.assertRegexpMatches(ex2.args[0], 'just-started')
# ------------------------------------------------------------------
def yield_tester(self):
value = yield
if value:
yield value
else:
yield 'no value'
def test_generators_can_see_if_they_have_been_called_with_a_value(self):
generator = self.yield_tester()
next(generator)
self.assertEqual('with value', generator.send('with value'))
generator2 = self.yield_tester()
next(generator2)
self.assertEqual('no value', next(generator2))
def test_send_none_is_equivalent_to_next(self):
generator = self.yield_tester()
next(generator)
# 'next(generator)' is exactly equivalent to 'generator.send(None)'
self.assertEqual('no value', generator.send(None))
| mit | -7,604,709,203,030,380,000 | 30.308219 | 94 | 0.564209 | false |
lkundrak/scraperwiki | uml/httpproxy/swproxy.py | 1 | 1826 | import sys
from twisted.web import proxy, http
from twisted.python import log
log.startLogging(sys.stdout)
class ScraperProxyClient(proxy.ProxyClient):
def handleHeader( self, key, value ):
proxy.ProxyClient.handleHeader(self, key, value)
def handleResponsePart(self, data):
proxy.ProxyClient.handleResponsePart(self,data)
def handleResponseEnd(self):
proxy.ProxyClient.handleResponseEnd(self)
class ScraperProxyClientFactory(proxy.ProxyClientFactory):
def buildProtocol(self, addr):
client = proxy.ProxyClientFactory.buildProtocol(self, addr)
client.__class__ = ScraperProxyClient
return client
class ScraperProxyRequest(proxy.ProxyRequest):
protocols = { 'http': ScraperProxyClientFactory }
def __init__(self, *args):
proxy.ProxyRequest.__init__(self, *args)
def process(self):
        # TODO: Process self.uri to see if we are allowed to access it.
        # We probably want to do an ident with the current controller and
        # probably a notify as well. Once we know we can carry on, we
        # should proceed with the request.
proxy.ProxyRequest.process(self)
class ScraperProxy(proxy.Proxy):
def __init__(self):
proxy.Proxy.__init__(self)
def requestFactory(self, *args):
return ScraperProxyRequest(*args)
class ScraperProxyFactory(http.HTTPFactory):
def __init__(self):
http.HTTPFactory.__init__(self)
def buildProtocol(self, addr):
protocol = ScraperProxy()
return protocol
if __name__ == '__main__':
from twisted.internet import reactor
px = ScraperProxyFactory()
reactor.listenTCP(9000, px)
reactor.run()
| agpl-3.0 | -5,316,903,653,732,609,000 | 24.375 | 74 | 0.629244 | false |
cperrin88/PAMMySQLTools | doc/source/conf.py | 1 | 10650 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# PAMMySQLTools documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 11 02:34:00 2016.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
#import sys
#import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('../../'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'PAMMySQLTools'
copyright = '2016, Christopher Perrin'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1.1'
# The full version, including alpha/beta/rc tags.
release = '0.1.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'PAMMySQLToolsdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'PAMMySQLTools.tex', 'PAMMySQLTools Documentation',
'Christopher Perrin', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'pammysqltools', 'PAMMySQLTools Documentation',
['Christopher Perrin'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'PAMMySQLTools', 'PAMMySQLTools Documentation',
'Christopher Perrin', 'PAMMySQLTools', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = 'PAMMySQLTools'
epub_author = 'Christopher Perrin'
epub_publisher = 'Christopher Perrin'
epub_copyright = '2016, Christopher Perrin'
# The basename for the epub file. It defaults to the project name.
#epub_basename = 'PAMMySQLTools'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
| mit | -9,175,278,118,781,340,000 | 30.231672 | 80 | 0.709014 | false |
BartMassey/oregon-mileage | reduce-mileages.py | 1 | 1800 | # Copyright © 2014 Bart Massey
# [This work is licensed under the "MIT License"]
# Please see the file COPYING in the source
# distribution of this software for license terms.
# Given a mileage list, reduce it to a minimal set of edges
# that imply the same mileages.
# Strategy: Fill the whole list of edges into the
# result, then loop over all triples deleting implied edges.
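#
# A hedged input sketch: stdin is expected to carry one edge per line as
# "city1 city2 miles". In the toy graph below, the direct A-C edge equals
# A-B plus B-C, so the reduction would drop it as redundant:
#
#   A B 10
#   B C 20
#   A C 30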
from sys import stdin, stderr
def read_graph(f):
vs = set()
es = {}
for line in f:
(v1, v2, w) = line.split()
w = int(w)
vs |= {v1}
vs |= {v2}
e = tuple({v1, v2})
if e in es and es[e] != w:
print("warning: edge {%s, %s} mismatch %d / %d" % \
(v1, v2, es[e], w), file=stderr)
continue
es[e] = w
return (vs, es)
def reduced(g):
(vs, es) = g
res = es.copy()
for e in es:
for v in vs:
if e not in res:
continue
(v1, v2) = e
if v1 == v or v2 == v:
continue
e1 = tuple({v1, v})
e2 = tuple({v, v2})
if e1 not in es or e2 not in es:
continue
if es[e1] > es[e] or es[e2] > es[e]:
continue
if es[e1] + es[e2] > es[e]:
continue
if es[e1] + es[e2] < es[e]:
print("triangle error: %d vs %s-%d-%s-%d-%s" % \
(es[e], v1, es[e1], v, es[e2], v2), file=stderr)
continue
print("removing redundant %s %s" % (v1, v2), file=stderr)
del res[e]
return (vs, res)
def write_graph(g):
(_, es) = g
for e in es:
(v1, v2) = e
w = es[e]
print(v1, v2, w)
g = read_graph(stdin)
rg = reduced(g)
write_graph(rg)
| mit | -6,409,248,396,742,954,000 | 26.676923 | 70 | 0.473596 | false |
eukaryote/asn1crypto | asn1crypto/tsp.py | 1 | 8059 | # coding: utf-8
"""
ASN.1 type classes for the time stamp protocol (TSP). Exports the following
items:
- TimeStampReq()
- TimeStampResp()
Also adds TimeStampedData() support to asn1crypto.cms.ContentInfo(),
TimeStampedData() and TSTInfo() support to
asn1crypto.cms.EncapsulatedContentInfo() and some oids and value parsers to
asn1crypto.cms.CMSAttribute().
Other type classes are defined that help compose the types listed above.
"""
from __future__ import unicode_literals, division, absolute_import, print_function
from .algos import DigestAlgorithm
from .cms import (
CMSAttribute,
CMSAttributeType,
ContentInfo,
ContentType,
EncapsulatedContentInfo,
)
from .core import (
Any,
BitString,
Boolean,
Choice,
GeneralizedTime,
IA5String,
Integer,
ObjectIdentifier,
OctetString,
Sequence,
SequenceOf,
SetOf,
UTF8String,
)
from .crl import CertificateList
from .x509 import (
Attributes,
CertificatePolicies,
GeneralName,
GeneralNames,
)
# The structures in this file are based on https://tools.ietf.org/html/rfc3161,
# https://tools.ietf.org/html/rfc4998, https://tools.ietf.org/html/rfc5544,
# https://tools.ietf.org/html/rfc5035, https://tools.ietf.org/html/rfc2634
class Version(Integer):
_map = {
0: 'v0',
1: 'v1',
2: 'v2',
3: 'v3',
4: 'v4',
5: 'v5',
}
class MessageImprint(Sequence):
_fields = [
('hash_algorithm', DigestAlgorithm),
('hashed_message', OctetString),
]
class Accuracy(Sequence):
_fields = [
('seconds', Integer, {'optional': True}),
('millis', Integer, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
('micros', Integer, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
]
class Extension(Sequence):
_fields = [
('extn_id', ObjectIdentifier),
('critical', Boolean, {'default': False}),
('extn_value', OctetString),
]
class Extensions(SequenceOf):
_child_spec = Extension
class TSTInfo(Sequence):
_fields = [
('version', Version),
('policy', ObjectIdentifier),
('message_imprint', MessageImprint),
('serial_number', Integer),
('gen_time', GeneralizedTime),
('accuracy', Accuracy, {'optional': True}),
('ordering', Boolean, {'default': False}),
('nonce', Integer, {'optional': True}),
('tsa', GeneralName, {'tag_type': 'explicit', 'tag': 0, 'optional': True}),
('extensions', Extensions, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
]
class TimeStampReq(Sequence):
_fields = [
('version', Version),
('message_imprint', MessageImprint),
('req_policy', ObjectIdentifier, {'optional': True}),
('nonce', Integer, {'optional': True}),
('cert_req', Boolean, {'default': False}),
('extensions', Extensions, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
]
class PKIStatus(Integer):
_map = {
0: 'granted',
1: 'granted_with_mods',
2: 'rejection',
3: 'waiting',
4: 'revocation_warning',
5: 'revocation_notification',
}
class PKIFreeText(SequenceOf):
_child_spec = UTF8String
class PKIFailureInfo(BitString):
_map = {
0: 'bad_alg',
2: 'bad_request',
5: 'bad_data_format',
14: 'time_not_available',
15: 'unaccepted_policy',
16: 'unaccepted_extensions',
17: 'add_info_not_available',
25: 'system_failure',
}
class PKIStatusInfo(Sequence):
_fields = [
('status', PKIStatus),
('status_string', PKIFreeText, {'optional': True}),
('fail_info', PKIFailureInfo, {'optional': True}),
]
class TimeStampResp(Sequence):
_fields = [
('status', PKIStatusInfo),
('time_stamp_token', ContentInfo),
]
class MetaData(Sequence):
_fields = [
('hash_protected', Boolean),
('file_name', UTF8String, {'optional': True}),
('media_type', IA5String, {'optional': True}),
('other_meta_data', Attributes, {'optional': True}),
]
class TimeStampAndCRL(SequenceOf):
_fields = [
('time_stamp', EncapsulatedContentInfo),
('crl', CertificateList, {'optional': True}),
]
class TimeStampTokenEvidence(SequenceOf):
_child_spec = TimeStampAndCRL
class DigestAlgorithms(SequenceOf):
_child_spec = DigestAlgorithm
class EncryptionInfo(Sequence):
_fields = [
('encryption_info_type', ObjectIdentifier),
('encryption_info_value', Any),
]
class PartialHashtree(SequenceOf):
_child_spec = OctetString
class PartialHashtrees(SequenceOf):
_child_spec = PartialHashtree
class ArchiveTimeStamp(Sequence):
_fields = [
('digest_algorithm', DigestAlgorithm, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
('attributes', Attributes, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
('reduced_hashtree', PartialHashtrees, {'tag_type': 'implicit', 'tag': 2, 'optional': True}),
('time_stamp', ContentInfo),
]
class ArchiveTimeStampSequence(SequenceOf):
_child_spec = ArchiveTimeStamp
class EvidenceRecord(Sequence):
_fields = [
('version', Version),
('digest_algorithms', DigestAlgorithms),
('crypto_infos', Attributes, {'tag_type': 'implicit', 'tag': 0, 'optional': True}),
('encryption_info', EncryptionInfo, {'tag_type': 'implicit', 'tag': 1, 'optional': True}),
('archive_time_stamp_sequence', ArchiveTimeStampSequence),
]
class OtherEvidence(Sequence):
_fields = [
('oe_type', ObjectIdentifier),
('oe_value', Any),
]
class Evidence(Choice):
_alternatives = [
('tst_evidence', TimeStampTokenEvidence, {'tag_type': 'implicit', 'tag': 0}),
('ers_evidence', EvidenceRecord, {'tag_type': 'implicit', 'tag': 1}),
('other_evidence', OtherEvidence, {'tag_type': 'implicit', 'tag': 2}),
]
class TimeStampedData(Sequence):
_fields = [
('version', Version),
('data_uri', IA5String, {'optional': True}),
('meta_data', MetaData, {'optional': True}),
('content', OctetString, {'optional': True}),
('temporal_evidence', Evidence),
]
class IssuerSerial(Sequence):
_fields = [
('issuer', GeneralNames),
('serial_number', Integer),
]
class ESSCertID(Sequence):
_fields = [
('cert_hash', OctetString),
('issuer_serial', IssuerSerial, {'optional': True}),
]
class ESSCertIDs(SequenceOf):
_child_spec = ESSCertID
class SigningCertificate(Sequence):
_fields = [
('certs', ESSCertIDs),
('policies', CertificatePolicies, {'optional': True}),
]
class SetOfSigningCertificates(SetOf):
_child_spec = SigningCertificate
class ESSCertIDv2(Sequence):
_fields = [
('hash_algorithm', DigestAlgorithm, {'default': 'sha256'}),
('cert_hash', OctetString),
('issuer_serial', IssuerSerial, {'optional': True}),
]
class ESSCertIDv2s(SequenceOf):
_child_spec = ESSCertIDv2
class SigningCertificateV2(Sequence):
_fields = [
('certs', ESSCertIDv2s),
('policies', CertificatePolicies, {'optional': True}),
]
class SetOfSigningCertificatesV2(SetOf):
_child_spec = SigningCertificateV2
EncapsulatedContentInfo._oid_specs['tst_info'] = TSTInfo
EncapsulatedContentInfo._oid_specs['timestamped_data'] = TimeStampedData
ContentInfo._oid_specs['timestamped_data'] = TimeStampedData
ContentType._map['1.2.840.113549.1.9.16.1.4'] = 'tst_info'
ContentType._map['1.2.840.113549.1.9.16.1.31'] = 'timestamped_data'
CMSAttributeType._map['1.2.840.113549.1.9.16.2.12'] = 'signing_certificate'
CMSAttribute._oid_specs['signing_certificate'] = SetOfSigningCertificates
CMSAttributeType._map['1.2.840.113549.1.9.16.2.47'] = 'signing_certificate_v2'
CMSAttribute._oid_specs['signing_certificate_v2'] = SetOfSigningCertificatesV2
| mit | -1,249,433,928,822,161,400 | 24.996774 | 101 | 0.617446 | false |
zjurelinac/pyFrisc | assembler.py | 1 | 14214 | from itertools import chain
from math import ceil, log
from utils import *
import os.path
import re
import sys
import yaml
data = dict()
labels = dict()
memory = []
maxnum = 0
# Helper
def show_error( s ):
return s, False
# Helper
def round_to_word( i ):
return int( int( i/data[ 'consts' ][ 'words_per_line' ] + 1 ) * data[ 'consts' ][ 'words_per_line' ]
if i%data[ 'consts' ][ 'words_per_line' ] != 0 else i )
# Helper?
def args_len( args ):
n = 0
while args:
_, args = parse_constant( args )
n += 1
return n
# Procedure
def place_in_mem( res, n ):
for i in range( 0, data[ 'consts' ][ 'words_per_line' ] ):
memory[ n ] = res[ data[ 'consts' ][ 'word_size' ]*i : data[ 'consts' ][ 'word_size' ]*(i+1) ]
n += 1
# Helper
def to_little_endian( x, n ):
i = 0
arr = []
for i in range( 0, n ):
arr.append( x[ data[ 'consts' ][ 'word_size' ]*i : data[ 'consts' ][ 'word_size' ]*(i+1) ] )
return ''.join( reversed( arr ) )
# Function, helper
def parse_constant( args, leftovers = True ):
if not args:
raise ValueError( 'Nothing to parse.' )
if args[ 0 ][ 0 ] == '%':
r = int( args[ 1 ], data[ 'consts' ][ 'base_code' ][ args[ 0 ][ 1 ] ] )
a = args[ 2: ] if len( args ) > 2 else []
elif args[ 0 ][ 0 ].isdigit():
r = int( args[ 0 ], data[ 'consts' ][ 'default_base' ] )
a = args[ 1: ] if len( args ) > 1 else []
elif args[ 0 ][ 0 ] == '-':
r = -int( args[ 1 ] , data[ 'consts' ][ 'default_base' ] )
a = args[ 2: ] if len( args ) > 2 else []
elif args[ 0 ] in labels:
r = labels[ args[ 0 ] ]
a = args[ 1: ] if len( args ) > 1 else []
else:
raise ValueError( 'Unknown arguments, cannot parse.' )
if abs( r ) > 2**32:
raise ValueError( 'Constant larger than 32 bits.' )
if not leftovers:
if a: raise ValueError( 'Extra symbols in line.' )
else: return r
else:
return [ r, a ]
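# Example (hedged) token streams for parse_constant, assuming the YAML
# definitions set default_base to 10 and map base code 'H' to 16:
#   parse_constant( [ '%H', '1F', 'R0' ] )  ->  [ 31, [ 'R0' ] ]
#   parse_constant( [ '42' ], False )       ->  42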
# Function, helper
def parse_reg( arg ):
return data[ 'codes' ][ 'REG' ][ arg ]
# Function, helper
def parse_src2( args ):
try:
res = parse_reg( args[ 0 ] ), args[ 1: ]
except KeyError:
res = parse_constant( args, True )
res[ 0 ] = extend20( res[ 0 ] )
return res
# Function
def parse_aluop( cmd, args ):
src1 = parse_reg( args[ 0 ] )
src2, args = parse_src2( args[ 1: ] )
dest = parse_reg( args[ 0 ] )
result = to_little_endian( data[ 'codes' ][ 'ALU' ][ cmd ] + ( '0' if len( src2 ) == 3 else '1' )
+ dest + src1 + src2 + ( '0'*17 if len( src2 ) != 20 else '' ), data[ 'consts' ][ 'words_per_line' ] )
return result
# Function
def parse_memop( cmd, args ):
reg = parse_reg( args[ 0 ] )
result = data[ 'codes' ][ 'MEM' ][ cmd ]
if args[ 1 ] != '(':
raise ValueError
try:
loc = parse_reg( args[ 2 ] )
shift = 0
sign = '+'
if args[ 3 ] != ')':
sign = args[ 3 ]
shift, args = parse_constant( args[ 4: ], True )
if len( args ) != 1 and args[ 0 ] != ')':
raise ValueError( 'Incorrect command form.' )
shift = extend20( ( -1 if sign == '-' else 1 ) * shift )
result += '1' + reg + loc + shift
except KeyError:
if args[ 2 ] in labels:
loc = labels[ args[ 2 ] ]
if args[ 3 ] != ')':
raise ValueError( 'Incorrect absolute addressing.' )
else:
loc, args = parse_constant( args[ 2: ], True )
if len( args ) != 1 and args[ 0 ] != '=':
raise ValueError( 'Incorrect command form.' )
loc = extend20( loc )
result += '0' + reg + '000' + loc
result = to_little_endian( result, data[ 'consts' ][ 'words_per_line' ] )
return result
# Function
def parse_stackop( cmd, args ):
dest = parse_reg( args[ 0 ] )
result = to_little_endian( data[ 'codes' ][ 'STACK' ][ cmd ] + '0' + dest + '0'*23, data[ 'consts' ][ 'words_per_line' ] )
return result
# Function
def parse_ctrlop( cmd, args ):
if args[ 0 ] == '_':
flag = args[ 1 ]
args = args[ 2: ] if len( args ) > 2 else []
else: flag = '$'
if args[ 0 ] == '(':
op = '0'
loc = parse_reg( args[ 1 ] ) + '0'*17
else:
op = '1'
loc = extend20( parse_constant( args, False ) )
result = to_little_endian( data[ 'codes' ][ 'CTRL' ][ cmd ] + op + data[ 'codes' ][ 'COND' ][ flag ]
+ '00' + loc, data[ 'consts' ][ 'words_per_line' ] )
return result
# Function
def parse_retop( cmd, args ):
flag = args[ 1 ] if args and args[ 0 ] == '_' else '$'
result = to_little_endian( data[ 'codes' ][ 'RET' ][ cmd ] + '0' + data[ 'codes' ][ 'COND' ][ flag ]
+ 20*'0' + data[ 'codes' ][ 'RET_CODE' ][ cmd ], data[ 'consts' ][ 'words_per_line' ] )
return result
# Function
def parse_moveop( cmd, args ):
a = '0'
src = '000'
srcSR = False
dest = '000'
destSR = False
if args[ 0 ] == 'SR':
args = args[ 1: ]
srcSR = True
elif args[ 0 ] in data[ 'codes' ][ 'REG' ]:
src = parse_reg( args[ 0 ] )
args = args[ 1: ]
else:
a = '1'
src, args = parse_constant( args, True )
src = extend20( src )
if args[ 0 ] != 'SR':
dest = parse_reg( args[ 0 ] )
else:
destSR = True
result = to_little_endian( data[ 'codes' ][ 'MOVE' ] + a + dest + '0' + '{:b}{:b}'.format( srcSR, destSR )
+ src + ( '0'*17 if len( src ) == 3 else '' ), data[ 'consts' ][ 'words_per_line' ] )
return result
# Function
def parse_jr( cmd, args, n ):
if args[ 0 ] == '_':
flag = args[ 1 ]
args = args[ 2: ] if len( args ) > 2 else []
else: flag = '$'
# TODO: Beware, if label, that's ok, if a bare number, NOT OK, won't jump N places forward but to address N
offset = parse_constant( args, False )
result = to_little_endian( data[ 'codes' ][ 'JR' ] + '1' + data[ 'codes' ][ 'COND' ][ flag ] + '00'
+ extend20( offset - n - 4 ), data[ 'consts' ][ 'words_per_line' ] )
return result
# Function
def parse_cmp( cmd, args ):
src1 = parse_reg( args[ 0 ] )
src2, args = parse_src2( args[ 1: ] )
result = to_little_endian( data[ 'codes' ][ 'CMP' ] + ( '0' if len( src2 ) == 3 else '1' )
+ '000' + src1 + src2 + ( '0'*17 if len( src2 ) != 20 else '' ), data[ 'consts' ][ 'words_per_line' ] )
return result
# Function
def define_data( cmd, args, n ):
if len(args) < 1:
raise ValueError('Incorrect command format.')
size = data[ 'consts' ][ 'define_data' ][ cmd ]*data[ 'consts' ][ 'word_size' ]
p = []
while args:
x, args = parse_constant( args )
if not fits_into( x, size ):
raise ValueError( 'Cannot place data in memory, {} is too big for {} bits.'.format( x, size ) )
t = to_little_endian( ( '{:0>' + str( size ) + 'b}' ).format( x ), size // data[ 'consts' ][ 'word_size' ] )
p.append( t )
    # size is in bits; store the value in word-sized chunks
    for i in range( 0, size // data[ 'consts' ][ 'word_size' ] ):
y = t[ data[ 'consts' ][ 'word_size' ]*i : data[ 'consts' ][ 'word_size' ]*(i+1) ]
memory[ n ] = y
n += 1
return p
# Function
def define_space( cmd, args, n ):
    length = parse_constant( args, False )
    for i in range( 0, length ):
        memory[ n+i ] = '0'*data[ 'consts' ][ 'word_size' ]
    return [ '0'*data[ 'consts' ][ 'line_size' ] ]* ceil( length/data[ 'consts' ][ 'word_size' ] )
# Function
def parse_lines( ls ):
lines = []
num = 0
for l in ls:
res = { 'original' : l }
sl = l.upper().split( ';', maxsplit = 1 )[ 0 ]
if sl:
res[ 'blank' ] = False
if sl[ 0 ].isspace(): lab = ''
else:
t = sl.split( maxsplit = 1 )
lab = t[ 0 ]
sl = t[ 1 ] if len( t ) > 1 else ''
ls = re.split( data[ 'consts' ][ 'separators' ], sl.strip() )
res[ 'cmd' ] = ls[ 0 ]
res[ 'args' ] = [ x for x in ls[ 1: ] if x ] if len( ls ) > 1 else []
if not res[ 'cmd' ]: res[ 'blank' ] = True
if res[ 'cmd' ] == data[ 'consts' ][ 'origin_cmd' ]:
nnum = round_to_word( parse_constant( res[ 'args' ] )[ 0 ] )
if nnum < num:
raise ValueError( res[ 'original' ] + ' :: Impossible origin, location too small' )
num = nnum
if lab: labels[ lab ] = num
res[ 'blank' ] = True
elif res[ 'cmd' ] == data[ 'consts' ][ 'equals_cmd' ]:
if lab: labels[ lab ] = parse_constant( res[ 'args' ] )[ 0 ]
res[ 'blank' ] = True
elif res[ 'cmd' ] in data[ 'consts' ][ 'define_data' ]:
if lab: labels[ lab ] = num
res[ 'num' ] = num
num += round_to_word( args_len( res[ 'args' ] )*data[ 'consts' ][ 'define_data' ][ res[ 'cmd' ] ] )
elif res[ 'cmd' ] == data[ 'consts' ][ 'define_space' ]:
if lab: labels[ lab ] = num
res[ 'num' ] = num
num += round_to_word( parse_constant( res[ 'args' ] )[ 0 ] )
elif res[ 'cmd' ]:
if lab: labels[ lab ] = num
res[ 'num' ] = num
num += data[ 'consts' ][ 'words_per_line' ]
else:
if lab: labels[ lab ] = num
if 'num' not in res:
res[ 'num' ] = -1
else:
res[ 'blank' ] = True
res[ 'num' ] = -1
lines.append( res )
if num >= data[ 'consts' ][ 'max_memory' ]:
raise ValueError( 'Too much memory used' )
global maxnum
maxnum = num
return lines
# Main function
def assemble( f ):
""" Assembles the contents of a file f
This function takes a name f of a file containing FRISC assembler code,
and translates it into machine code.
Two new files are created:
    1. readable file containing the machine code together with its source
2. binary file containing only the machine code
"""
global data, memory, maxnum
data = yaml.load( open( 'config/definitions/frisc.lang.yaml', 'r' ).read() )
memory = [ '00000000' ] * data[ 'consts' ][ 'max_memory' ]
try:
pls = parse_lines( open( f ).read().splitlines() )
except Exception as e:
return show_error( 'An error occurred in first pass: ' + str( e ) )
adr_len = data[ 'consts' ][ 'line_size' ] // 4
prt_len = len( bin_to_pretty_hex( '0' * data[ 'consts' ][ 'line_size' ] ) )
path = os.path.abspath( f )
base = path.rsplit( '.', maxsplit = 1 )[ 0 ]
pfile = open( base + '.p', 'w' )
# efile = open( base + '.e', 'wb' )
j = 1
for p in pls:
if not p[ 'blank' ]:
try:
multiple = False
if p[ 'cmd' ] == 'END':
break
elif p[ 'cmd' ] in data[ 'codes' ][ 'ALU' ]:
p[ 'parsed' ] = parse_aluop( p[ 'cmd' ], p[ 'args' ] )
place_in_mem( p[ 'parsed' ], p[ 'num' ] )
elif p[ 'cmd' ] in data[ 'codes' ][ 'MEM' ]:
p[ 'parsed' ] = parse_memop( p[ 'cmd' ], p[ 'args' ] )
place_in_mem( p[ 'parsed' ], p[ 'num' ] )
elif p[ 'cmd' ] in data[ 'codes' ][ 'STACK' ]:
p[ 'parsed' ] = parse_stackop( p[ 'cmd' ], p[ 'args' ] )
place_in_mem( p[ 'parsed' ], p[ 'num' ] )
elif p[ 'cmd' ] in data[ 'codes' ][ 'CTRL' ]:
p[ 'parsed' ] = parse_ctrlop( p[ 'cmd' ], p[ 'args' ] )
place_in_mem( p[ 'parsed' ], p[ 'num' ] )
elif p[ 'cmd' ] in data[ 'codes' ][ 'RET' ]:
p[ 'parsed' ] = parse_retop( p[ 'cmd' ], p[ 'args' ] )
place_in_mem( p[ 'parsed' ], p[ 'num' ] )
elif p[ 'cmd' ] == 'MOVE':
p[ 'parsed' ] = parse_moveop( p[ 'cmd' ], p[ 'args' ] )
place_in_mem( p[ 'parsed' ], p[ 'num' ] )
elif p[ 'cmd' ] == 'CMP':
p[ 'parsed' ] = parse_cmp( p[ 'cmd' ], p[ 'args' ] )
place_in_mem( p[ 'parsed' ], p[ 'num' ] )
elif p[ 'cmd' ] == 'JR':
p[ 'parsed' ] = parse_jr( p[ 'cmd' ], p[ 'args' ], p[ 'num' ] )
place_in_mem( p[ 'parsed' ], p[ 'num' ] )
elif p[ 'cmd' ] in data[ 'consts' ][ 'define_data' ]:
p[ 'parsed' ] = define_data( p[ 'cmd' ], p[ 'args' ], p[ 'num' ] )
multiple = True
elif p[ 'cmd' ] == data[ 'consts' ][ 'define_space' ]:
p[ 'blank' ] = True
else:
print( p )
raise ValueError( 'Unknown command' )
except Exception as e:
return show_error( 'An error occurred in second pass on line ' + str( j )
+ ':' + p[ 'original' ] + ' :: ' + str( e ) )
if p[ 'blank' ]:
pfile.write(( ' ' * ( adr_len + prt_len + 4 ) + p[ 'original' ] )
[ :data[ 'consts' ][ 'max_source_line_length' ] ] + '\n')
else:
if multiple:
pfile.write(( ('{:0>' + str( adr_len ) + 'X} ' ).format( p[ 'num' ] ) +
bin_to_pretty_hex( p[ 'parsed' ][ 0 ] ) + ' ' + p[ 'original' ] )
[ :data[ 'consts' ][ 'max_source_line_length' ] ] + '\n')
for i in p[ 'parsed' ][ 1: ]:
pfile.write( ' '*( adr_len + 2 ) + bin_to_pretty_hex( i ) + '\n' )
else:
pfile.write(( ('{:0>' + str( adr_len ) + 'X} ' ).format( p[ 'num' ] ) +
bin_to_pretty_hex( p[ 'parsed' ] ) + ' ' + p[ 'original' ] )
[ :data[ 'consts' ][ 'max_source_line_length' ] ] + '\n')
j += 1
pfile.close()
# efile.close()
return 'Source successfully assembled.', True
if __name__ == "__main__":
print( assemble( sys.argv[ 1 ] ) )
| mit | 4,892,226,237,462,093,000 | 34.358209 | 126 | 0.459195 | false |
rscarson/Lavendeux | bin/extensions/pokedex.py | 1 | 11842 | from lavendeux import Types, Errors
def call(args):
# Check number of arguments
if len(args) != 1:
return (Types.ERROR, Errors.INVALID_ARGS)
if isinstance(args[0], basestring) and str(args[0].capitalize()) in pokedex.values():
return (Types.INT, pokedex.keys()[pokedex.values().index(str(args[0].capitalize()))])
elif int(args[0]) in pokedex.keys():
return (Types.STRING, pokedex[int(args[0])])
return (Types.STRING, 'MissingNo')
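# Example (hedged) round trips through call():
#   call( [ 25 ] )         -> (Types.STRING, 'Pikachu')
#   call( [ 'pikachu' ] )  -> (Types.INT, 25)
#   call( [ 0 ] )          -> (Types.STRING, 'MissingNo')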
def help():
return "Function taking 1 argument. A pokemon's name, returning it's ID number or a a pokedex ID, returning it's name"
pokedex = {
1: 'Bulbasaur',
2: 'Ivysaur',
3: 'Venusaur',
4: 'Charmander',
5: 'Charmeleon',
6: 'Charizard',
7: 'Squirtle',
8: 'Wartortle',
9: 'Blastoise',
10: 'Caterpie',
11: 'Metapod',
12: 'Butterfree',
13: 'Weedle',
14: 'Kakuna',
15: 'Beedrill',
16: 'Pidgey',
17: 'Pidgeotto',
18: 'Pidgeot',
19: 'Rattata',
20: 'Raticate',
21: 'Spearow',
22: 'Fearow',
23: 'Ekans',
24: 'Arbok',
25: 'Pikachu',
26: 'Raichu',
27: 'Sandshrew',
28: 'Sandslash',
    29: 'Nidoran♀',
30: 'Nidorina',
31: 'Nidoqueen',
    32: 'Nidoran♂',
33: 'Nidorino',
34: 'Nidoking',
35: 'Clefairy',
36: 'Clefable',
37: 'Vulpix',
38: 'Ninetales',
39: 'Jigglypuff',
40: 'Wigglytuff',
41: 'Zubat',
42: 'Golbat',
43: 'Oddish',
44: 'Gloom',
45: 'Vileplume',
46: 'Paras',
47: 'Parasect',
48: 'Venonat',
49: 'Venomoth',
50: 'Diglett',
51: 'Dugtrio',
52: 'Meowth',
53: 'Persian',
54: 'Psyduck',
55: 'Golduck',
56: 'Mankey',
57: 'Primeape',
58: 'Growlithe',
59: 'Arcanine',
60: 'Poliwag',
61: 'Poliwhirl',
62: 'Poliwrath',
63: 'Abra',
64: 'Kadabra',
65: 'Alakazam',
66: 'Machop',
67: 'Machoke',
68: 'Machamp',
69: 'Bellsprout',
70: 'Weepinbell',
71: 'Victreebel',
72: 'Tentacool',
73: 'Tentacruel',
74: 'Geodude',
75: 'Graveler',
76: 'Golem',
77: 'Ponyta',
78: 'Rapidash',
79: 'Slowpoke',
80: 'Slowbro',
81: 'Magnemite',
82: 'Magneton',
    83: 'Farfetch\'d',
84: 'Doduo',
85: 'Dodrio',
86: 'Seel',
87: 'Dewgong',
88: 'Grimer',
89: 'Muk',
90: 'Shellder',
91: 'Cloyster',
92: 'Gastly',
93: 'Haunter',
94: 'Gengar',
95: 'Onix',
96: 'Drowzee',
97: 'Hypno',
98: 'Krabby',
99: 'Kingler',
100: 'Voltorb',
101: 'Electrode',
102: 'Exeggcute',
103: 'Exeggutor',
104: 'Cubone',
105: 'Marowak',
106: 'Hitmonlee',
107: 'Hitmonchan',
108: 'Lickitung',
109: 'Koffing',
110: 'Weezing',
111: 'Rhyhorn',
112: 'Rhydon',
113: 'Chansey',
114: 'Tangela',
115: 'Kangaskhan',
116: 'Horsea',
117: 'Seadra',
118: 'Goldeen',
119: 'Seaking',
120: 'Staryu',
121: 'Starmie',
    122: 'Mr. Mime',
123: 'Scyther',
124: 'Jynx',
125: 'Electabuzz',
126: 'Magmar',
127: 'Pinsir',
128: 'Tauros',
129: 'Magikarp',
130: 'Gyarados',
131: 'Lapras',
132: 'Ditto',
133: 'Eevee',
134: 'Vaporeon',
135: 'Jolteon',
136: 'Flareon',
137: 'Porygon',
138: 'Omanyte',
139: 'Omastar',
140: 'Kabuto',
141: 'Kabutops',
142: 'Aerodactyl',
143: 'Snorlax',
144: 'Articuno',
145: 'Zapdos',
146: 'Moltres',
147: 'Dratini',
148: 'Dragonair',
149: 'Dragonite',
150: 'Mewtwo',
151: 'Mew',
152: 'Chikorita',
153: 'Bayleef',
154: 'Meganium',
155: 'Cyndaquil',
156: 'Quilava',
157: 'Typhlosion',
158: 'Totodile',
159: 'Croconaw',
160: 'Feraligatr',
161: 'Sentret',
162: 'Furret',
163: 'Hoothoot',
164: 'Noctowl',
165: 'Ledyba',
166: 'Ledian',
167: 'Spinarak',
168: 'Ariados',
169: 'Crobat',
170: 'Chinchou',
171: 'Lanturn',
172: 'Pichu',
173: 'Cleffa',
174: 'Igglybuff',
175: 'Togepi',
176: 'Togetic',
177: 'Natu',
178: 'Xatu',
179: 'Mareep',
180: 'Flaaffy',
181: 'Ampharos',
182: 'Bellossom',
183: 'Marill',
184: 'Azumarill',
185: 'Sudowoodo',
186: 'Politoed',
187: 'Hoppip',
188: 'Skiploom',
189: 'Jumpluff',
190: 'Aipom',
191: 'Sunkern',
192: 'Sunflora',
193: 'Yanma',
194: 'Wooper',
195: 'Quagsire',
196: 'Espeon',
197: 'Umbreon',
198: 'Murkrow',
199: 'Slowking',
200: 'Misdreavus',
201: 'Unown',
202: 'Wobbuffet',
203: 'Girafarig',
204: 'Pineco',
205: 'Forretress',
206: 'Dunsparce',
207: 'Gligar',
208: 'Steelix',
209: 'Snubbull',
210: 'Granbull',
211: 'Qwilfish',
212: 'Scizor',
213: 'Shuckle',
214: 'Heracross',
215: 'Sneasel',
216: 'Teddiursa',
217: 'Ursaring',
218: 'Slugma',
219: 'Magcargo',
220: 'Swinub',
221: 'Piloswine',
222: 'Corsola',
223: 'Remoraid',
224: 'Octillery',
225: 'Delibird',
226: 'Mantine',
227: 'Skarmory',
228: 'Houndour',
229: 'Houndoom',
230: 'Kingdra',
231: 'Phanpy',
232: 'Donphan',
    233: 'Porygon2',
234: 'Stantler',
235: 'Smeargle',
236: 'Tyrogue',
237: 'Hitmontop',
238: 'Smoochum',
239: 'Elekid',
240: 'Magby',
241: 'Miltank',
242: 'Blissey',
243: 'Raikou',
244: 'Entei',
245: 'Suicune',
246: 'Larvitar',
247: 'Pupitar',
248: 'Tyranitar',
249: 'Lugia',
    250: 'Ho-Oh',
251: 'Celebi',
252: 'Treecko',
253: 'Grovyle',
254: 'Sceptile',
255: 'Torchic',
256: 'Combusken',
257: 'Blaziken',
258: 'Mudkip',
259: 'Marshtomp',
260: 'Swampert',
261: 'Poochyena',
262: 'Mightyena',
263: 'Zigzagoon',
264: 'Linoone',
265: 'Wurmple',
266: 'Silcoon',
267: 'Beautifly',
268: 'Cascoon',
269: 'Dustox',
270: 'Lotad',
271: 'Lombre',
272: 'Ludicolo',
273: 'Seedot',
274: 'Nuzleaf',
275: 'Shiftry',
276: 'Taillow',
277: 'Swellow',
278: 'Wingull',
279: 'Pelipper',
280: 'Ralts',
281: 'Kirlia',
282: 'Gardevoir',
283: 'Surskit',
284: 'Masquerain',
285: 'Shroomish',
286: 'Breloom',
287: 'Slakoth',
288: 'Vigoroth',
289: 'Slaking',
290: 'Nincada',
291: 'Ninjask',
292: 'Shedinja',
293: 'Whismur',
294: 'Loudred',
295: 'Exploud',
296: 'Makuhita',
297: 'Hariyama',
298: 'Azurill',
299: 'Nosepass',
300: 'Skitty',
301: 'Delcatty',
302: 'Sableye',
303: 'Mawile',
304: 'Aron',
305: 'Lairon',
306: 'Aggron',
307: 'Meditite',
308: 'Medicham',
309: 'Electrike',
310: 'Manectric',
311: 'Plusle',
312: 'Minun',
313: 'Volbeat',
314: 'Illumise',
315: 'Roselia',
316: 'Gulpin',
317: 'Swalot',
318: 'Carvanha',
319: 'Sharpedo',
320: 'Wailmer',
321: 'Wailord',
322: 'Numel',
323: 'Camerupt',
324: 'Torkoal',
325: 'Spoink',
326: 'Grumpig',
327: 'Spinda',
328: 'Trapinch',
329: 'Vibrava',
330: 'Flygon',
331: 'Cacnea',
332: 'Cacturne',
333: 'Swablu',
334: 'Altaria',
335: 'Zangoose',
336: 'Seviper',
337: 'Lunatone',
338: 'Solrock',
339: 'Barboach',
340: 'Whiscash',
341: 'Corphish',
342: 'Crawdaunt',
343: 'Baltoy',
344: 'Claydol',
345: 'Lileep',
346: 'Cradily',
347: 'Anorith',
348: 'Armaldo',
349: 'Feebas',
350: 'Milotic',
351: 'Castform',
352: 'Kecleon',
353: 'Shuppet',
354: 'Banette',
355: 'Duskull',
356: 'Dusclops',
357: 'Tropius',
358: 'Chimecho',
359: 'Absol',
360: 'Wynaut',
361: 'Snorunt',
362: 'Glalie',
363: 'Spheal',
364: 'Sealeo',
365: 'Walrein',
366: 'Clamperl',
367: 'Huntail',
368: 'Gorebyss',
369: 'Relicanth',
370: 'Luvdisc',
371: 'Bagon',
372: 'Shelgon',
373: 'Salamence',
374: 'Beldum',
375: 'Metang',
376: 'Metagross',
377: 'Regirock',
378: 'Regice',
379: 'Registeel',
380: 'Latias',
381: 'Latios',
382: 'Kyogre',
383: 'Groudon',
384: 'Rayquaza',
385: 'Jirachi',
386: 'Deoxys',
387: 'Turtwig',
388: 'Grotle',
389: 'Torterra',
390: 'Chimchar',
391: 'Monferno',
392: 'Infernape',
393: 'Piplup',
394: 'Prinplup',
395: 'Empoleon',
396: 'Starly',
397: 'Staravia',
398: 'Staraptor',
399: 'Bidoof',
400: 'Bibarel',
401: 'Kricketot',
402: 'Kricketune',
403: 'Shinx',
404: 'Luxio',
405: 'Luxray',
406: 'Budew',
407: 'Roserade',
408: 'Cranidos',
409: 'Rampardos',
410: 'Shieldon',
411: 'Bastiodon',
412: 'Burmy',
413: 'Wormadam',
414: 'Mothim',
415: 'Combee',
416: 'Vespiquen',
417: 'Pachirisu',
418: 'Buizel',
419: 'Floatzel',
420: 'Cherubi',
421: 'Cherrim',
422: 'Shellos',
423: 'Gastrodon',
424: 'Ambipom',
425: 'Drifloon',
426: 'Drifblim',
427: 'Buneary',
428: 'Lopunny',
429: 'Mismagius',
430: 'Honchkrow',
431: 'Glameow',
432: 'Purugly',
433: 'Chingling',
434: 'Stunky',
435: 'Skuntank',
436: 'Bronzor',
437: 'Bronzong',
438: 'Bonsly',
    439: 'Mime Jr.',
440: 'Happiny',
441: 'Chatot',
442: 'Spiritomb',
443: 'Gible',
444: 'Gabite',
445: 'Garchomp',
446: 'Munchlax',
447: 'Riolu',
448: 'Lucario',
449: 'Hippopotas',
450: 'Hippowdon',
451: 'Skorupi',
452: 'Drapion',
453: 'Croagunk',
454: 'Toxicroak',
455: 'Carnivine',
456: 'Finneon',
457: 'Lumineon',
458: 'Mantyke',
459: 'Snover',
460: 'Abomasnow',
461: 'Weavile',
462: 'Magnezone',
463: 'Lickilicky',
464: 'Rhyperior',
465: 'Tangrowth',
466: 'Electivire',
467: 'Magmortar',
468: 'Togekiss',
469: 'Yanmega',
470: 'Leafeon',
471: 'Glaceon',
472: 'Gliscor',
473: 'Mamoswine',
    474: 'Porygon-Z',
475: 'Gallade',
476: 'Probopass',
477: 'Dusknoir',
478: 'Froslass',
479: 'Rotom',
480: 'Uxie',
481: 'Mesprit',
482: 'Azelf',
483: 'Dialga',
484: 'Palkia',
485: 'Heatran',
486: 'Regigigas',
487: 'Giratina',
488: 'Cresselia',
489: 'Phione',
490: 'Manaphy',
491: 'Darkrai',
492: 'Shaymin',
493: 'Arceus',
494: 'Victini',
495: 'Snivy',
496: 'Servine',
497: 'Serperior',
498: 'Tepig',
499: 'Pignite',
500: 'Emboar',
501: 'Oshawott',
502: 'Dewott',
503: 'Samurott',
504: 'Patrat',
505: 'Watchog',
506: 'Lillipup',
507: 'Herdier',
508: 'Stoutland',
509: 'Purrloin',
510: 'Liepard',
511: 'Pansage',
512: 'Simisage',
513: 'Pansear',
514: 'Simisear',
515: 'Panpour',
516: 'Simipour',
517: 'Munna',
518: 'Musharna',
519: 'Pidove',
520: 'Tranquill',
521: 'Unfezant',
522: 'Blitzle',
523: 'Zebstrika',
524: 'Roggenrola',
525: 'Boldore',
526: 'Gigalith',
527: 'Woobat',
528: 'Swoobat',
529: 'Drilbur',
530: 'Excadrill',
531: 'Audino',
532: 'Timburr',
533: 'Gurdurr',
534: 'Conkeldurr',
535: 'Tympole',
536: 'Palpitoad',
537: 'Seismitoad',
538: 'Throh',
539: 'Sawk',
540: 'Sewaddle',
541: 'Swadloon',
542: 'Leavanny',
543: 'Venipede',
544: 'Whirlipede',
545: 'Scolipede',
546: 'Cottonee',
547: 'Whimsicott',
548: 'Petilil',
549: 'Lilligant',
550: 'Basculin',
551: 'Sandile',
552: 'Krokorok',
553: 'Krookodile',
554: 'Darumaka',
555: 'Darmanitan',
556: 'Maractus',
557: 'Dwebble',
558: 'Crustle',
559: 'Scraggy',
560: 'Scrafty',
561: 'Sigilyph',
562: 'Yamask',
563: 'Cofagrigus',
564: 'Tirtouga',
565: 'Carracosta',
566: 'Archen',
567: 'Archeops',
568: 'Trubbish',
569: 'Garbodor',
570: 'Zorua',
571: 'Zoroark',
572: 'Minccino',
573: 'Cinccino',
574: 'Gothita',
575: 'Gothorita',
576: 'Gothitelle',
577: 'Solosis',
578: 'Duosion',
579: 'Reuniclus',
580: 'Ducklett',
581: 'Swanna',
582: 'Vanillite',
583: 'Vanillish',
584: 'Vanilluxe',
585: 'Deerling',
586: 'Sawsbuck',
587: 'Emolga',
588: 'Karrablast',
589: 'Escavalier',
590: 'Foongus',
591: 'Amoonguss',
592: 'Frillish',
593: 'Jellicent',
594: 'Alomomola',
595: 'Joltik',
596: 'Galvantula',
597: 'Ferroseed',
598: 'Ferrothorn',
599: 'Klink',
600: 'Klang',
601: 'Klinklang',
602: 'Tynamo',
603: 'Eelektrik',
604: 'Eelektross',
605: 'Elgyem',
606: 'Beheeyem',
607: 'Litwick',
608: 'Lampent',
609: 'Chandelure',
610: 'Axew',
611: 'Fraxure',
612: 'Haxorus',
613: 'Cubchoo',
614: 'Beartic',
615: 'Cryogonal',
616: 'Shelmet',
617: 'Accelgor',
618: 'Stunfisk',
619: 'Mienfoo',
620: 'Mienshao',
621: 'Druddigon',
622: 'Golett',
623: 'Golurk',
624: 'Pawniard',
625: 'Bisharp',
626: 'Bouffalant',
627: 'Rufflet',
628: 'Braviary',
629: 'Vullaby',
630: 'Mandibuzz',
631: 'Heatmor',
632: 'Durant',
633: 'Deino',
634: 'Zweilous',
635: 'Hydreigon',
636: 'Larvesta',
637: 'Volcarona',
638: 'Cobalion',
639: 'Terrakion',
640: 'Virizion',
641: 'Tornadus',
642: 'Thundurus',
643: 'Reshiram',
644: 'Zekrom',
645: 'Landorus',
646: 'Kyurem',
647: 'Keldeo',
648: 'Meloetta',
649: 'Genesect',
650: 'Pok',
651: 'Manaphy',
653: 'Brycen',
657: 'Monica',
661: 'Majin',
684: 'Smeargle',
} | mit | 4,413,845,295,945,331,700 | 16.571217 | 119 | 0.598041 | false |
deontp/misc | zenoic_api/cve-search-master/sbin/db_mgmt_cwe.py | 1 | 3714 | #!/usr/bin/env python3
#
# Import script of NIST CWE Common Weakness Enumeration.
#
# Until now, the import is only import Weakness description.
#
# The format is the following:
#
# { "_id" : ObjectId("52b70521b261026f36818515"), "weaknessabs" : "Variant",
# "name" : "ASP.NET Misconfiguration: Missing Custom Error Page",
# "description_summary" : "An ASP .NET application must enable custom error
# pages in order to prevent attackers from mining information from the
# framework's built-in responses.An ASP .NET application must enable custom
# error pages in order to prevent attackers from mining information from the
# framework's built-in responses.", "status" : "Draft", "id" : "12" }
#
# Software is free software released under the "Modified BSD license"
#
# Copyright (c) 2013-2014 Alexandre Dulaunoy - [email protected]
# Copyright (c) 2015-2016 Pieter-Jan Moreels - [email protected]
# Imports
import os
import sys
runPath = os.path.dirname(os.path.realpath(__file__))
sys.path.append(os.path.join(runPath, ".."))
from dateutil.parser import parse as parse_datetime
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
import argparse
import zipfile
import tempfile
from lib.ProgressBar import progressbar
from lib.Config import Configuration
import lib.DatabaseLayer as db
argparser = argparse.ArgumentParser(description='populate/update NIST CWE Common Weakness Enumeration database')
argparser.add_argument('-v', action='store_true', help='verbose output')
args = argparser.parse_args()
class CWEHandler(ContentHandler):
def __init__(self):
self.cwe = []
self.description_summary_tag = False
self.weakness_tag = False
def startElement(self, name, attrs):
if name == 'Weakness':
self.weakness_tag = True
self.statement = ""
self.weaknessabs = attrs.get('Weakness_Abstraction')
self.name = attrs.get('Name')
self.idname = attrs.get('ID')
self.status = attrs.get('Status')
self.cwe.append({'name': self.name, 'id': self.idname, 'status': self.status, 'weaknessabs': self.weaknessabs})
elif name == 'Description_Summary' and self.weakness_tag:
self.description_summary_tag = True
self.description_summary = ""
def characters(self, ch):
if self.description_summary_tag:
self.description_summary += ch.replace(" ", "")
def endElement(self, name):
if name == 'Description_Summary' and self.weakness_tag:
self.description_summary_tag = False
self.description_summary = self.description_summary + self.description_summary
self.cwe[-1]['description_summary'] = self.description_summary.replace("\n", "")
elif name == 'Weakness':
self.weakness_tag = False
# make parser
parser = make_parser()
ch = CWEHandler()
parser.setContentHandler(ch)
# check modification date
try:
(f, r) = Configuration.getFeedData('cwe')
except Exception as e:
print(e)
sys.exit("Cannot open url %s. Bad URL or not connected to the internet?"%(Configuration.getFeedURL("cwe")))
lastmodified = parse_datetime(r.headers['last-modified'], ignoretz=True)
i = db.getLastModified('cwe')
if i is not None:
if lastmodified == i:
print("Not modified")
sys.exit(0)
# parse xml and store in database
parser.parse(f)
cweList=[]
for cwe in progressbar(ch.cwe):
cwe['description_summary']=cwe['description_summary'].replace("\t\t\t\t\t", " ")
if args.v:
print (cwe)
cweList.append(cwe)
db.bulkUpdate('cwe', cweList)
#update database info after successful program-run
db.setColUpdate('cwe', lastmodified)
| gpl-3.0 | 4,526,201,600,850,099,700 | 34.711538 | 123 | 0.686322 | false |
mennis/oTTo | src/otto/lib/ethdrv.py | 1 | 8288 | from otto.lib.compute import average, standard_dev
from otto.lib.otypes import ReturnCode
from otto.lib.solaris import release_parse
from otto.utils import timefmt
def cmp_aoestat_devices(a, d):
# if a.device != d.device or a.size != d.size:
if a.size != d.size:
return ReturnCode(False, 'aoestat %s does not match device %s' % (a, d))
return ReturnCode(True)
def cmp_aoestat_targets(a, t):
# Confirm aoestats.paths in targets.ea
for l in a.port:
for m in a.paths[l].address:
found = False
for n in t:
mask = bin(n.ports)[2:][::-1]
if a.paths[l].port < len(mask) and mask[a.paths[l].port] == '1' and m == n.ea:
found = True
if not found:
return ReturnCode(False, 'aoestat %s does not match targets %s' % (a, t))
# Confirm targets.ea in aoestats.paths
for l in t:
mask = bin(l.ports)[2:][::-1]
for m in range(len(mask)):
if mask[m] == '1':
if l.ea not in a.paths[m].address:
return ReturnCode(False, 'targets %s does not match aoestat %s' % (t, a))
return ReturnCode(True)
def cmp_acbs_ca(a, c):
if a.index != c.index or a.wnd != c.wnd:
return ReturnCode(False, 'acbs %s does not match ca %s' % (a, c))
return ReturnCode(True)
def cmp_hba_ports(h, p):
checks = (h['port'] != str(p.index),
h['mac'] != p.ea,
h['type'] != p.name,
h['link']['max'] != str(p.maxlink),
h['link']['speed'] != str(p.currentlink))
if True in checks:
return ReturnCode(False, 'hba %s does not match ports %s' % (h, p))
return ReturnCode(True)
def cmp_hba_ifstats(h, i):
if h['port'] != str(i.port) or h['type'] != i.model or h['link']['speed'] != str(i.link):
return ReturnCode(False, 'hba %s does not match ifstats %s' % (h, i))
return ReturnCode(True)
def cmp_ports_ifstats(p, i):
if p.index != i.port or p.name != i.model or p.currentlink != i.link:
return ReturnCode(False, 'ports %s does not match ifstats %s' % (p, i))
return ReturnCode(True)
def verify_local(initiator):
aoestat = initiator.aoestat
acbs = initiator.ethdrv.acbs
ca = initiator.ethdrv.ca
cfg = initiator.ethdrv.config
devices = initiator.ethdrv.devices
targets = initiator.ethdrv.targets
for i in aoestat:
if i not in acbs:
            return ReturnCode(False, 'aoestat %s not in acbs:\n%s' % (i, initiator.ethdrv.acbs))
if i not in ca:
return ReturnCode(False, 'aoestat %s not in ca' % i)
if i not in cfg:
return ReturnCode(False, 'aoestat %s not in config' % i)
if i in devices:
n = cmp_aoestat_devices(aoestat[i], devices[i])
if not n:
return n
else:
return ReturnCode(False, 'aoestat %s not in devices' % i)
if i in targets:
n = cmp_aoestat_targets(aoestat[i], targets[i])
if not n:
return n
else:
return ReturnCode(False, 'aoestat %s not in targets' % i)
for i in acbs:
if i not in aoestat:
return ReturnCode(False, 'acbs %s not in aoestat' % i)
if i in ca:
n = cmp_acbs_ca(acbs[i], ca[i])
if not n:
return n
else:
            return ReturnCode(False, 'acbs %s not in ca' % i)
if i not in cfg:
return ReturnCode(False, 'acbs %s not in config' % i)
if i not in devices:
return ReturnCode(False, 'acbs %s not in devices' % i)
if i not in targets:
return ReturnCode(False, 'acbs %s not in targets' % i)
for i in ca:
if i not in aoestat:
return ReturnCode(False, 'ca %s not in aoestat' % i)
if i in acbs:
n = cmp_acbs_ca(acbs[i], ca[i])
if not n:
return n
else:
return ReturnCode(False, 'ca %s not in acbs' % i)
if i not in cfg:
return ReturnCode(False, 'ca %s not in config' % i)
if i not in devices:
return ReturnCode(False, 'ca %s not in devices' % i)
if i not in targets:
return ReturnCode(False, 'ca %s not in targets' % i)
for i in cfg:
if i not in aoestat:
return ReturnCode(False, 'config %s not in aoestat' % i)
if i not in acbs:
return ReturnCode(False, 'config %s not in acbs' % i)
if i not in ca:
return ReturnCode(False, 'config %s not in ca' % i)
if i not in devices:
return ReturnCode(False, 'config %s not in devices' % i)
if i not in targets:
return ReturnCode(False, 'config %s not in targets' % i)
for i in devices:
if i in aoestat:
n = cmp_aoestat_devices(aoestat[i], devices[i])
if not n:
return n
else:
return ReturnCode(False, 'devices %s not in aoestat' % i)
if i not in acbs:
return ReturnCode(False, 'devices %s not in acbs' % i)
if i not in ca:
return ReturnCode(False, 'devices %s not in ca' % i)
if i not in cfg:
return ReturnCode(False, 'devices %s not in config' % i)
if i not in targets:
return ReturnCode(False, 'devices %s not in targets' % i)
for i in targets:
# check for stale target
seen = False
for j in targets[i]:
if j.ports != 0:
seen = True
if not seen:
continue
if i in aoestat:
n = cmp_aoestat_targets(aoestat[i], targets[i])
if not n:
return n
else:
return ReturnCode(False, 'targets %s not in aoestat' % i)
if i not in acbs:
return ReturnCode(False, 'targets %s not in acbs' % i)
if i not in ca:
return ReturnCode(False, 'targets %s not in ca' % i)
if i not in devices:
return ReturnCode(False, 'targets %s not in devices' % i)
if i not in targets:
return ReturnCode(False, 'targets %s not in targets' % i)
hba = initiator.hba_ports
ports = initiator.ethdrv.ports
ifstats = initiator.ethdrv.ifstats
for i in hba:
if int(i) in ports:
n = cmp_hba_ports(hba[i], ports[int(i)])
if not n:
return n
else:
return ReturnCode(False, 'hba %s not in ports' % i)
if int(i) in ifstats:
n = cmp_hba_ifstats(hba[i], ifstats[int(i)])
if not n:
return n
else:
return ReturnCode(False, 'hba %s not in ifstats' % i)
for i in ports:
if str(i) in hba:
n = cmp_hba_ports(hba[str(i)], ports[i])
if not n:
return n
else:
return ReturnCode(False, 'ports %s not in hba' % i)
if i in ifstats:
n = cmp_ports_ifstats(ports[i], ifstats[i])
if not n:
return n
else:
return ReturnCode(False, 'ports %s not in ifstats' % i)
for i in ifstats:
if str(i) in hba:
n = cmp_hba_ifstats(hba[str(i)], ifstats[i])
if not n:
return n
else:
return ReturnCode(False, 'ifstats %s not in hba' % i)
if i in ports:
n = cmp_ports_ifstats(ports[i], ifstats[i])
if not n:
return n
else:
return ReturnCode(False, 'ifstats %s not in ports' % i)
v = initiator.aoeversion
r = release_parse(initiator.ethdrv.release)
if r != v:
return ReturnCode(False, 'release %s does not match version %s' % (r, v))
# just read; nothing to compare with
_ = initiator.ethdrv.corestats
_ = initiator.ethdrv.ctl
_ = initiator.ethdrv.units
_ = initiator.ethdrv.elstats
return ReturnCode(True)
def list_stats(l):
stats = '\tsamples:%s' % len(l)
stats += '\taverage:%s' % timefmt(average(l))
stats += '\tstddev:%s' % timefmt(standard_dev(l))
stats += '\tmax:%s' % max(l)
stats += '\tmin:%s' % min(l)
return stats
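# Example (hedged): summarizing latency samples gathered in seconds; the
# average/stddev fields are rendered by utils.timefmt, max/min stay raw.
#
#   list_stats([0.010, 0.012, 0.020])
#   # -> '\tsamples:3\taverage:<fmt>\tstddev:<fmt>\tmax:0.02\tmin:0.01'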
| bsd-3-clause | -7,500,629,519,187,409,000 | 33.106996 | 95 | 0.534146 | false |
PhillsProgrammingExperiments/runtime_context | contexting_lib/decorators.py | 1 | 1661 | from contexting_lib.runtime_context import ContextDependentFunction, RuntimeContext
def default(foo):
runtime_context = RuntimeContext()
def_p_ctx = runtime_context.default_process_context
def_t_ctx = runtime_context.default_thread_context
runtime_context._contexts_to_foo_mapping \
[foo.__qualname__] \
[def_p_ctx] \
[def_t_ctx] = foo
return ContextDependentFunction(foo.__name__, foo.__qualname__)
def thread_bound(thread_context):
def decorator(foo):
runtime_context = RuntimeContext()
def_p_ctx = runtime_context.default_process_context
runtime_context._contexts_to_foo_mapping \
[foo.__qualname__] \
[def_p_ctx] \
[thread_context] = foo
return ContextDependentFunction(foo.__name__, foo.__qualname__)
return decorator
def process_bound(process_context):
def decorator(foo):
runtime_context = RuntimeContext()
def_t_ctx = runtime_context.default_thread_context
runtime_context._contexts_to_foo_mapping \
[foo.__qualname__] \
[process_context] \
[def_t_ctx] = foo
return ContextDependentFunction(foo.__name__, foo.__qualname__)
return decorator
def context_bound(process_context, thread_context):
def decorator(foo):
runtime_context = RuntimeContext()
runtime_context._contexts_to_foo_mapping \
[foo.__qualname__] \
[process_context] \
[thread_context] = foo
return ContextDependentFunction(foo.__name__, foo.__qualname__)
return decorator | mit | -6,346,212,727,357,678,000 | 35.130435 | 83 | 0.614088 | false |
AdriaanRol/AutoDepGraph | autodepgraph/tests/test_graph.py | 1 | 7160 | from unittest import TestCase, expectedFailure
from autodepgraph import visualization as vis
import autodepgraph as adg
import networkx as nx
from autodepgraph.graph import AutoDepGraph_DAG
import yaml
import os
test_dir = os.path.join(adg.__path__[0], 'tests', 'test_data')
class Test_Graph(TestCase):
@classmethod
def setUpClass(self):
cal_True_delayed = ('autodepgraph.node_functions.calibration_functions'
'.test_calibration_True_delayed')
test_graph = AutoDepGraph_DAG('test graph')
for node in ['A', 'B', 'C', 'D', 'E']:
test_graph.add_node(node, calibrate_function=cal_True_delayed)
test_graph.add_edge('C', 'A')
test_graph.add_edge('C', 'B')
test_graph.add_edge('B', 'A')
test_graph.add_edge('D', 'A')
test_graph.add_edge('E', 'D')
self.test_graph = test_graph
def test_default_not_implemented_cal(self):
test_graph = AutoDepGraph_DAG('test graph')
test_graph.add_node('A')
self.assertEqual(test_graph.nodes()['A']['state'], 'unknown')
with self.assertRaises(ValueError):
test_graph.maintain_node('A')
self.assertEqual(test_graph.nodes()['A']['state'], 'bad')
with self.assertRaises(ValueError):
test_graph.maintain_A()
def test_tolerance_check(self):
# The default check returns 1.0
self.test_graph.nodes['A']['tolerance'] = 0
self.assertEqual(self.test_graph.check_node('A'), 'needs calibration')
self.test_graph.nodes['A']['tolerance'] = 2
self.assertEqual(self.test_graph.check_node('A'), 'good')
self.test_graph.nodes['A']['tolerance'] = 0
self.assertEqual(self.test_graph.check_node('A'), 'needs calibration')
def test_maintain_node_assume_unkown_is_good(self):
self.test_graph.set_all_node_states(
'unknown')
self.test_graph.maintain_node('C')
self.assertEqual(self.test_graph.nodes()['C']['state'], 'good')
self.assertEqual(self.test_graph.nodes()['B']['state'], 'unknown')
def test_calibration_state(self):
s = self.test_graph.calibration_state()
        self.assertIsInstance(s, dict)
def test_set_function(self):
self.test_graph.set_node_attribute('A', 'myattribute', 10)
self.assertEqual(self.test_graph.get_node_attribute('A', 'myattribute'), 10)
self.test_graph.set_node_description('A', 'explain node A')
self.assertEqual(self.test_graph.get_node_attribute('A', 'description'), 'explain node A')
def test_maintain_node_require_cal(self):
self.test_graph.set_all_node_states(
'needs calibration')
self.test_graph.maintain_node('C')
self.assertEqual(self.test_graph.nodes()['C']['state'], 'good')
self.assertEqual(self.test_graph.nodes()['B']['state'], 'good')
self.assertEqual(self.test_graph.nodes()['D']['state'],
'needs calibration')
def test_bad_node(self):
cal_True_delayed = ('autodepgraph.node_functions.calibration_functions'
'.test_calibration_True_delayed')
test_graph = AutoDepGraph_DAG('test graph')
for node in ['A', 'B', 'C', 'D', 'E']:
test_graph.add_node(node, calibrate_function=cal_True_delayed)
test_graph.add_edge('C', 'A')
test_graph.add_edge('C', 'B')
test_graph.add_edge('B', 'A')
test_graph.add_edge('D', 'A')
test_graph.add_edge('E', 'D')
test_graph.set_all_node_states('unknown')
self.assertEqual(test_graph.nodes()['C']['state'], 'unknown')
self.assertEqual(test_graph.nodes()['B']['state'], 'unknown')
self.assertEqual(test_graph.nodes()['A']['state'], 'unknown')
cal_False = ('autodepgraph.node_functions.calibration_functions'
'.test_calibration_False')
test_graph.nodes['C']['calibrate_function'] = cal_False
# Failure to calibrate should raise an error
with self.assertRaises(ValueError):
test_graph.maintain_node('C')
# In the process of trying to fix node C it should try to
# calibrate it's requirements
self.assertEqual(test_graph.nodes()['C']['state'], 'bad')
self.assertEqual(test_graph.nodes()['B']['state'], 'good')
self.assertEqual(test_graph.nodes()['A']['state'], 'good')
def test_plotting_mpl(self):
self.test_graph.draw_mpl()
self.test_graph.cfg_plot_mode = 'matplotlib'
self.test_graph.update_monitor()
# call twice to have both creation and update of plot
self.test_graph.update_monitor()
def test_plotting_svg(self):
self.test_graph.draw_svg()
self.test_graph.cfg_plot_mode = 'svg'
self.test_graph.update_monitor()
# call twice to have both creation and update of plot
self.test_graph.update_monitor()
def test_dummy_cal_three_qubit_graph(self):
fn = os.path.join(test_dir, 'three_qubit_graph.yaml')
DAG = nx.readwrite.read_yaml(fn)
DAG.set_all_node_states('needs calibration')
DAG.cfg_plot_mode = None
DAG.maintain_node('Chevron q0-q1')
self.assertEqual(DAG.get_node_state('Chevron q0-q1'), 'good')
self.assertEqual(DAG.get_node_state('CZ q0-q1'), 'needs calibration')
def test_write_read_yaml(self):
"""
Mostly an example on how to read and write, but also test for
weird objects being present.
"""
self.test_graph.nodes()['C']['state'] = 'good'
self.test_graph.nodes()['B']['state'] = 'unknown'
fn = os.path.join(test_dir, 'nx_test_graph.yaml')
nx.readwrite.write_yaml(self.test_graph, fn)
read_testgraph = nx.readwrite.read_yaml(fn)
self.assertTrue(isinstance(read_testgraph, AutoDepGraph_DAG))
self.assertEqual(read_testgraph.nodes()['C']['state'], 'good')
self.assertEqual(read_testgraph.nodes()['B']['state'], 'unknown')
def test_adding_edge_nonexistent_node(self):
test_graph = AutoDepGraph_DAG('test graph')
test_graph.add_node('A')
with self.assertRaises(KeyError):
test_graph.add_edge('A', 'B')
with self.assertRaises(KeyError):
test_graph.add_edge('B', 'A')
# def propagate_error(self, state):
# '''
# Sets the state of this node to 'state' and calls this method for all
# child nodes (nodes that depend on this node). Used for recursively
# propagate errors.
# '''
# self.state(state)
# for child_name in self.children():
# # This will result in a depth-first search through the graph
# # that is quite inefficient and can visit many nodes multiple
# # times. We don't really care though, since the graph shouldn't
# # larger than ~100 nodes.
# self.find_instrument(child_name).propagate_error(state)
| mit | 1,161,567,346,362,417,000 | 40.627907 | 98 | 0.609358 | false |
crateio/crate.web | crate/web/dashboard/modules.py | 2 | 1435 | import collections
import datetime
import redis
from django.conf import settings
from django.utils.timezone import utc
from admin_tools.dashboard.modules import DashboardModule
class StatusModule(DashboardModule):
title = "Status"
template = "admin_tools/dashboard/modules/status.html"
def init_with_context(self, context):
if hasattr(settings, "PYPI_DATASTORE"):
datastore = redis.StrictRedis(**dict([(x.lower(), y) for x, y in settings.REDIS[settings.PYPI_DATASTORE].items()]))
if datastore.get("crate:pypi:since") is not None:
self.last_sync = datetime.datetime.fromtimestamp(float(datastore.get("crate:pypi:since")))
self.last_sync.replace(tzinfo=utc)
else:
self.last_sync = None
self.celery_queue_length = datastore.llen("celery")
def is_empty(self):
return False
class RedisStatusModule(DashboardModule):
title = "Redis Status"
template = "admin_tools/dashboard/modules/redis.html"
def init_with_context(self, context):
if hasattr(settings, "PYPI_DATASTORE"):
datastore = redis.StrictRedis(**dict([(x.lower(), y) for x, y in settings.REDIS[settings.PYPI_DATASTORE].items()]))
self.redis_info = collections.OrderedDict(sorted([(k, v) for k, v in datastore.info().iteritems()], key=lambda x: x[0]))
def is_empty(self):
return False
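# A wiring sketch (hedged): attaching these modules to a custom admin_tools
# dashboard; the dashboard class name used here is an assumption.
#
#   from admin_tools.dashboard import Dashboard
#
#   class CrateIndexDashboard(Dashboard):
#       def init_with_context(self, context):
#           self.children.append(StatusModule())
#           self.children.append(RedisStatusModule())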
| bsd-2-clause | 6,472,937,229,180,296,000 | 31.613636 | 132 | 0.661324 | false |
zillians/supercell | scripts/set_xml.py | 1 | 1678 | #!/usr/bin/env python
from xml.dom.minidom import parse, parseString
import getopt
import sys
class DomHandler():
def __init__(self, file_name):
self.dom = parse(file_name)
def setValue(self, attr_name, attr_value):
result = False
for node in self.dom.getElementsByTagName('parameter'):
if node.getAttribute('name') == attr_name:
""" parameter name is equal to attr_name """
print "find attribute name: %s" % (attr_name)
result = True
if node.getAttribute('value') == attr_value:
continue
else:
node.setAttribute('value', attr_value)
print "set attribute value: %s" % (attr_value)
return result
def save(self, file_name):
f = open(file_name, 'w')
f.write(self.dom.toxml())
        f.close()
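# Example (hedged): flipping a single parameter in a config file; the
# filename and attribute names below are placeholders.
#
#   dom = DomHandler('WorldServer.module')
#   if dom.setValue('local_id', '42'):
#       dom.save('WorldServer.module')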
def main():
if len(sys.argv) < 4:
usage()
sys.exit(2)
fileName = sys.argv[1]
attrName = sys.argv[2]
attrValue = sys.argv[3]
simpleDom = DomHandler(fileName)
result = simpleDom.setValue(attrName, attrValue)
if not result:
print "set attribute fail"
else:
simpleDom.save(fileName)
def usage():
print "usage: %s [file] [name] [value]" % (__file__)
print\
"""
[file] xml file
[name] attribute name
[value] value to set to that attribute
"""
def test():
dom1 = parse( "/nfs/home/zac/zillians/lib/node/world-server/WorldServerModule.module" ) # parse an XML file
dom2 = parseString( "<myxml>Some data <empty/> some more data</myxml>" )
print dom1.toxml()
#print dom2.toxml()
for node in dom1.getElementsByTagName('parameter'): # visit every node <bar />
if node.getAttribute("name") == "local_id":
print "node attribute value: %s" % (node.getAttribute("value"))
if __name__ == "__main__":
main()
| agpl-3.0 | 1,887,179,124,444,890,400 | 24.815385 | 110 | 0.662694 | false |
kyuupichan/electrum | lib/tests/test_commands.py | 1 | 1821 | import unittest
from decimal import Decimal
from lib.commands import Commands
class TestCommands(unittest.TestCase):
def test_setconfig_non_auth_number(self):
self.assertEqual(7777, Commands._setconfig_normalize_value('rpcport', "7777"))
self.assertEqual(7777, Commands._setconfig_normalize_value('rpcport', '7777'))
self.assertAlmostEqual(Decimal(2.3), Commands._setconfig_normalize_value('somekey', '2.3'))
def test_setconfig_non_auth_number_as_string(self):
self.assertEqual("7777", Commands._setconfig_normalize_value('somekey', "'7777'"))
def test_setconfig_non_auth_boolean(self):
self.assertEqual(True, Commands._setconfig_normalize_value('show_console_tab', "true"))
self.assertEqual(True, Commands._setconfig_normalize_value('show_console_tab', "True"))
def test_setconfig_non_auth_list(self):
self.assertEqual(['file:///var/www/', 'https://electrum.org'],
Commands._setconfig_normalize_value('url_rewrite', "['file:///var/www/','https://electrum.org']"))
self.assertEqual(['file:///var/www/', 'https://electrum.org'],
Commands._setconfig_normalize_value('url_rewrite', '["file:///var/www/","https://electrum.org"]'))
def test_setconfig_auth(self):
self.assertEqual("7777", Commands._setconfig_normalize_value('rpcuser', "7777"))
self.assertEqual("7777", Commands._setconfig_normalize_value('rpcuser', '7777'))
self.assertEqual("7777", Commands._setconfig_normalize_value('rpcpassword', '7777'))
self.assertEqual("2asd", Commands._setconfig_normalize_value('rpcpassword', '2asd'))
self.assertEqual("['file:///var/www/','https://electrum.org']",
Commands._setconfig_normalize_value('rpcpassword', "['file:///var/www/','https://electrum.org']"))
| mit | -1,035,599,767,380,382,300 | 54.181818 | 110 | 0.674904 | false |
DavidCain/mitoc-trips | ws/tests/test_auth.py | 1 | 5561 | import unittest.mock
from typing import ClassVar
from urllib.parse import parse_qs, urlparse
from django.urls import reverse
from ws import models
from ws.tests import TestCase, factories
from ws.tests.helpers import PermHelpers
login_required_routes = [
'all_trips_medical',
'account_change_password',
'manage_leaders',
#'manage_applications',
#'manage_trips',
'participant_lookup',
'trip_signup',
'leader_trip_signup',
'discounts',
'lottery_preferences',
'lottery_pairing',
]
class AuthTests(TestCase):
"""Test user authentication and authorization.
These tests hit a lot of major routes in checking our access control system,
but this is not meant as an exhaustive test of all available routes. Rather,
it's supposed to test the authorization mechanisms themselves.
There are a few levels of authorization in the app:
- anonymous:
can view some public-facing pages
- authenticated:
those with user accounts, but who may not have not supplied necessary
medical information. They essentially have the same browsing
privileges as anonymous users until completing the participant form.
- participants:
        anyone who has filled out the participation form; includes leaders,
        admins, activity chairs (i.e. everyone)
- privileged participants:
privileges are based on groups. Some participants belong to the
leaders group, others are activity chairs
"""
user: ClassVar[models.User]
@classmethod
def setUpTestData(cls):
cls.user = factories.UserFactory.create(
email='[email protected]', password='password'
)
def login(self):
return self.client.login(email=self.user.email, password='password')
def assertProfileRedirectedTo(self, response, desired_page):
"""Check for edit profile redirect on a given response."""
self.assertEqual(response.status_code, 302)
parsed = urlparse(response.url)
self.assertEqual(parsed.path, reverse('edit_profile'))
qs = parse_qs(parsed.query)
self.assertEqual(qs['next'], [desired_page])
def test_open_pages(self):
"""Anonymous users can browse a number of pages."""
for open_url in [
'contact',
'help-home',
'help-about',
'help-personal_info',
'help-lottery',
'help-signups',
'all_trips',
'upcoming_trips',
'stats',
]:
response = self.client.get(reverse(open_url))
self.assertEqual(response.status_code, 200)
def test_viewing_trips(self):
"""Anonymous users can view trips (they just can't sign up)."""
trip = factories.TripFactory.create()
view_trip = self.client.get(reverse('view_trip', kwargs={'pk': trip.pk}))
self.assertEqual(view_trip.status_code, 200)
def test_unregistered_participant_pages(self):
"""Unregistered users are prompted to log in on restricted pages."""
# Non-exhaustive list of restricted URLs (some require more than login)
for login_required in login_required_routes:
response = self.client.get(reverse(login_required))
self.assertEqual(response.status_code, 302)
self.assertIn('login', response.url)
def test_registered_participant_pages(self):
"""Registered users will be redirected on participant-only pages."""
desired_page = reverse('all_trips_medical')
self.login()
response = self.client.get(desired_page)
self.assertProfileRedirectedTo(response, desired_page)
@unittest.mock.patch('ws.decorators.profile_needs_update')
def test_participant_pages(self, profile_needs_update):
"""Participants are allowed to view certain pages."""
par_only_page = reverse('discounts')
self.login()
# When authenticated, but not a participant: redirected to edit profile
no_par_response = self.client.get(par_only_page)
self.assertProfileRedirectedTo(no_par_response, par_only_page)
PermHelpers.mark_participant(self.user)
profile_needs_update.return_value = False
# When authenticated and a participant: success
par_response = self.client.get(par_only_page)
self.assertEqual(par_response.status_code, 200)
@unittest.mock.patch('ws.decorators.profile_needs_update')
def test_leader_pages(self, profile_needs_update):
"""Participants are given forbidden messages on leader-only pages.
Leaders are able to view these pages as normal.
"""
self.login()
# Membership in participant group is sufficient to validate participant
# (Making profile_needs_update return False skips participant checks)
PermHelpers.mark_participant(self.user)
profile_needs_update.return_value = False
# leader-only GET pages that don't require pks
leader_pages = ['leaders', 'participant_lookup']
# HTTP Forbidden on leader pages without group membership
for leader_page in leader_pages:
response = self.client.get(reverse(leader_page))
self.assertEqual(response.status_code, 403)
# HTTP OK when the user is marked as a leader
PermHelpers.mark_leader(self.user)
for leader_page in leader_pages:
response = self.client.get(reverse(leader_page))
self.assertEqual(response.status_code, 200)
| gpl-3.0 | -6,594,381,456,617,199,000 | 36.829932 | 81 | 0.661572 | false |
ablil98/python | bhp/joomla_killer.py | 1 | 3126 | #!/usr/bin/env python3
import threading
import queue
import http.cookiejar
import urllib.request
import urllib.parse
from html.parser import HTMLParser
# general variables
threads = 1
username = 'admin'
wordlist_file = 'passwords.lst'
resume = None # password to resume from
# target variables
target_url = 'http://localhost/dvwa/login.php'
target_post = 'http://localhost/dvwa/login.php'
# form variables
username_field = 'username'
password_field = 'password'
class BruteParser(HTMLParser):
"""get form field"""
def __init__(self):
super(BruteParser, self).__init__()
self.tag_results = dict()
def handle_starttag(self, tag, attrs):
if tag == 'input':
tag_name = None
tag_value = None
            for name, value in attrs:
if name == 'name':
tag_name = value
if name == 'value':
tag_value = value
if tag_name is not None:
                self.tag_results[tag_name] = tag_value
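# Minimal illustration of the parser above (the HTML snippet is hypothetical;
# DVWA's login form carries a hidden 'user_token' field in this shape):
#   p = BruteParser()
#   p.feed('<input type="hidden" name="user_token" value="abc123" />')
#   p.tag_results  # -> {'user_token': 'abc123'}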
class Bruter():
def __init__(self, username, words):
self.username = username
self.passwords_q = words
self.found = False # Set to True if we found the password
print("[+] Finished setting up for {} ".format(self.username))
def run_bruteforce(self):
for i in range(threads):
print("[*] Spawning thread {} ...".format(i))
t = threading.Thread(target=self.web_bruter)
t.start()
def web_bruter(self):
while not self.passwords_q.empty() and not self.found:
password_try = self.passwords_q.get()
# create cookie jar
cookie_jar = http.cookiejar.FileCookieJar("cookies")
# handle cookie jar for urllib library
cookie_handler = urllib.request.HTTPCookieProcessor(cookie_jar)
            # get url response
opener = urllib.request.build_opener(cookie_handler)
response = opener.open(target_url)
page = response.read()
print("[*] Trying : {} - {} ( {} left )".format(self.username, password_try, self.passwords_q.qsize()))
# parse html data
parser = BruteParser()
parser.feed(page.decode())
# set our username and password
post_tags = parser.tag_results
post_tags[username_field] = self.username
post_tags[password_field] = password_try
login_data = urllib.parse.urlencode(post_tags)
login_response = opener.open(target_post, login_data.encode())
login_result = login_response.read()
            # the failure banner is absent, so the password was found
if 'Login failed' not in login_result.decode():
self.found = True
print("Brute Foce successffull Yeah")
print("\n[+] Username : {}".format(self.username))
print("[+] Password : {}\n".format(password_try))
print("[*] Waiting for other processes to stop ...")
def build_wordlist(wordlist_file):
wordlist_queue = queue.Queue()
with open(wordlist_file, 'r') as f:
raw_data = f.readlines()
for word in raw_data:
wordlist_queue.put(word.rstrip())
return wordlist_queue
if __name__ == '__main__':
print('[*] Reading wordlist : {} '.format(wordlist_file))
passwords_q = build_wordlist(wordlist_file)
print('[+] Finished reading wordlist successfully ({} passwords)'.format(passwords_q.qsize()))
bruteforcer = Bruter('admin', passwords_q)
bruteforcer.run_bruteforce()
| gpl-3.0 | 1,010,688,772,363,187,200 | 24.008 | 106 | 0.680102 | false |
slek120/phys534 | HW4/Aw.py | 1 | 1191 | from scipy import *
from scipy.integrate import simps
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
def Compute_Aw(om, DOS, Sig, ommesh, delta=0.1j):
Aw = zeros(len(om))
for i in range(len(om)):
DMFTW = DOS / (om[i] - Sig[i] - ommesh + delta)
Aw[i] = simps(-1.*DMFTW.imag/pi, ommesh)
return Aw
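# The loop above evaluates the local spectral function
#   A(w) = -(1/pi) * Integral dE D(E) * Im[ 1 / (w - Sigma(w) - E + i*delta) ]
# where the energy integral over `ommesh` is done with Simpson's rule and
# delta is a small positive broadening that keeps the Green's function retarded.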
# Load DOS
DOSfile = loadtxt('2D_SL_DOS')
# 1st column as energies
ommesh = DOSfile[:,0]
# 2nd column as DOS
DOS = DOSfile[:,1]
# Normalize
DOS = DOS / simps(DOS, ommesh)
# Load Sig
Sigfile = loadtxt('Sig.out.U6')
# 1st column as frequencies
om = Sigfile[:,0]
# 2nd, 3rd column as self energy
Sig = Sigfile[:,1] + 1j * Sigfile[:,2]
Aw = Compute_Aw(om, DOS, Sig, ommesh)
plt.plot(om, Aw)
plt.xlim(0,50)
plt.legend(['U=6'], loc='best')
plt.ylabel('$A(\omega)$')
plt.xlabel('$\omega$')
plt.savefig('Aw_U6.png')
# start a fresh figure so the U=12 curve is not drawn over the U=6 one
plt.clf()
# Load Sig
Sigfile = loadtxt('Sig.out.U12')
# 1st column as frequencies
om = Sigfile[:,0]
# 2nd, 3rd column as self energy
Sig = Sigfile[:,1] + 1j * Sigfile[:,2]
Aw = Compute_Aw(om, DOS, Sig, ommesh)
plt.plot(om, Aw)
plt.xlim(0,50)
plt.legend(['U=12'], loc='best')
plt.ylabel('$A(\omega)$')
plt.xlabel('$\omega$')
plt.savefig('Aw_U12.png')
| gpl-3.0 | -4,123,881,550,766,441,000 | 21.903846 | 51 | 0.650714 | false |
google/ffn | ffn/training/import_util.py | 1 | 1882 | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains a utility function for dynamically importing symbols from modules.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import importlib
import logging
def import_symbol(specifier, default_packages='ffn.training.models'):
"""Imports a symbol from a python module.
The calling module must have the target module for the import as dependency.
Args:
specifier: full path specifier in format
[<packages>.]<module_name>.<model_class>, if packages is missing
``default_packages`` is used.
default_packages: chain of packages before module in format
<top_pack>.<sub_pack>.<subsub_pack> etc.
Returns:
symbol: object from module
"""
module_path, symbol_name = specifier.rsplit('.', 1)
try:
logging.info('Importing symbol %s from %s.%s',
symbol_name, default_packages, module_path)
module = importlib.import_module(default_packages + '.' + module_path)
except ImportError as e:
logging.info(e)
logging.info('Importing symbol %s from %s', symbol_name, module_path)
module = importlib.import_module(module_path)
symbol = getattr(module, symbol_name)
return symbol
| apache-2.0 | 2,617,406,135,293,789,000 | 35.192308 | 80 | 0.689692 | false |
guixing/simplecmdb | hostinfo/models.py | 1 | 1100 | from django.db import models
class Host(models.Model):
"""store host information"""
hostname = models.CharField(max_length=30)
osver = models.CharField(max_length=30)
vendor = models.CharField(max_length=30)
product = models.CharField(max_length=30)
cpu_model = models.CharField(max_length=30)
    cpu_num = models.IntegerField()
    memory = models.IntegerField()
sn = models.CharField(max_length=30)
ipaddr = models.IPAddressField(max_length=15)
identity = models.CharField(max_length=32)
def __unicode__(self):
return self.hostname
class HostGroup(models.Model):
name = models.CharField(max_length=30)
members = models.ManyToManyField(Host)
def __unicode__(self):
return self.name
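# Note: change_hostname() used below is assumed to be provided elsewhere
# (e.g. a remote-management helper); it is neither defined nor imported in
# this module, which is why the pre_save hook stays commented out at the end.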
def handle_hostsave_signal(sender, **kwargs):
new_host = kwargs['instance']
old_host = Host.objects.get(identity=new_host.identity)
if new_host.hostname != old_host.hostname:
change_hostname(new_host.ipaddr, new_host.hostname)
#models.signals.pre_save.connect(handle_hostsave_signal, sender=Host)
| bsd-3-clause | 6,485,501,293,390,478,000 | 32.333333 | 69 | 0.702727 | false |
asoliveira/NumShip | scripts/plot/brl-velo-u-zz-plt.py | 1 | 3039 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#Dimensionless plot?
adi = False
#Save the figures (True|False)?
save = True
#If saving, which file format?
formato = 'jpg'
#If saving, which directory?
dircg = 'fig-sen'
#If saving, what file name?
nome = 'brl-velo-u-zz'
#Which title for the plots?
titulo = ''#'Curva de ZigZag'
titulo2 = ''
#Plot colors
pc = 'k'
r1c = 'b'
r2c = 'y'
r3c = 'r'
#Line styles
ps = '-'
r1s = '-'
r2s = '-'
r3s = '-'
import os
import scipy as sp
import matplotlib.pyplot as plt
from libplot import *
acelhis = sp.genfromtxt('../entrada/padrao/CurvaZigZag/velo.dat')
acelhis2 = sp.genfromtxt('../entrada/brl/saida1.1/CurvaZigZag/velo.dat')
acelhis3 = sp.genfromtxt('../entrada/brl/saida1.2/CurvaZigZag/velo.dat')
acelhis4 = sp.genfromtxt('../entrada/brl/saida1.3/CurvaZigZag/velo.dat')
lemehis = sp.genfromtxt('../entrada/padrao/CurvaZigZag/leme.dat')
lemehis2 = sp.genfromtxt('../entrada/brl/saida1.1/CurvaZigZag/leme.dat')
lemehis3 = sp.genfromtxt('../entrada/brl/saida1.2/CurvaZigZag/leme.dat')
lemehis4 = sp.genfromtxt('../entrada/brl/saida1.3/CurvaZigZag/leme.dat')
axl = [0, 1000, 4., 9.]
axl2 = [0, 1000, -25, 25]  # rudder axis
#Plotting the turning curve
if adi:
    ylabel = r'$u\prime$'
    xacellabel = r'$t\prime$'
else:
ylabel = r'$u \quad m/s$'
xacellabel = r'$t \quad segundos$'
plt.subplot2grid((1,4),(0,0), colspan=3)
#Baseline
plt.plot(acelhis[:, 0], acelhis[:, 1], color = pc, linestyle = ps,
linewidth = 2, label=ur'padrão')
plt.plot(acelhis2[:, 0], acelhis2[:, 1], color = r1c,linestyle = r1s,
linewidth = 2, label=ur'1.1--$brl$')
plt.plot(acelhis3[:, 0], acelhis3[:, 1], color = r2c, linestyle = r2s,
linewidth = 2, label=ur'1.2--$brl$')
plt.plot(acelhis4[:, 0], acelhis4[:, 1], color = r3c, linestyle = r3s,
linewidth = 2, label=ur'1.3--$brl$')
plt.title(titulo)
plt.legend(bbox_to_anchor=(1.1, 1), loc=2, borderaxespad=0.)
plt.ylabel(ylabel)
plt.xlabel(xacellabel)
plt.axis(axl)
plt.grid(True)
plt.twinx()
plt.plot(lemehis[:, 0], lemehis[:, 1] * (180/sp.pi), color = pc, linestyle =
"--",
linewidth = 1, label=ur'leme--padrão')
plt.plot(lemehis2[:, 0], lemehis2[:, 1] * (180/sp.pi), color = r1c, linestyle
= "--",
linewidth = 1, label=ur'leme--1.1$brl$')
plt.plot(lemehis3[:, 0], lemehis3[:, 1] * (180/sp.pi), color = r2c, linestyle
= "--",
linewidth = 1, label=ur'leme--1.2$brl$')
plt.plot(lemehis4[:, 0], lemehis4[:, 1] * (180/sp.pi), color = r3c, linestyle
= "--",
linewidth = 1, label=ur'leme--1.3$brl$')
plt.title(titulo2)
plt.legend(bbox_to_anchor=(1.1, 0), loc=3, borderaxespad=0.)
plt.ylabel(r"$\delta_R$")
plt.axis(axl2)
plt.grid(False)
if save:
if not os.path.exists(dircg):
os.makedirs(dircg)
if os.path.exists(dircg + '/' + nome + '.' + formato):
os.remove(dircg + '/' + nome + '.' + formato)
plt.savefig(dircg + '/' + nome + '.' + formato , format=formato)
else:
plt.show()
| gpl-3.0 | -8,618,775,435,432,782,000 | 28.398058 | 79 | 0.638045 | false |
rlefevre1/hpp-rbprm-corba | script/dynamic/downSlope_hrp2_pathKino.py | 1 | 6111 | from hpp.corbaserver.rbprm.rbprmbuilder import Builder
from hpp.gepetto import Viewer
from hpp.corbaserver import Client
from hpp.corbaserver.robot import Robot as Parent
from hpp.corbaserver.rbprm.problem_solver import ProblemSolver
class Robot (Parent):
rootJointType = 'freeflyer'
packageName = 'hpp-rbprm-corba'
meshPackageName = 'hpp-rbprm-corba'
# URDF file describing the trunk of the robot HyQ
urdfName = 'hrp2_trunk_flexible'
urdfSuffix = ""
srdfSuffix = ""
def __init__ (self, robotName, load = True):
Parent.__init__ (self, robotName, self.rootJointType, load)
self.tf_root = "base_footprint"
self.client.basic = Client ()
self.load = load
rootJointType = 'freeflyer'
packageName = 'hpp-rbprm-corba'
meshPackageName = 'hpp-rbprm-corba'
urdfName = 'hrp2_trunk_flexible'
urdfNameRom = ['hrp2_larm_rom','hrp2_rarm_rom','hrp2_lleg_rom','hrp2_rleg_rom']
urdfSuffix = ""
srdfSuffix = ""
vMax = 4;
aMax = 6;
extraDof = 6
# Creating an instance of the helper class, and loading the robot
rbprmBuilder = Builder ()
rbprmBuilder.loadModel(urdfName, urdfNameRom, rootJointType, meshPackageName, packageName, urdfSuffix, srdfSuffix)
#rbprmBuilder.setJointBounds ("base_joint_xyz", [-1.25,2, -0.5, 5.5, 0.6, 1.8])
rbprmBuilder.setJointBounds ("base_joint_xyz", [-2,4, 0.5, 1.5, 0.25, 1.8])
rbprmBuilder.setJointBounds('CHEST_JOINT0',[0,0])
rbprmBuilder.setJointBounds('CHEST_JOINT1',[-0.35,0.1])
rbprmBuilder.setJointBounds('HEAD_JOINT0',[0,0])
rbprmBuilder.setJointBounds('HEAD_JOINT1',[0,0])
# The following lines set constraint on the valid configurations:
# a configuration is valid only if all limbs can create a contact ...
rbprmBuilder.setFilter(['hrp2_lleg_rom','hrp2_rleg_rom'])
rbprmBuilder.setAffordanceFilter('hrp2_lleg_rom', ['Support',])
rbprmBuilder.setAffordanceFilter('hrp2_rleg_rom', ['Support'])
# We also bound the rotations of the torso. (z, y, x)
rbprmBuilder.boundSO3([-0.1,0.1,-0.65,0.65,-0.2,0.2])
rbprmBuilder.client.basic.robot.setDimensionExtraConfigSpace(extraDof)
rbprmBuilder.client.basic.robot.setExtraConfigSpaceBounds([-4,4,-1,1,-2,2,0,0,0,0,0,0])
indexECS = rbprmBuilder.getConfigSize() - rbprmBuilder.client.basic.robot.getDimensionExtraConfigSpace()
# Creating an instance of HPP problem solver and the viewer
ps = ProblemSolver( rbprmBuilder )
ps.client.problem.setParameter("aMax",aMax)
ps.client.problem.setParameter("vMax",vMax)
ps.client.problem.setParameter("tryJump",vMax)
ps.client.problem.setParameter("sizeFootX",0.24)
ps.client.problem.setParameter("sizeFootY",0.14)
r = Viewer (ps)
from hpp.corbaserver.affordance.affordance import AffordanceTool
afftool = AffordanceTool ()
afftool.setAffordanceConfig('Support', [0.5, 0.03, 0.00005])
afftool.loadObstacleModel (packageName, "downSlope", "planning", r)
#r.loadObstacleModel (packageName, "ground", "planning")
afftool.visualiseAffordances('Support', r, [0.25, 0.5, 0.5])
r.addLandmark(r.sceneName,1)
# Setting initial and goal configurations
q_init = rbprmBuilder.getCurrentConfig ();
q_init[3:7] = [1,0,0,0]
q_init[8] = -0.2
q_init [0:3] = [-1.6, 1, 1.75]; r (q_init)
#q_init[3:7] = [0.7071,0,0,0.7071]
#q_init [0:3] = [1, 1, 0.65]
rbprmBuilder.setCurrentConfig (q_init)
q_goal = q_init [::]
q_goal[3:7] = [1,0,0,0]
q_goal[8] = 0
q_goal [0:3] = [2.5, 1, 0.5]; r (q_goal)
r (q_goal)
#~ q_goal [0:3] = [-1.5, 0, 0.63]; r (q_goal)
# Choosing a path optimizer
ps.setInitialConfig (q_init)
ps.addGoalConfig (q_goal)
# Choosing RBPRM shooter and path validation methods.
ps.client.problem.selectConFigurationShooter("RbprmShooter")
ps.client.problem.selectPathValidation("RbprmPathValidation",0.05)
# Choosing kinodynamic methods :
ps.selectSteeringMethod("RBPRMKinodynamic")
ps.selectDistance("KinodynamicDistance")
ps.selectPathPlanner("DynamicPlanner")
#solve the problem :
r(q_init)
#r.solveAndDisplay("rm",1,0.01)
t = ps.solve ()
from hpp.gepetto import PathPlayer
pp = PathPlayer (rbprmBuilder.client.basic, r)
pp.dt=0.03
pp.displayVelocityPath(0)
r.client.gui.setVisibility("path_0_root","ALWAYS_ON_TOP")
"""
if isinstance(t, list):
t = t[0]* 3600000 + t[1] * 60000 + t[2] * 1000 + t[3]
f = open('log.txt', 'a')
f.write("path computation " + str(t) + "\n")
f.close()
"""
"""
for i in range(0,9):
t = ps.solve()
if isinstance(t, list):
ts = t[0]* 3600. + t[1] * 60. + t[2] + t[3]/1000.
f= open("/local/dev_hpp/logs/benchHrp2_slope_LP.txt","a")
f.write("t = "+str(ts) + "\n")
f.write("path_length = "+str(ps.client.problem.pathLength(i)) + "\n")
f.close()
print "problem "+str(i)+" solved \n"
ps.clearRoadmap()
"""
#ps.client.problem.prepareSolveStepByStep()
#ps.client.problem.finishSolveStepByStep()
q_far = q_init[::]
q_far[2] = -3
r(q_far)
"""
camera = [0.6293167471885681,
-9.560577392578125,
10.504343032836914,
0.9323806762695312,
0.36073973774909973,
0.008668755181133747,
0.02139890193939209]
r.client.gui.setCameraTransform(0,camera)
"""
"""
r.client.gui.removeFromGroup("rm",r.sceneName)
r.client.gui.removeFromGroup("rmstart",r.sceneName)
r.client.gui.removeFromGroup("rmgoal",r.sceneName)
for i in range(0,ps.numberNodes()):
r.client.gui.removeFromGroup("vecRM"+str(i),r.sceneName)
"""
"""
# for seed 1486657707
ps.client.problem.extractPath(0,0,2.15)
# Playing the computed path
from hpp.gepetto import PathPlayer
pp = PathPlayer (rbprmBuilder.client.basic, r)
pp.dt=0.03
pp.displayVelocityPath(1)
r.client.gui.setVisibility("path_1_root","ALWAYS_ON_TOP")
#display path
pp.speed=0.3
#pp (0)
"""
#display path with post-optimisation
"""
q_far = q_init[::]
q_far[2] = -3
r(q_far)
"""
# Manually add waypoints to roadmap:
"""
ps.client.problem.prepareSolveStepByStep()
pbCl = rbprmBuilder.client.basic.problem
q1= [0.6, 1, 0.5, 1, 0, 0, 0, 0.0, 0, 0.0, 0.0, 3, 0.0, -1.5, 0.0, 0.0, 0.0]
pbCl.addConfigToRoadmap (q1)
pbCl.directPath(q1,q_goal,True)
pbCl.directPath(q_init,q1,False)
r.client.gui.removeFromGroup("path_"+str(ps.numberPaths()-2)+"_root",r.sceneName)
pp.displayVelocityPath(ps.numberPaths()-1)
pbCl.addEdgeToRoadmap (q_init, q1, 1, False)
pbCl.addEdgeToRoadmap (q1, q_goal, 0, False)
"""
| lgpl-3.0 | -5,419,027,049,570,452,000 | 27.03211 | 114 | 0.715104 | false |
nico-hn/color_contrast_calc_py | tests/sorter/test_sorter.py | 1 | 11908 | import unittest
import operator
from color_contrast_calc.color import Color
from color_contrast_calc.sorter import sorter
from color_contrast_calc import utils
class TestSorter(unittest.TestCase):
    def setUp(self):
pass
def test_is_hsl_order(self):
self.assertTrue(sorter.is_hsl_order('hsl'))
self.assertTrue(sorter.is_hsl_order('HSL'))
self.assertTrue(sorter.is_hsl_order('lHs'))
self.assertFalse(sorter.is_hsl_order('rgb'))
self.assertFalse(sorter.is_hsl_order('bRg'))
def test_color_component_pos(self):
rgb = 'rgb'
hsl = 'hsl'
pos = sorter.color_component_pos('hsl', hsl)
self.assertEqual(pos, (0, 1, 2))
pos = sorter.color_component_pos('hLs', hsl)
self.assertEqual(pos, (0, 2, 1))
pos = sorter.color_component_pos('rgb', rgb)
self.assertEqual(pos, (0, 1, 2))
pos = sorter.color_component_pos('bgr', rgb)
self.assertEqual(pos, (2, 1, 0))
def test_parse_color_order(self):
order = sorter.parse_color_order('hsl')
pos = sorted(order['funcs'].keys())
descend = tuple(order['funcs'][i](1) for i in pos)
self.assertEqual(order['pos'], (0, 1, 2))
self.assertEqual(descend, (1, 1, 1))
order = sorter.parse_color_order('HSL')
pos = sorted(order['funcs'].keys())
descend = tuple(order['funcs'][i](1) for i in pos)
self.assertEqual(order['pos'], (0, 1, 2))
self.assertEqual(descend, (-1, -1, -1))
order = sorter.parse_color_order('lHs')
pos = sorted(order['funcs'].keys())
descend = tuple(order['funcs'][i](1) for i in pos)
self.assertEqual(order['pos'], (2, 0, 1))
self.assertEqual(descend, (-1, 1, 1))
order = sorter.parse_color_order('rgb')
pos = sorted(order['funcs'].keys())
descend = tuple(order['funcs'][i](1) for i in pos)
self.assertEqual(order['pos'], (0, 1, 2))
self.assertEqual(descend, (1, 1, 1))
order = sorter.parse_color_order('bRG')
pos = sorted(order['funcs'].keys())
descend = tuple(order['funcs'][i](1) for i in pos)
self.assertEqual(order['pos'], (2, 0, 1))
self.assertEqual(descend, (-1, -1, 1))
def test_compile_components_sort_key_function(self):
key_func = sorter.compile_components_sort_key_function('hsl')
self.assertEqual(key_func((1, 2, 3)), (1, 2, 3))
key_func = sorter.compile_components_sort_key_function('HSL')
self.assertEqual(key_func((1, 2, 3)), (-1, -2, -3))
key_func = sorter.compile_components_sort_key_function('lHs')
self.assertEqual(key_func((1, 2, 3)), (3, -1, 2))
key_func = sorter.compile_components_sort_key_function('rgb')
self.assertEqual(key_func((1, 2, 3)), (1, 2, 3))
key_func = sorter.compile_components_sort_key_function('bRG')
self.assertEqual(key_func((1, 2, 3)), (3, -1, -2))
def test_compile_hex_sort_key_function(self):
hsl_hex = utils.hsl_to_hex((20, 80, 50))
rgb_hex = utils.rgb_to_hex((10, 165, 70))
key_func = sorter.compile_hex_sort_key_function('hsl')
for k, h in zip(key_func(hsl_hex), (20, 80, 50)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compile_hex_sort_key_function('HSL')
for k, h in zip(key_func(hsl_hex), (-20, -80, -50)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compile_hex_sort_key_function('lHs')
for k, h in zip(key_func(hsl_hex), (50, -20, 80)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compile_hex_sort_key_function('rgb')
for k, h in zip(key_func(rgb_hex), (10, 165, 70)):
            self.assertEqual(k, h)
key_func = sorter.compile_hex_sort_key_function('bRG')
for k, h in zip(key_func(rgb_hex), (70, -10, -165)):
            self.assertEqual(k, h)
def test_compile_color_sort_key_function(self):
hsl = Color.from_hsl((20, 80, 50))
rgb = Color((10, 165, 70))
key_func = sorter.compile_color_sort_key_function('hsl')
for k, h in zip(key_func(hsl), (20, 80, 50)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compile_color_sort_key_function('HSL')
for k, h in zip(key_func(hsl), (-20, -80, -50)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compile_color_sort_key_function('lHs')
for k, h in zip(key_func(hsl), (50, -20, 80)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compile_color_sort_key_function('rgb')
for k, h in zip(key_func(rgb), (10, 165, 70)):
            self.assertEqual(k, h)
key_func = sorter.compile_color_sort_key_function('bRG')
for k, h in zip(key_func(rgb), (70, -10, -165)):
            self.assertEqual(k, h)
def test_compose_key_function(self):
hsl = Color.from_hsl((20, 80, 50))
rgb = Color((10, 165, 70))
hsl_func = sorter.compile_color_sort_key_function('lHs')
rgb_func = sorter.compile_color_sort_key_function('bRG')
key_func = sorter.compose_key_function(hsl_func)
for k, h in zip(key_func(hsl), (50, -20, 80)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compose_key_function(hsl_func, operator.itemgetter(0))
for k, h in zip(key_func([hsl]), (50, -20, 80)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compose_key_function(rgb_func)
for k, h in zip(key_func(rgb), (70, -10, -165)):
self.assertAlmostEqual(k, h, 0)
key_func = sorter.compose_key_function(rgb_func, operator.itemgetter(0))
for k, h in zip(key_func([rgb]), (70, -10, -165)):
self.assertAlmostEqual(k, h, 0)
class TestSorterSortedColor(unittest.TestCase):
def setUp(self):
self.color_names = [
'black',
'gray',
'orange',
'yellow',
'springgreen',
'blue'
]
self.color_names2 = [
'white',
'red',
'yellow',
'lime',
'blue'
]
self.prepare_colors()
def prepare_colors(self):
self.colors = [Color.from_name(c) for c in self.color_names]
self.colors2 = [Color.from_name(c) for c in self.color_names2]
self.key = None
def __assert_sorted_result(self, order, before, after):
self.assertEqual(sorter.sorted(before, order, self.key), after)
def test_rgb(self):
black, gray, orange, yellow, springgreen, blue = self.colors
order = 'rgb'
self.__assert_sorted_result(order,
[black, yellow, orange],
[black, orange, yellow])
self.__assert_sorted_result(order,
[black, yellow, orange, springgreen],
[black, springgreen, orange, yellow])
self.__assert_sorted_result(order,
[yellow, black, orange],
[black, orange, yellow])
self.__assert_sorted_result(order,
[yellow, black, orange, gray],
[black, gray, orange, yellow])
self.__assert_sorted_result(order,
[yellow, black, orange, blue],
[black, blue, orange, yellow])
order = 'grb'
self.__assert_sorted_result(order,
[black, yellow, orange],
[black, orange, yellow])
self.__assert_sorted_result(order,
[black, yellow, orange, springgreen],
[black, orange, springgreen, yellow])
self.__assert_sorted_result(order,
[yellow, black, orange],
[black, orange, yellow])
self.__assert_sorted_result(order,
[yellow, black, orange, gray],
[black, gray, orange, yellow])
self.__assert_sorted_result(order,
[yellow, black, orange, blue],
[black, blue, orange, yellow])
order = 'brg'
self.__assert_sorted_result(order,
[black, yellow, orange],
[black, orange, yellow])
self.__assert_sorted_result(order,
[black, yellow, orange, springgreen],
[black, orange, yellow, springgreen])
self.__assert_sorted_result(order,
[yellow, black, orange],
[black, orange, yellow])
self.__assert_sorted_result(order,
[yellow, black, orange, gray],
[black, orange, yellow, gray])
self.__assert_sorted_result(order,
[yellow, black, orange, blue],
[black, orange, yellow, blue])
order = 'Rgb'
self.__assert_sorted_result(order,
[black, yellow, orange],
[orange, yellow, black])
self.__assert_sorted_result(order,
[black, yellow, orange, springgreen],
[orange, yellow, black, springgreen])
self.__assert_sorted_result(order,
[yellow, black, orange],
[orange, yellow, black])
self.__assert_sorted_result(order,
[yellow, black, orange, gray],
[orange, yellow, gray, black])
self.__assert_sorted_result(order,
[yellow, black, orange, blue],
[orange, yellow, black, blue])
def test_hsl(self):
white, red, yellow, lime, blue = self.colors2
order = 'hLS'
self.__assert_sorted_result(order,
[blue, yellow, white, red, lime],
[white, red, yellow, lime, blue])
class TestSorterSortedRGB(TestSorterSortedColor):
def prepare_colors(self):
self.colors = [Color.from_name(c).rgb for c in self.color_names]
self.colors2 = [Color.from_name(c).hsl for c in self.color_names2]
self.key = None
class TestSorterSortedHex(TestSorterSortedColor):
def prepare_colors(self):
self.colors = [Color.from_name(c).hex for c in self.color_names]
self.colors2 = [Color.from_name(c).hex for c in self.color_names2]
self.key = None
class TestSorterSortedColorInArray(TestSorterSortedColor):
def prepare_colors(self):
self.colors = [[Color.from_name(c)] for c in self.color_names]
self.colors2 = [[Color.from_name(c)] for c in self.color_names2]
self.key = operator.itemgetter(0)
class TestSorterSortedRGBInArray(TestSorterSortedColor):
def prepare_colors(self):
self.colors = [[Color.from_name(c).rgb] for c in self.color_names]
self.colors2 = [[Color.from_name(c).hsl] for c in self.color_names2]
self.key = operator.itemgetter(0)
class TestSorterSortedHexInArray(TestSorterSortedColor):
def prepare_colors(self):
self.colors = [[Color.from_name(c).hex] for c in self.color_names]
self.colors2 = [[Color.from_name(c).hex] for c in self.color_names2]
self.key = operator.itemgetter(0)
| mit | -5,100,496,469,579,807,000 | 40.347222 | 80 | 0.525949 | false |
andyzsf/django | django/contrib/gis/gdal/driver.py | 1 | 3257 | from ctypes import c_void_p
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.error import OGRException
from django.contrib.gis.gdal.prototypes import ds as vcapi, raster as rcapi
from django.utils import six
from django.utils.encoding import force_bytes, force_text
class Driver(GDALBase):
"""
Wraps a GDAL/OGR Data Source Driver.
For more information, see the C API source code:
http://www.gdal.org/gdal_8h.html - http://www.gdal.org/ogr__api_8h.html
"""
# Case-insensitive aliases for some GDAL/OGR Drivers.
# For a complete list of original driver names see
# http://www.gdal.org/ogr_formats.html (vector)
# http://www.gdal.org/formats_list.html (raster)
_alias = {
# vector
'esri': 'ESRI Shapefile',
'shp': 'ESRI Shapefile',
'shape': 'ESRI Shapefile',
'tiger': 'TIGER',
'tiger/line': 'TIGER',
# raster
'tiff': 'GTiff',
'tif': 'GTiff',
'jpeg': 'JPEG',
'jpg': 'JPEG',
}
def __init__(self, dr_input):
"""
Initializes an GDAL/OGR driver on either a string or integer input.
"""
if isinstance(dr_input, six.string_types):
# If a string name of the driver was passed in
self.ensure_registered()
# Checking the alias dictionary (case-insensitive) to see if an
# alias exists for the given driver.
if dr_input.lower() in self._alias:
name = self._alias[dr_input.lower()]
else:
name = dr_input
# Attempting to get the GDAL/OGR driver by the string name.
for iface in (vcapi, rcapi):
driver = iface.get_driver_by_name(force_bytes(name))
if driver:
break
elif isinstance(dr_input, int):
self.ensure_registered()
for iface in (vcapi, rcapi):
driver = iface.get_driver(dr_input)
if driver:
break
elif isinstance(dr_input, c_void_p):
driver = dr_input
else:
raise OGRException('Unrecognized input type for GDAL/OGR Driver: %s' % str(type(dr_input)))
# Making sure we get a valid pointer to the OGR Driver
if not driver:
raise OGRException('Could not initialize GDAL/OGR Driver on input: %s' % str(dr_input))
self.ptr = driver
def __str__(self):
return self.name
@classmethod
def ensure_registered(cls):
"""
Attempts to register all the data source drivers.
"""
# Only register all if the driver count is 0 (or else all drivers
# will be registered over and over again)
if not cls.driver_count():
vcapi.register_all()
rcapi.register_all()
@classmethod
def driver_count(cls):
"""
Returns the number of GDAL/OGR data source drivers registered.
"""
return vcapi.get_driver_count() + rcapi.get_driver_count()
@property
def name(self):
"""
Returns description/name string for this driver.
"""
return force_text(rcapi.get_driver_description(self.ptr))
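# Illustrative usage (driver name chosen as an example; lowercase aliases are
# resolved through Driver._alias):
#   drv = Driver('shp')   # equivalent to Driver('ESRI Shapefile')
#   drv.name              # -> 'ESRI Shapefile'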
| bsd-3-clause | 3,520,037,014,693,149,700 | 32.57732 | 103 | 0.580596 | false |
airbnb/streamalert | publishers/community/generic.py | 1 | 9345 | """
Copyright 2017-present Airbnb, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from collections import deque, OrderedDict
import re
from streamalert.shared.publisher import Register, AlertPublisher
from streamalert.shared.normalize import Normalizer
from streamalert.shared.utils import get_keys
@Register
def add_record(alert, publication):
"""Publisher that adds the alert.record to the publication."""
publication['record'] = alert.record
return publication
@Register
def blank(*_):
"""Erases all fields on existing publications and returns a blank dict"""
return {}
@Register
def remove_internal_fields(_, publication):
"""This publisher removes fields from DefaultPublisher that are only useful internally"""
publication.pop('staged', None)
publication.pop('publishers', None)
publication.pop('outputs', None)
return publication
def _delete_dictionary_fields(publication, regexp):
"""Deeply destroys all nested dict keys matching the given regexp string
Args:
publication (dict): A publication
regexp (str): A String that is valid regexp
Returns:
dict
(!) warning, will modify the original publication
"""
    # Iterative BFS with a queue avoids Python's recursion limits. It relies
    # on dict and list values being held by reference, so mutating next_item
    # in place updates the publication without tracking any "current scope".
fringe = deque()
fringe.append(publication)
while len(fringe) > 0:
next_item = fringe.popleft()
if isinstance(next_item, dict):
# work on a copy of the keys to avoid modifying the dict while iterating over it
for key in list(next_item.keys()):
if re.search(regexp, key):
next_item.pop(key, None)
for key, item in next_item.items():
fringe.append(item)
elif isinstance(next_item, list):
fringe.extend(next_item)
else:
# It's a leaf node, or it's some strange object that doesn't belong here
pass
return publication
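# Quick sketch of the behavior (publication contents are hypothetical):
#   _delete_dictionary_fields({'keep': 1, 'debug_info': {'x': 2}}, r'^debug')
#   # -> {'keep': 1}; the entire 'debug_info' branch is dropped in one step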
@Register
def remove_fields(alert, publication):
"""This publisher deletes fields from the current publication.
The publisher uses the alert's context to determine which fields to delete. Example:
context={
'remove_fields': ['^field1$', '^field2$', ...]
}
"remove_fields" should be an array of strings that are valid regular expressions.
The algorithm deeply searches the publication for any dict key that matches the given regular
expression. Any such key is removed, and if the value is a nested dict, the entire dict
branch underneath is removed.
"""
fields = alert.context.get('remove_fields', [])
for field in fields:
publication = _delete_dictionary_fields(publication, field)
return publication
@Register
def remove_streamalert_normalization(_, publication):
"""This publisher removes the super heavyweight 'streamalert_normalization' fields"""
return _delete_dictionary_fields(publication, Normalizer.NORMALIZATION_KEY)
@Register
def enumerate_fields(_, publication):
"""Flattens all currently published fields.
By default, publications are deeply nested dict structures. This can be very hard to read
when rendered in certain outputs. PagerDuty is one example where the default UI does a very
poor job rendering nested dicts.
This publisher collapses deeply nested fields into a single-leveled dict with keys that
correspond to the original path of each value in a deeply nested dict. For example:
{
"top1": {
"mid1": "low",
"mid2": [ "low1", "low2", "low3" ],
"mid3": {
"low1": "verylow"
}
},
"top2": "mid"
}
.. would collapse into the following structure:
{
"top1.mid1": "low",
"top1.mid2[0]": "low1",
"top1.mid2[1]": "low1",
"top1.mid2[2]": "low1",
"top1.mid3.low1: "verylow",
"top2": "mid"
}
The output dict is an OrderedDict with keys sorted in alphabetical order.
"""
def _recursive_enumerate_fields(structure, output_reference, path=''):
if isinstance(structure, list):
for index, item in enumerate(structure):
_recursive_enumerate_fields(item, output_reference, '{}[{}]'.format(path, index))
elif isinstance(structure, dict):
for key in structure:
_recursive_enumerate_fields(structure[key], output_reference, '{prefix}{key}'.format(
prefix='{}.'.format(path) if path else '', # Omit first period
key=key
))
else:
output_reference[path] = structure
output = {}
_recursive_enumerate_fields(publication, output)
return OrderedDict(sorted(output.items()))
@Register
def populate_fields(alert, publication):
"""This publisher moves all requested fields to the top level and ignores everything else.
It uses the context to determine which fields to keep. Example:
context={
'populate_fields': [ 'field1', 'field2', 'field3' ]
}
"populate_fields" should be an array of strings that are exact matches to the field names.
The algorithm deeply searches the publication for any dict key that exactly matches one of the
given fields. It then takes the contents of that field and moves them up to the top level.
It discovers ALL values matching each field, so if a field is returned multiple times, the
resulting top level field will be an array. In the special case where exactly one entry is
returned for a populate_field, the value will instead be equal to that value (instead of an
array with 1 element being that value). In the special case when no entries are returned for
an extract_field, the value will be None.
Aside from the moved fields, this publisher throws away everything else in the original
publication.
NOTE: It is possible for moved fields to continue to contain nested dicts, so do not assume
this publisher will result in a flat dictionary publication.
"""
new_publication = {}
for populate_field in alert.context.get('populate_fields', []):
extractions = get_keys(publication, populate_field)
new_publication[populate_field] = extractions
return new_publication
@Register
class StringifyArrays(AlertPublisher):
"""Deeply navigates a dict publication and coverts all scalar arrays to strings
Any array discovered with only scalar values will be joined into a single string with the
given DELIMITER. Subclass implementations of this can override the delimiter to join the
string differently.
"""
DELIMITER = '\n'
def publish(self, alert, publication):
fringe = deque()
fringe.append(publication)
while len(fringe) > 0:
next_item = fringe.popleft()
if isinstance(next_item, dict):
# Check all keys
for key, item in next_item.items():
if self.is_scalar_array(item):
next_item[key] = self.stringify(item)
else:
fringe.append(item)
elif isinstance(next_item, list):
# At this point, if the item is a list we assert that it is not a SCALAR array;
# because it is too late to stringify it, since we do not have a back reference
# to the object that contains it
fringe.extend(next_item)
else:
# It's a leaf node, or it's some strange object that doesn't belong here
pass
return publication
@staticmethod
def is_scalar_array(item):
"""Returns if the given item is a python list containing only scalar elements
NOTE: This method assumes that the 'item' provided comes from a valid JSON compliant dict.
It does not account for strange or complicated types, such as references to functions
or class definitions or other stuff.
Args:
item (mixed): The python variable to check
Returns:
bool
"""
if not isinstance(item, list):
return False
for element in item:
if isinstance(element, dict) or isinstance(element, list):
return False
return True
@classmethod
def stringify(cls, array):
"""Given a list of elements, will join them together with the publisher's DELIMITER
Args:
array (list): The array of elements.
Returns:
str
"""
return cls.DELIMITER.join([str(elem) for elem in array])
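# e.g. publishing {'tags': ['a', 'b'], 'ctx': [{'k': 1}]} turns 'tags' into
# the single string 'a\nb', while 'ctx' is not a scalar array and is instead
# traversed for nested scalar arrays.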
| apache-2.0 | 7,216,611,972,438,715,000 | 33.230769 | 101 | 0.656929 | false |
i-kiwamu/jwis | jwis/__init__.py | 1 | 1554 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import date
from .jwislib import JWIS
try:
input = raw_input
except NameError:
pass
def ask_date():
print("Beginning date")
d_start_year = int(input(" year? "))
d_start_month = int(input(" month? "))
d_start_date = int(input(" date? "))
d_start = date(d_start_year, d_start_month, d_start_date)
print("Final date")
d_end_year = int(input(" year? "))
d_end_month = int(input(" month? "))
d_end_date = int(input(" date? "))
d_end = date(d_end_year, d_end_month, d_end_date)
return (d_start, d_end)
def ask_obs_type():
print("Choose type of observation")
print(" 1: flow rate & height")
print(" 2: dam")
obs_type = input(" Selection: ")
return int(obs_type)
def ask_observatory():
obs_id = input("Input observatory ID: ")
return obs_id
def main():
date_periods = ask_date()
d_start = date_periods[0]
d_end = date_periods[1]
if d_start > d_end:
d_start, d_end = d_end, d_start
obs_type = ask_obs_type()
obs_id = ask_observatory()
output_filename = input("saving file name? ")
jwis = JWIS(obs_type, obs_id, d_start, d_end, "NO")
if obs_type == 1: # flow rate & height
jwis_table = jwis.retrieve_hq_data()
elif obs_type == 2: # dam
jwis_table = jwis.retrieve_data('1')
jwis_table.to_csv(output_filename)
print("Done")
| gpl-3.0 | -16,430,081,994,334,884 | 22.907692 | 61 | 0.593951 | false |
ericmjl/influenza-reassortment-detector | second_search.py | 1 | 1571 | import networkx as nx
import numpy as np
import pickle as pkl
import sys
class SecondSearchIdentifier(object):
"""
SecondSearchIdentifier
Identifies isolates for which a source pair search will be performed.
"""
def __init__(self, handle, percentile):
super(SecondSearchIdentifier, self).__init__()
self.handle = handle
self.percentile = percentile
self.G = None
self.pwi_distribution = []
self.cutoff_pwi = None
self.source_pair_nodes = []
def run(self):
self.G = nx.read_gpickle('{0} Full Complement Graph.pkl'.format(self.handle))
self.identify_sourceless_isolates()
self.get_pwi_distribution()
self.compute_cutoff_pwi()
self.identify_lowpwi_isolates()
self.write_second_search_list()
def identify_sourceless_isolates(self):
for n, d in self.G.nodes(data=True):
if len(self.G.in_edges(n)) == 0:
self.source_pair_nodes.append(n)
def get_pwi_distribution(self):
for n1, n2, d in self.G.edges(data=True):
self.pwi_distribution.append(d['pwi'])
def compute_cutoff_pwi(self):
self.cutoff_pwi = np.percentile(self.pwi_distribution, self.percentile)
def identify_lowpwi_isolates(self):
for n1, n2, d in self.G.edges(data=True):
if d['pwi'] < self.cutoff_pwi:
self.source_pair_nodes.append(n2)
def write_second_search_list(self):
with open('{0} Isolates for Source Pair Search.pkllist'.format(self.handle), 'wb') as f:
pkl.dump(self.source_pair_nodes, f)
if __name__ == '__main__':
handle = sys.argv[1]
percentile = int(sys.argv[2])
ssi = SecondSearchIdentifier(handle, percentile)
ssi.run() | mit | -8,981,593,487,873,865,000 | 27.071429 | 90 | 0.705283 | false |
kushalbhola/MyStuff | Practice/PythonApplication/env/Lib/site-packages/pandas/core/ops/__init__.py | 1 | 55485 | """
Arithmetic operations for PandasObjects
This is not a public API.
"""
import datetime
import operator
import textwrap
from typing import Any, Callable
import warnings
import numpy as np
from pandas._libs import Timedelta, Timestamp, lib, ops as libops
from pandas.errors import NullFrequencyError
from pandas.util._decorators import Appender
from pandas.core.dtypes.cast import (
construct_1d_object_array_from_listlike,
find_common_type,
maybe_upcast_putmask,
)
from pandas.core.dtypes.common import (
ensure_object,
is_bool_dtype,
is_categorical_dtype,
is_datetime64_dtype,
is_datetime64tz_dtype,
is_datetimelike_v_numeric,
is_extension_array_dtype,
is_integer_dtype,
is_list_like,
is_object_dtype,
is_period_dtype,
is_scalar,
is_timedelta64_dtype,
needs_i8_conversion,
)
from pandas.core.dtypes.generic import (
ABCDataFrame,
ABCDatetimeArray,
ABCIndex,
ABCIndexClass,
ABCSeries,
ABCSparseArray,
ABCSparseSeries,
ABCTimedeltaArray,
)
from pandas.core.dtypes.missing import isna, notna
import pandas as pd
from pandas._typing import ArrayLike
import pandas.core.common as com
from . import missing
from .docstrings import (
_arith_doc_FRAME,
_flex_comp_doc_FRAME,
_make_flex_doc,
_op_descriptions,
)
from .roperator import ( # noqa:F401
radd,
rand_,
rdiv,
rdivmod,
rfloordiv,
rmod,
rmul,
ror_,
rpow,
rsub,
rtruediv,
rxor,
)
# -----------------------------------------------------------------------------
# Ops Wrapping Utilities
def get_op_result_name(left, right):
"""
Find the appropriate name to pin to an operation result. This result
should always be either an Index or a Series.
Parameters
----------
left : {Series, Index}
right : object
Returns
-------
name : object
Usually a string
"""
# `left` is always a pd.Series when called from within ops
if isinstance(right, (ABCSeries, ABCIndexClass)):
name = _maybe_match_name(left, right)
else:
name = left.name
return name
def _maybe_match_name(a, b):
"""
Try to find a name to attach to the result of an operation between
a and b. If only one of these has a `name` attribute, return that
name. Otherwise return a consensus name if they match of None if
they have different names.
Parameters
----------
a : object
b : object
Returns
-------
name : str or None
See Also
--------
pandas.core.common.consensus_name_attr
"""
a_has = hasattr(a, "name")
b_has = hasattr(b, "name")
if a_has and b_has:
if a.name == b.name:
return a.name
else:
# TODO: what if they both have np.nan for their names?
return None
elif a_has:
return a.name
elif b_has:
return b.name
return None
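# Illustration (a sketch, not part of the original docstring):
#   _maybe_match_name(pd.Series(name="x"), pd.Series(name="x"))  # -> "x"
#   _maybe_match_name(pd.Series(name="x"), pd.Series(name="y"))  # -> None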
def maybe_upcast_for_op(obj):
"""
Cast non-pandas objects to pandas types to unify behavior of arithmetic
and comparison operations.
Parameters
----------
obj: object
Returns
-------
out : object
Notes
-----
Be careful to call this *after* determining the `name` attribute to be
attached to the result of the arithmetic operation.
"""
if type(obj) is datetime.timedelta:
# GH#22390 cast up to Timedelta to rely on Timedelta
# implementation; otherwise operation against numeric-dtype
# raises TypeError
return Timedelta(obj)
elif isinstance(obj, np.timedelta64) and not isna(obj):
# In particular non-nanosecond timedelta64 needs to be cast to
# nanoseconds, or else we get undesired behavior like
# np.timedelta64(3, 'D') / 2 == np.timedelta64(1, 'D')
# The isna check is to avoid casting timedelta64("NaT"), which would
# return NaT and incorrectly be treated as a datetime-NaT.
return Timedelta(obj)
elif isinstance(obj, np.ndarray) and is_timedelta64_dtype(obj):
# GH#22390 Unfortunately we need to special-case right-hand
# timedelta64 dtypes because numpy casts integer dtypes to
# timedelta64 when operating with timedelta64
return pd.TimedeltaIndex(obj)
return obj
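# e.g. datetime.timedelta(days=3) becomes Timedelta("3 days"), and an ndarray
# of timedelta64 values is wrapped in a TimedeltaIndex, so the subsequent op
# runs against pandas implementations rather than raw numpy semantics.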
# -----------------------------------------------------------------------------
def make_invalid_op(name):
"""
Return a binary method that always raises a TypeError.
Parameters
----------
name : str
Returns
-------
invalid_op : function
"""
def invalid_op(self, other=None):
raise TypeError(
"cannot perform {name} with this index type: "
"{typ}".format(name=name, typ=type(self).__name__)
)
invalid_op.__name__ = name
return invalid_op
def _gen_eval_kwargs(name):
"""
Find the keyword arguments to pass to numexpr for the given operation.
Parameters
----------
name : str
Returns
-------
eval_kwargs : dict
Examples
--------
>>> _gen_eval_kwargs("__add__")
{}
>>> _gen_eval_kwargs("rtruediv")
{'reversed': True, 'truediv': True}
"""
kwargs = {}
# Series appear to only pass __add__, __radd__, ...
# but DataFrame gets both these dunder names _and_ non-dunder names
# add, radd, ...
name = name.replace("__", "")
if name.startswith("r"):
if name not in ["radd", "rand", "ror", "rxor"]:
# Exclude commutative operations
kwargs["reversed"] = True
if name in ["truediv", "rtruediv"]:
kwargs["truediv"] = True
if name in ["ne"]:
kwargs["masker"] = True
return kwargs
def _get_frame_op_default_axis(name):
"""
Only DataFrame cares about default_axis, specifically:
special methods have default_axis=None and flex methods
have default_axis='columns'.
Parameters
----------
name : str
Returns
-------
default_axis: str or None
"""
if name.replace("__r", "__") in ["__and__", "__or__", "__xor__"]:
# bool methods
return "columns"
elif name.startswith("__"):
# __add__, __mul__, ...
return None
else:
# add, mul, ...
return "columns"
def _get_opstr(op, cls):
"""
Find the operation string, if any, to pass to numexpr for this
operation.
Parameters
----------
op : binary operator
cls : class
Returns
-------
op_str : string or None
"""
# numexpr is available for non-sparse classes
subtyp = getattr(cls, "_subtyp", "")
use_numexpr = "sparse" not in subtyp
if not use_numexpr:
# if we're not using numexpr, then don't pass a str_rep
return None
return {
operator.add: "+",
radd: "+",
operator.mul: "*",
rmul: "*",
operator.sub: "-",
rsub: "-",
operator.truediv: "/",
rtruediv: "/",
operator.floordiv: "//",
rfloordiv: "//",
operator.mod: None, # TODO: Why None for mod but '%' for rmod?
rmod: "%",
operator.pow: "**",
rpow: "**",
operator.eq: "==",
operator.ne: "!=",
operator.le: "<=",
operator.lt: "<",
operator.ge: ">=",
operator.gt: ">",
operator.and_: "&",
rand_: "&",
operator.or_: "|",
ror_: "|",
operator.xor: "^",
rxor: "^",
divmod: None,
rdivmod: None,
}[op]
def _get_op_name(op, special):
"""
Find the name to attach to this method according to conventions
for special and non-special methods.
Parameters
----------
op : binary operator
special : bool
Returns
-------
op_name : str
"""
opname = op.__name__.strip("_")
if special:
opname = "__{opname}__".format(opname=opname)
return opname
# -----------------------------------------------------------------------------
# Masking NA values and fallbacks for operations numpy does not support
def fill_binop(left, right, fill_value):
"""
If a non-None fill_value is given, replace null entries in left and right
with this value, but only in positions where _one_ of left/right is null,
not both.
Parameters
----------
left : array-like
right : array-like
fill_value : object
Returns
-------
left : array-like
right : array-like
Notes
-----
Makes copies if fill_value is not None
"""
# TODO: can we make a no-copy implementation?
if fill_value is not None:
left_mask = isna(left)
right_mask = isna(right)
left = left.copy()
right = right.copy()
# one but not both
mask = left_mask ^ right_mask
left[left_mask & mask] = fill_value
right[right_mask & mask] = fill_value
return left, right
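# Sketch with illustrative values -- only positions where exactly one side is
# null are filled; positions where both sides are null stay NaN:
#   left  = np.array([1.0, np.nan, np.nan])
#   right = np.array([np.nan, 2.0, np.nan])
#   fill_binop(left, right, 0)  # -> (array([1., 0., nan]), array([0., 2., nan]))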
def mask_cmp_op(x, y, op):
"""
Apply the function `op` to only non-null points in x and y.
Parameters
----------
x : array-like
y : array-like
op : binary operation
Returns
-------
result : ndarray[bool]
"""
xrav = x.ravel()
result = np.empty(x.size, dtype=bool)
if isinstance(y, (np.ndarray, ABCSeries)):
yrav = y.ravel()
mask = notna(xrav) & notna(yrav)
result[mask] = op(np.array(list(xrav[mask])), np.array(list(yrav[mask])))
else:
mask = notna(xrav)
result[mask] = op(np.array(list(xrav[mask])), y)
if op == operator.ne: # pragma: no cover
np.putmask(result, ~mask, True)
else:
np.putmask(result, ~mask, False)
result = result.reshape(x.shape)
return result
def masked_arith_op(x, y, op):
"""
If the given arithmetic operation fails, attempt it again on
only the non-null elements of the input array(s).
Parameters
----------
x : np.ndarray
y : np.ndarray, Series, Index
op : binary operator
"""
# For Series `x` is 1D so ravel() is a no-op; calling it anyway makes
# the logic valid for both Series and DataFrame ops.
xrav = x.ravel()
assert isinstance(x, (np.ndarray, ABCSeries)), type(x)
if isinstance(y, (np.ndarray, ABCSeries, ABCIndexClass)):
dtype = find_common_type([x.dtype, y.dtype])
result = np.empty(x.size, dtype=dtype)
# PeriodIndex.ravel() returns int64 dtype, so we have
# to work around that case. See GH#19956
yrav = y if is_period_dtype(y) else y.ravel()
mask = notna(xrav) & notna(yrav)
if yrav.shape != mask.shape:
# FIXME: GH#5284, GH#5035, GH#19448
# Without specifically raising here we get mismatched
# errors in Py3 (TypeError) vs Py2 (ValueError)
            # Note: this is only an issue in the DataFrame case
raise ValueError("Cannot broadcast operands together.")
if mask.any():
with np.errstate(all="ignore"):
result[mask] = op(xrav[mask], com.values_from_object(yrav[mask]))
else:
assert is_scalar(y), type(y)
assert isinstance(x, np.ndarray), type(x)
# mask is only meaningful for x
result = np.empty(x.size, dtype=x.dtype)
mask = notna(xrav)
# 1 ** np.nan is 1. So we have to unmask those.
if op == pow:
mask = np.where(x == 1, False, mask)
elif op == rpow:
mask = np.where(y == 1, False, mask)
if mask.any():
with np.errstate(all="ignore"):
result[mask] = op(xrav[mask], y)
result, changed = maybe_upcast_putmask(result, ~mask, np.nan)
result = result.reshape(x.shape) # 2D compat
return result
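# Sketch of the fallback (values illustrative): with object-dtype input,
#   x = np.array([1, np.nan, 3], dtype=object)
#   masked_arith_op(x, 2, operator.add)
# evaluates the op only at the non-null positions, then reinserts np.nan,
# giving array([3, nan, 5], dtype=object).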
def invalid_comparison(left, right, op):
"""
If a comparison has mismatched types and is not necessarily meaningful,
follow python3 conventions by:
- returning all-False for equality
- returning all-True for inequality
- raising TypeError otherwise
Parameters
----------
left : array-like
right : scalar, array-like
op : operator.{eq, ne, lt, le, gt}
Raises
------
TypeError : on inequality comparisons
"""
if op is operator.eq:
res_values = np.zeros(left.shape, dtype=bool)
elif op is operator.ne:
res_values = np.ones(left.shape, dtype=bool)
else:
raise TypeError(
"Invalid comparison between dtype={dtype} and {typ}".format(
dtype=left.dtype, typ=type(right).__name__
)
)
return res_values
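# e.g. with `arr` holding datetime64 values and an incomparable scalar:
#   invalid_comparison(arr, "foo", operator.eq)  # -> all-False bool ndarray
#   invalid_comparison(arr, "foo", operator.lt)  # -> raises TypeError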
# -----------------------------------------------------------------------------
# Dispatch logic
def should_series_dispatch(left, right, op):
"""
Identify cases where a DataFrame operation should dispatch to its
Series counterpart.
Parameters
----------
left : DataFrame
right : DataFrame
op : binary operator
Returns
-------
override : bool
"""
if left._is_mixed_type or right._is_mixed_type:
return True
if not len(left.columns) or not len(right.columns):
# ensure obj.dtypes[0] exists for each obj
return False
ldtype = left.dtypes.iloc[0]
rdtype = right.dtypes.iloc[0]
if (is_timedelta64_dtype(ldtype) and is_integer_dtype(rdtype)) or (
is_timedelta64_dtype(rdtype) and is_integer_dtype(ldtype)
):
# numpy integer dtypes as timedelta64 dtypes in this scenario
return True
if is_datetime64_dtype(ldtype) and is_object_dtype(rdtype):
# in particular case where right is an array of DateOffsets
return True
return False
def dispatch_to_series(left, right, func, str_rep=None, axis=None):
"""
Evaluate the frame operation func(left, right) by evaluating
column-by-column, dispatching to the Series implementation.
Parameters
----------
left : DataFrame
right : scalar or DataFrame
func : arithmetic or comparison operator
str_rep : str or None, default None
axis : {None, 0, 1, "index", "columns"}
Returns
-------
DataFrame
"""
# Note: we use iloc to access columns for compat with cases
# with non-unique columns.
import pandas.core.computation.expressions as expressions
right = lib.item_from_zerodim(right)
if lib.is_scalar(right) or np.ndim(right) == 0:
def column_op(a, b):
return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))}
elif isinstance(right, ABCDataFrame):
assert right._indexed_same(left)
def column_op(a, b):
return {i: func(a.iloc[:, i], b.iloc[:, i]) for i in range(len(a.columns))}
elif isinstance(right, ABCSeries) and axis == "columns":
# We only get here if called via left._combine_match_columns,
# in which case we specifically want to operate row-by-row
assert right.index.equals(left.columns)
def column_op(a, b):
return {i: func(a.iloc[:, i], b.iloc[i]) for i in range(len(a.columns))}
elif isinstance(right, ABCSeries):
assert right.index.equals(left.index) # Handle other cases later
def column_op(a, b):
return {i: func(a.iloc[:, i], b) for i in range(len(a.columns))}
else:
# Remaining cases have less-obvious dispatch rules
raise NotImplementedError(right)
new_data = expressions.evaluate(column_op, str_rep, left, right)
result = left._constructor(new_data, index=left.index, copy=False)
# Pin columns instead of passing to constructor for compat with
# non-unique columns case
result.columns = left.columns
return result
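# e.g. frame + series with axis="columns" evaluates
#   func(frame.iloc[:, i], series.iloc[i])
# once per column i, then reassembles a DataFrame with the original labels;
# str_rep, when provided, lets numexpr accelerate the column ops.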
def dispatch_to_index_op(op, left, right, index_class):
"""
Wrap Series left in the given index_class to delegate the operation op
to the index implementation. DatetimeIndex and TimedeltaIndex perform
type checking, timezone handling, overflow checks, etc.
Parameters
----------
op : binary operator (operator.add, operator.sub, ...)
left : Series
right : object
index_class : DatetimeIndex or TimedeltaIndex
Returns
-------
result : object, usually DatetimeIndex, TimedeltaIndex, or Series
"""
left_idx = index_class(left)
# avoid accidentally allowing integer add/sub. For datetime64[tz] dtypes,
# left_idx may inherit a freq from a cached DatetimeIndex.
# See discussion in GH#19147.
if getattr(left_idx, "freq", None) is not None:
left_idx = left_idx._shallow_copy(freq=None)
try:
result = op(left_idx, right)
except NullFrequencyError:
# DatetimeIndex and TimedeltaIndex with freq == None raise ValueError
# on add/sub of integers (or int-like). We re-raise as a TypeError.
raise TypeError(
"incompatible type for a datetime/timedelta "
"operation [{name}]".format(name=op.__name__)
)
return result
def dispatch_to_extension_op(op, left, right):
"""
Assume that left or right is a Series backed by an ExtensionArray,
apply the operator defined by op.
"""
# The op calls will raise TypeError if the op is not defined
# on the ExtensionArray
# unbox Series and Index to arrays
if isinstance(left, (ABCSeries, ABCIndexClass)):
new_left = left._values
else:
new_left = left
if isinstance(right, (ABCSeries, ABCIndexClass)):
new_right = right._values
else:
new_right = right
res_values = op(new_left, new_right)
res_name = get_op_result_name(left, right)
if op.__name__ in ["divmod", "rdivmod"]:
return _construct_divmod_result(left, res_values, left.index, res_name)
return _construct_result(left, res_values, left.index, res_name)
# -----------------------------------------------------------------------------
# Functions that add arithmetic methods to objects, given arithmetic factory
# methods
def _get_method_wrappers(cls):
"""
Find the appropriate operation-wrappers to use when defining flex/special
arithmetic, boolean, and comparison operations with the given class.
Parameters
----------
cls : class
Returns
-------
arith_flex : function or None
comp_flex : function or None
arith_special : function
comp_special : function
bool_special : function
Notes
-----
None is only returned for SparseArray
"""
if issubclass(cls, ABCSparseSeries):
# Be sure to catch this before ABCSeries and ABCSparseArray,
# as they will both come see SparseSeries as a subclass
arith_flex = _flex_method_SERIES
comp_flex = _flex_method_SERIES
arith_special = _arith_method_SPARSE_SERIES
comp_special = _arith_method_SPARSE_SERIES
bool_special = _bool_method_SERIES
# TODO: I don't think the functions defined by bool_method are tested
elif issubclass(cls, ABCSeries):
# Just Series; SparseSeries is caught above
arith_flex = _flex_method_SERIES
comp_flex = _flex_method_SERIES
arith_special = _arith_method_SERIES
comp_special = _comp_method_SERIES
bool_special = _bool_method_SERIES
elif issubclass(cls, ABCDataFrame):
# Same for DataFrame and SparseDataFrame
arith_flex = _arith_method_FRAME
comp_flex = _flex_comp_method_FRAME
arith_special = _arith_method_FRAME
comp_special = _comp_method_FRAME
bool_special = _arith_method_FRAME
return arith_flex, comp_flex, arith_special, comp_special, bool_special
def _create_methods(cls, arith_method, comp_method, bool_method, special):
# creates actual methods based upon arithmetic, comp and bool method
# constructors.
have_divmod = issubclass(cls, ABCSeries)
# divmod is available for Series and SparseSeries
# yapf: disable
new_methods = dict(
add=arith_method(cls, operator.add, special),
radd=arith_method(cls, radd, special),
sub=arith_method(cls, operator.sub, special),
mul=arith_method(cls, operator.mul, special),
truediv=arith_method(cls, operator.truediv, special),
floordiv=arith_method(cls, operator.floordiv, special),
        # Causes a floating point exception in the tests when numexpr is
        # enabled, so for now no speedup
mod=arith_method(cls, operator.mod, special),
pow=arith_method(cls, operator.pow, special),
# not entirely sure why this is necessary, but previously was included
# so it's here to maintain compatibility
rmul=arith_method(cls, rmul, special),
rsub=arith_method(cls, rsub, special),
rtruediv=arith_method(cls, rtruediv, special),
rfloordiv=arith_method(cls, rfloordiv, special),
rpow=arith_method(cls, rpow, special),
rmod=arith_method(cls, rmod, special))
# yapf: enable
new_methods["div"] = new_methods["truediv"]
new_methods["rdiv"] = new_methods["rtruediv"]
if have_divmod:
# divmod doesn't have an op that is supported by numexpr
new_methods["divmod"] = arith_method(cls, divmod, special)
new_methods["rdivmod"] = arith_method(cls, rdivmod, special)
new_methods.update(
dict(
eq=comp_method(cls, operator.eq, special),
ne=comp_method(cls, operator.ne, special),
lt=comp_method(cls, operator.lt, special),
gt=comp_method(cls, operator.gt, special),
le=comp_method(cls, operator.le, special),
ge=comp_method(cls, operator.ge, special),
)
)
if bool_method:
new_methods.update(
dict(
and_=bool_method(cls, operator.and_, special),
or_=bool_method(cls, operator.or_, special),
# For some reason ``^`` wasn't used in original.
xor=bool_method(cls, operator.xor, special),
rand_=bool_method(cls, rand_, special),
ror_=bool_method(cls, ror_, special),
rxor=bool_method(cls, rxor, special),
)
)
if special:
dunderize = lambda x: "__{name}__".format(name=x.strip("_"))
else:
dunderize = lambda x: x
new_methods = {dunderize(k): v for k, v in new_methods.items()}
return new_methods
def add_methods(cls, new_methods):
for name, method in new_methods.items():
# For most methods, if we find that the class already has a method
# of the same name, it is OK to over-write it. The exception is
# inplace methods (__iadd__, __isub__, ...) for SparseArray, which
# retain the np.ndarray versions.
force = not (issubclass(cls, ABCSparseArray) and name.startswith("__i"))
if force or name not in cls.__dict__:
setattr(cls, name, method)
# ----------------------------------------------------------------------
# Arithmetic
def add_special_arithmetic_methods(cls):
"""
Adds the full suite of special arithmetic methods (``__add__``,
``__sub__``, etc.) to the class.
Parameters
----------
cls : class
special methods will be defined and pinned to this class
"""
_, _, arith_method, comp_method, bool_method = _get_method_wrappers(cls)
new_methods = _create_methods(
cls, arith_method, comp_method, bool_method, special=True
)
    # inplace operators (I feel like these should get passed an `inplace=True`,
    # or just be removed)
def _wrap_inplace_method(method):
"""
return an inplace wrapper for this method
"""
def f(self, other):
result = method(self, other)
# this makes sure that we are aligned like the input
# we are updating inplace so we want to ignore is_copy
self._update_inplace(
result.reindex_like(self, copy=False)._data, verify_is_copy=False
)
return self
f.__name__ = "__i{name}__".format(name=method.__name__.strip("__"))
return f
new_methods.update(
dict(
__iadd__=_wrap_inplace_method(new_methods["__add__"]),
__isub__=_wrap_inplace_method(new_methods["__sub__"]),
__imul__=_wrap_inplace_method(new_methods["__mul__"]),
__itruediv__=_wrap_inplace_method(new_methods["__truediv__"]),
__ifloordiv__=_wrap_inplace_method(new_methods["__floordiv__"]),
__imod__=_wrap_inplace_method(new_methods["__mod__"]),
__ipow__=_wrap_inplace_method(new_methods["__pow__"]),
)
)
new_methods.update(
dict(
__iand__=_wrap_inplace_method(new_methods["__and__"]),
__ior__=_wrap_inplace_method(new_methods["__or__"]),
__ixor__=_wrap_inplace_method(new_methods["__xor__"]),
)
)
add_methods(cls, new_methods=new_methods)
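# Example (illustrative sketch, assuming a class that has been through
# add_special_arithmetic_methods, e.g. pd.Series). The pinned inplace methods
# compute via the regular op and then write back through _update_inplace:
#
#   >>> import pandas as pd
#   >>> s = pd.Series([1, 2, 3])
#   >>> s += 1          # __iadd__ -> __add__, then realign/update in place
#   >>> list(s)
#   [2, 3, 4]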
def add_flex_arithmetic_methods(cls):
"""
Adds the full suite of flex arithmetic methods (``pow``, ``mul``, ``add``)
to the class.
Parameters
----------
cls : class
flex methods will be defined and pinned to this class
"""
flex_arith_method, flex_comp_method, _, _, _ = _get_method_wrappers(cls)
new_methods = _create_methods(
cls, flex_arith_method, flex_comp_method, bool_method=None, special=False
)
new_methods.update(
dict(
multiply=new_methods["mul"],
subtract=new_methods["sub"],
divide=new_methods["div"],
)
)
# opt out of bool flex methods for now
assert not any(kname in new_methods for kname in ("ror_", "rxor", "rand_"))
add_methods(cls, new_methods=new_methods)
# -----------------------------------------------------------------------------
# Series
def _align_method_SERIES(left, right, align_asobject=False):
""" align lhs and rhs Series """
    # TODO: unlike _align_method_FRAME, list, tuple and ndarray are not
    # coerced here, because Series has inconsistencies described in GH#13637
if isinstance(right, ABCSeries):
# avoid repeated alignment
if not left.index.equals(right.index):
if align_asobject:
# to keep original value's dtype for bool ops
left = left.astype(object)
right = right.astype(object)
left, right = left.align(right, copy=False)
return left, right
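# Example (illustrative sketch; `a` and `b` are hypothetical). Mismatched
# indexes are outer-joined before the op, so both operands end up with the
# same labels (missing entries become NaN):
#
#   >>> import pandas as pd
#   >>> a = pd.Series([1, 2], index=["x", "y"])
#   >>> b = pd.Series([10], index=["y"])
#   >>> left, right = _align_method_SERIES(a, b)
#   >>> list(left.index) == list(right.index) == ["x", "y"]
#   True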
def _construct_result(left, result, index, name, dtype=None):
"""
If the raw op result has a non-None name (e.g. it is an Index object) and
the name argument is None, then passing name to the constructor will
not be enough; we still need to override the name attribute.
"""
out = left._constructor(result, index=index, dtype=dtype)
out = out.__finalize__(left)
out.name = name
return out
def _construct_divmod_result(left, result, index, name, dtype=None):
"""divmod returns a tuple of like indexed series instead of a single series.
"""
return (
_construct_result(left, result[0], index=index, name=name, dtype=dtype),
_construct_result(left, result[1], index=index, name=name, dtype=dtype),
)
def _arith_method_SERIES(cls, op, special):
"""
Wrapper function for Series arithmetic operations, to avoid
code duplication.
"""
str_rep = _get_opstr(op, cls)
op_name = _get_op_name(op, special)
eval_kwargs = _gen_eval_kwargs(op_name)
construct_result = (
_construct_divmod_result if op in [divmod, rdivmod] else _construct_result
)
def na_op(x, y):
"""
        Return the result of evaluating op on the passed-in values.
        If native types are not compatible, try coercion to object dtype.
Parameters
----------
x : array-like
y : array-like or scalar
Returns
-------
array-like
Raises
------
TypeError : invalid operation
"""
import pandas.core.computation.expressions as expressions
try:
result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs)
except TypeError:
result = masked_arith_op(x, y, op)
return missing.dispatch_fill_zeros(op, x, y, result)
def wrapper(left, right):
if isinstance(right, ABCDataFrame):
return NotImplemented
left, right = _align_method_SERIES(left, right)
res_name = get_op_result_name(left, right)
right = maybe_upcast_for_op(right)
if is_categorical_dtype(left):
raise TypeError(
"{typ} cannot perform the operation "
"{op}".format(typ=type(left).__name__, op=str_rep)
)
elif is_datetime64_dtype(left) or is_datetime64tz_dtype(left):
# Give dispatch_to_index_op a chance for tests like
# test_dt64_series_add_intlike, which the index dispatching handles
# specifically.
result = dispatch_to_index_op(op, left, right, pd.DatetimeIndex)
return construct_result(
left, result, index=left.index, name=res_name, dtype=result.dtype
)
elif is_extension_array_dtype(left) or (
is_extension_array_dtype(right) and not is_scalar(right)
):
# GH#22378 disallow scalar to exclude e.g. "category", "Int64"
return dispatch_to_extension_op(op, left, right)
elif is_timedelta64_dtype(left):
result = dispatch_to_index_op(op, left, right, pd.TimedeltaIndex)
return construct_result(left, result, index=left.index, name=res_name)
elif is_timedelta64_dtype(right):
# We should only get here with non-scalar or timedelta64('NaT')
# values for right
# Note: we cannot use dispatch_to_index_op because
# that may incorrectly raise TypeError when we
# should get NullFrequencyError
orig_right = right
if is_scalar(right):
# broadcast and wrap in a TimedeltaIndex
assert np.isnat(right)
right = np.broadcast_to(right, left.shape)
right = pd.TimedeltaIndex(right)
assert isinstance(right, (pd.TimedeltaIndex, ABCTimedeltaArray, ABCSeries))
try:
result = op(left._values, right)
except NullFrequencyError:
if orig_right is not right:
# i.e. scalar timedelta64('NaT')
# We get a NullFrequencyError because we broadcast to
# TimedeltaIndex, but this should be TypeError.
raise TypeError(
"incompatible type for a datetime/timedelta "
"operation [{name}]".format(name=op.__name__)
)
raise
# We do not pass dtype to ensure that the Series constructor
# does inference in the case where `result` has object-dtype.
return construct_result(left, result, index=left.index, name=res_name)
elif isinstance(right, (ABCDatetimeArray, pd.DatetimeIndex)):
result = op(left._values, right)
return construct_result(left, result, index=left.index, name=res_name)
lvalues = left.values
rvalues = right
if isinstance(rvalues, (ABCSeries, ABCIndexClass)):
rvalues = rvalues._values
with np.errstate(all="ignore"):
result = na_op(lvalues, rvalues)
return construct_result(
left, result, index=left.index, name=res_name, dtype=None
)
wrapper.__name__ = op_name
return wrapper
def _comp_method_OBJECT_ARRAY(op, x, y):
if isinstance(y, list):
y = construct_1d_object_array_from_listlike(y)
if isinstance(y, (np.ndarray, ABCSeries, ABCIndex)):
if not is_object_dtype(y.dtype):
y = y.astype(np.object_)
if isinstance(y, (ABCSeries, ABCIndex)):
y = y.values
result = libops.vec_compare(x, y, op)
else:
result = libops.scalar_compare(x, y, op)
return result
def _comp_method_SERIES(cls, op, special):
"""
    Wrapper function for Series comparison operations, to avoid
    code duplication.
"""
op_name = _get_op_name(op, special)
masker = _gen_eval_kwargs(op_name).get("masker", False)
def na_op(x, y):
# TODO:
        # should have guarantees on what x, y can be type-wise
# Extension Dtypes are not called here
# Checking that cases that were once handled here are no longer
# reachable.
assert not (is_categorical_dtype(y) and not is_scalar(y))
if is_object_dtype(x.dtype):
result = _comp_method_OBJECT_ARRAY(op, x, y)
elif is_datetimelike_v_numeric(x, y):
return invalid_comparison(x, y, op)
else:
# we want to compare like types
# we only want to convert to integer like if
# we are not NotImplemented, otherwise
# we would allow datetime64 (but viewed as i8) against
# integer comparisons
# we have a datetime/timedelta and may need to convert
assert not needs_i8_conversion(x)
mask = None
if not is_scalar(y) and needs_i8_conversion(y):
mask = isna(x) | isna(y)
y = y.view("i8")
x = x.view("i8")
method = getattr(x, op_name, None)
if method is not None:
with np.errstate(all="ignore"):
result = method(y)
if result is NotImplemented:
return invalid_comparison(x, y, op)
else:
result = op(x, y)
if mask is not None and mask.any():
result[mask] = masker
return result
def wrapper(self, other, axis=None):
# Validate the axis parameter
if axis is not None:
self._get_axis_number(axis)
res_name = get_op_result_name(self, other)
if isinstance(other, list):
# TODO: same for tuples?
other = np.asarray(other)
if isinstance(other, ABCDataFrame): # pragma: no cover
# Defer to DataFrame implementation; fail early
return NotImplemented
elif isinstance(other, ABCSeries) and not self._indexed_same(other):
            raise ValueError("Can only compare identically-labeled Series objects")
elif is_categorical_dtype(self):
# Dispatch to Categorical implementation; pd.CategoricalIndex
# behavior is non-canonical GH#19513
res_values = dispatch_to_index_op(op, self, other, pd.Categorical)
return self._constructor(res_values, index=self.index, name=res_name)
elif is_datetime64_dtype(self) or is_datetime64tz_dtype(self):
# Dispatch to DatetimeIndex to ensure identical
# Series/Index behavior
if isinstance(other, datetime.date) and not isinstance(
other, datetime.datetime
):
# https://github.com/pandas-dev/pandas/issues/21152
# Compatibility for difference between Series comparison w/
# datetime and date
msg = (
"Comparing Series of datetimes with 'datetime.date'. "
"Currently, the 'datetime.date' is coerced to a "
"datetime. In the future pandas will not coerce, "
"and {future}. "
"To retain the current behavior, "
"convert the 'datetime.date' to a datetime with "
"'pd.Timestamp'."
)
if op in {operator.lt, operator.le, operator.gt, operator.ge}:
future = "a TypeError will be raised"
else:
                    future = (
                        "the values will not compare equal to the "
                        "'datetime.date'"
                    )
msg = "\n".join(textwrap.wrap(msg.format(future=future)))
warnings.warn(msg, FutureWarning, stacklevel=2)
other = Timestamp(other)
res_values = dispatch_to_index_op(op, self, other, pd.DatetimeIndex)
return self._constructor(res_values, index=self.index, name=res_name)
elif is_timedelta64_dtype(self):
res_values = dispatch_to_index_op(op, self, other, pd.TimedeltaIndex)
return self._constructor(res_values, index=self.index, name=res_name)
elif is_extension_array_dtype(self) or (
is_extension_array_dtype(other) and not is_scalar(other)
):
# Note: the `not is_scalar(other)` condition rules out
# e.g. other == "category"
return dispatch_to_extension_op(op, self, other)
elif isinstance(other, ABCSeries):
# By this point we have checked that self._indexed_same(other)
res_values = na_op(self.values, other.values)
# rename is needed in case res_name is None and res_values.name
# is not.
return self._constructor(
res_values, index=self.index, name=res_name
).rename(res_name)
elif isinstance(other, (np.ndarray, ABCIndexClass)):
# do not check length of zerodim array
# as it will broadcast
if other.ndim != 0 and len(self) != len(other):
raise ValueError("Lengths must match to compare")
res_values = na_op(self.values, np.asarray(other))
result = self._constructor(res_values, index=self.index)
# rename is needed in case res_name is None and self.name
# is not.
return result.__finalize__(self).rename(res_name)
elif is_scalar(other) and isna(other):
# numpy does not like comparisons vs None
if op is operator.ne:
res_values = np.ones(len(self), dtype=bool)
else:
res_values = np.zeros(len(self), dtype=bool)
return self._constructor(
res_values, index=self.index, name=res_name, dtype="bool"
)
else:
values = self.to_numpy()
with np.errstate(all="ignore"):
res = na_op(values, other)
if is_scalar(res):
raise TypeError(
"Could not compare {typ} type with Series".format(typ=type(other))
)
# always return a full value series here
res_values = com.values_from_object(res)
return self._constructor(
res_values, index=self.index, name=res_name, dtype="bool"
)
wrapper.__name__ = op_name
return wrapper
def _bool_method_SERIES(cls, op, special):
"""
    Wrapper function for Series boolean operations, to avoid
    code duplication.
"""
op_name = _get_op_name(op, special)
def na_op(x, y):
try:
result = op(x, y)
except TypeError:
assert not isinstance(y, (list, ABCSeries, ABCIndexClass))
if isinstance(y, np.ndarray):
# bool-bool dtype operations should be OK, should not get here
assert not (is_bool_dtype(x) and is_bool_dtype(y))
x = ensure_object(x)
y = ensure_object(y)
result = libops.vec_binop(x, y, op)
else:
# let null fall thru
assert lib.is_scalar(y)
if not isna(y):
y = bool(y)
try:
result = libops.scalar_binop(x, y, op)
except (
TypeError,
ValueError,
AttributeError,
OverflowError,
NotImplementedError,
):
raise TypeError(
"cannot compare a dtyped [{dtype}] array "
"with a scalar of type [{typ}]".format(
dtype=x.dtype, typ=type(y).__name__
)
)
return result
fill_int = lambda x: x.fillna(0)
fill_bool = lambda x: x.fillna(False).astype(bool)
def wrapper(self, other):
is_self_int_dtype = is_integer_dtype(self.dtype)
self, other = _align_method_SERIES(self, other, align_asobject=True)
res_name = get_op_result_name(self, other)
if isinstance(other, ABCDataFrame):
# Defer to DataFrame implementation; fail early
return NotImplemented
elif isinstance(other, (ABCSeries, ABCIndexClass)):
is_other_int_dtype = is_integer_dtype(other.dtype)
other = fill_int(other) if is_other_int_dtype else fill_bool(other)
ovalues = other.values
finalizer = lambda x: x
else:
# scalars, list, tuple, np.array
is_other_int_dtype = is_integer_dtype(np.asarray(other))
if is_list_like(other) and not isinstance(other, np.ndarray):
# TODO: Can we do this before the is_integer_dtype check?
# could the is_integer_dtype check be checking the wrong
# thing? e.g. other = [[0, 1], [2, 3], [4, 5]]?
other = construct_1d_object_array_from_listlike(other)
ovalues = other
finalizer = lambda x: x.__finalize__(self)
# For int vs int `^`, `|`, `&` are bitwise operators and return
# integer dtypes. Otherwise these are boolean ops
filler = fill_int if is_self_int_dtype and is_other_int_dtype else fill_bool
res_values = na_op(self.values, ovalues)
unfilled = self._constructor(res_values, index=self.index, name=res_name)
filled = filler(unfilled)
return finalizer(filled)
wrapper.__name__ = op_name
return wrapper
def _flex_method_SERIES(cls, op, special):
name = _get_op_name(op, special)
doc = _make_flex_doc(name, "series")
@Appender(doc)
def flex_wrapper(self, other, level=None, fill_value=None, axis=0):
# validate axis
if axis is not None:
self._get_axis_number(axis)
if isinstance(other, ABCSeries):
return self._binop(other, op, level=level, fill_value=fill_value)
elif isinstance(other, (np.ndarray, list, tuple)):
if len(other) != len(self):
raise ValueError("Lengths must be equal")
other = self._constructor(other, self.index)
return self._binop(other, op, level=level, fill_value=fill_value)
else:
if fill_value is not None:
self = self.fillna(fill_value)
return self._constructor(op(self, other), self.index).__finalize__(self)
flex_wrapper.__name__ = name
return flex_wrapper
# -----------------------------------------------------------------------------
# DataFrame
def _combine_series_frame(self, other, func, fill_value=None, axis=None, level=None):
"""
Apply binary operator `func` to self, other using alignment and fill
conventions determined by the fill_value, axis, and level kwargs.
Parameters
----------
self : DataFrame
other : Series
func : binary operator
fill_value : object, default None
axis : {0, 1, 'columns', 'index', None}, default None
level : int or None, default None
Returns
-------
result : DataFrame
"""
if fill_value is not None:
raise NotImplementedError(
"fill_value {fill} not supported.".format(fill=fill_value)
)
if axis is not None:
axis = self._get_axis_number(axis)
if axis == 0:
return self._combine_match_index(other, func, level=level)
else:
return self._combine_match_columns(other, func, level=level)
else:
if not len(other):
return self * np.nan
if not len(self):
                # Ambiguous case, use _series so this works with DataFrame
return self._constructor(
data=self._series, index=self.index, columns=self.columns
)
# default axis is columns
return self._combine_match_columns(other, func, level=level)
def _align_method_FRAME(left, right, axis):
""" convert rhs to meet lhs dims if input is list, tuple or np.ndarray """
def to_series(right):
        msg = (
            "Unable to coerce to Series, length must be {req_len}: "
            "given {given_len}"
        )
if axis is not None and left._get_axis_name(axis) == "index":
if len(left.index) != len(right):
raise ValueError(
msg.format(req_len=len(left.index), given_len=len(right))
)
right = left._constructor_sliced(right, index=left.index)
else:
if len(left.columns) != len(right):
raise ValueError(
msg.format(req_len=len(left.columns), given_len=len(right))
)
right = left._constructor_sliced(right, index=left.columns)
return right
if isinstance(right, np.ndarray):
if right.ndim == 1:
right = to_series(right)
elif right.ndim == 2:
if right.shape == left.shape:
right = left._constructor(right, index=left.index, columns=left.columns)
elif right.shape[0] == left.shape[0] and right.shape[1] == 1:
# Broadcast across columns
right = np.broadcast_to(right, left.shape)
right = left._constructor(right, index=left.index, columns=left.columns)
elif right.shape[1] == left.shape[1] and right.shape[0] == 1:
# Broadcast along rows
right = to_series(right[0, :])
else:
raise ValueError(
"Unable to coerce to DataFrame, shape "
"must be {req_shape}: given {given_shape}".format(
req_shape=left.shape, given_shape=right.shape
)
)
elif right.ndim > 2:
raise ValueError(
"Unable to coerce to Series/DataFrame, dim "
"must be <= 2: {dim}".format(dim=right.shape)
)
elif is_list_like(right) and not isinstance(right, (ABCSeries, ABCDataFrame)):
# GH17901
right = to_series(right)
return right
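# Example (illustrative sketch; the frame and array are hypothetical). A raw
# 1-d ndarray on the rhs is coerced to a Series indexed like the frame's
# columns before the op is dispatched:
#
#   >>> import numpy as np
#   >>> import pandas as pd
#   >>> df = pd.DataFrame({"a": [1, 2], "b": [3, 4]})
#   >>> rhs = _align_method_FRAME(df, np.array([10, 20]), axis="columns")
#   >>> list(rhs.index)
#   ['a', 'b']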
def _arith_method_FRAME(cls, op, special):
str_rep = _get_opstr(op, cls)
op_name = _get_op_name(op, special)
eval_kwargs = _gen_eval_kwargs(op_name)
default_axis = _get_frame_op_default_axis(op_name)
def na_op(x, y):
import pandas.core.computation.expressions as expressions
try:
result = expressions.evaluate(op, str_rep, x, y, **eval_kwargs)
except TypeError:
result = masked_arith_op(x, y, op)
return missing.dispatch_fill_zeros(op, x, y, result)
if op_name in _op_descriptions:
# i.e. include "add" but not "__add__"
doc = _make_flex_doc(op_name, "dataframe")
else:
doc = _arith_doc_FRAME % op_name
@Appender(doc)
def f(self, other, axis=default_axis, level=None, fill_value=None):
other = _align_method_FRAME(self, other, axis)
if isinstance(other, ABCDataFrame):
# Another DataFrame
pass_op = op if should_series_dispatch(self, other, op) else na_op
return self._combine_frame(other, pass_op, fill_value, level)
elif isinstance(other, ABCSeries):
# For these values of `axis`, we end up dispatching to Series op,
# so do not want the masked op.
pass_op = op if axis in [0, "columns", None] else na_op
return _combine_series_frame(
self, other, pass_op, fill_value=fill_value, axis=axis, level=level
)
else:
if fill_value is not None:
self = self.fillna(fill_value)
assert np.ndim(other) == 0
return self._combine_const(other, op)
f.__name__ = op_name
return f
def _flex_comp_method_FRAME(cls, op, special):
str_rep = _get_opstr(op, cls)
op_name = _get_op_name(op, special)
default_axis = _get_frame_op_default_axis(op_name)
def na_op(x, y):
try:
with np.errstate(invalid="ignore"):
result = op(x, y)
except TypeError:
result = mask_cmp_op(x, y, op)
return result
doc = _flex_comp_doc_FRAME.format(
op_name=op_name, desc=_op_descriptions[op_name]["desc"]
)
@Appender(doc)
def f(self, other, axis=default_axis, level=None):
other = _align_method_FRAME(self, other, axis)
if isinstance(other, ABCDataFrame):
# Another DataFrame
if not self._indexed_same(other):
self, other = self.align(other, "outer", level=level, copy=False)
return dispatch_to_series(self, other, na_op, str_rep)
elif isinstance(other, ABCSeries):
return _combine_series_frame(
self, other, na_op, fill_value=None, axis=axis, level=level
)
else:
assert np.ndim(other) == 0, other
return self._combine_const(other, na_op)
f.__name__ = op_name
return f
def _comp_method_FRAME(cls, func, special):
str_rep = _get_opstr(func, cls)
op_name = _get_op_name(func, special)
@Appender("Wrapper for comparison method {name}".format(name=op_name))
def f(self, other):
other = _align_method_FRAME(self, other, axis=None)
if isinstance(other, ABCDataFrame):
# Another DataFrame
if not self._indexed_same(other):
                raise ValueError(
                    "Can only compare identically-labeled DataFrame objects"
                )
return dispatch_to_series(self, other, func, str_rep)
elif isinstance(other, ABCSeries):
return _combine_series_frame(
self, other, func, fill_value=None, axis=None, level=None
)
else:
            # For straight boolean comparisons we want to allow all columns
            # (regardless of dtype) to pass through. See GH#4537 for discussion.
res = self._combine_const(other, func)
return res.fillna(True).astype(bool)
f.__name__ = op_name
return f
# -----------------------------------------------------------------------------
# Sparse
def _cast_sparse_series_op(left, right, opname):
"""
For SparseSeries operation, coerce to float64 if the result is expected
to have NaN or inf values
Parameters
----------
left : SparseArray
right : SparseArray
opname : str
Returns
-------
left : SparseArray
right : SparseArray
"""
from pandas.core.sparse.api import SparseDtype
opname = opname.strip("_")
# TODO: This should be moved to the array?
if is_integer_dtype(left) and is_integer_dtype(right):
# series coerces to float64 if result should have NaN/inf
if opname in ("floordiv", "mod") and (right.to_dense() == 0).any():
left = left.astype(SparseDtype(np.float64, left.fill_value))
right = right.astype(SparseDtype(np.float64, right.fill_value))
elif opname in ("rfloordiv", "rmod") and (left.to_dense() == 0).any():
left = left.astype(SparseDtype(np.float64, left.fill_value))
right = right.astype(SparseDtype(np.float64, right.fill_value))
return left, right
def _arith_method_SPARSE_SERIES(cls, op, special):
"""
    Wrapper function for SparseSeries arithmetic operations, to avoid
    code duplication.
"""
op_name = _get_op_name(op, special)
def wrapper(self, other):
if isinstance(other, ABCDataFrame):
return NotImplemented
elif isinstance(other, ABCSeries):
if not isinstance(other, ABCSparseSeries):
other = other.to_sparse(fill_value=self.fill_value)
return _sparse_series_op(self, other, op, op_name)
elif is_scalar(other):
with np.errstate(all="ignore"):
new_values = op(self.values, other)
return self._constructor(new_values, index=self.index, name=self.name)
else: # pragma: no cover
raise TypeError(
"operation with {other} not supported".format(other=type(other))
)
wrapper.__name__ = op_name
return wrapper
def _sparse_series_op(left, right, op, name):
left, right = left.align(right, join="outer", copy=False)
new_index = left.index
new_name = get_op_result_name(left, right)
from pandas.core.arrays.sparse import _sparse_array_op
lvalues, rvalues = _cast_sparse_series_op(left.values, right.values, name)
result = _sparse_array_op(lvalues, rvalues, op, name)
return left._constructor(result, index=new_index, name=new_name)
def maybe_dispatch_ufunc_to_dunder_op(
self: ArrayLike, ufunc: Callable, method: str, *inputs: ArrayLike, **kwargs: Any
):
"""
Dispatch a ufunc to the equivalent dunder method.
Parameters
----------
self : ArrayLike
The array whose dunder method we dispatch to
ufunc : Callable
A NumPy ufunc
method : {'reduce', 'accumulate', 'reduceat', 'outer', 'at', '__call__'}
inputs : ArrayLike
The input arrays.
kwargs : Any
The additional keyword arguments, e.g. ``out``.
Returns
-------
result : Any
The result of applying the ufunc
"""
# special has the ufuncs we dispatch to the dunder op on
special = {
"add",
"sub",
"mul",
"pow",
"mod",
"floordiv",
"truediv",
"divmod",
"eq",
"ne",
"lt",
"gt",
"le",
"ge",
"remainder",
"matmul",
}
aliases = {
"subtract": "sub",
"multiply": "mul",
"floor_divide": "floordiv",
"true_divide": "truediv",
"power": "pow",
"remainder": "mod",
"divide": "div",
"equal": "eq",
"not_equal": "ne",
"less": "lt",
"less_equal": "le",
"greater": "gt",
"greater_equal": "ge",
}
# For op(., Array) -> Array.__r{op}__
flipped = {
"lt": "__gt__",
"le": "__ge__",
"gt": "__lt__",
"ge": "__le__",
"eq": "__eq__",
"ne": "__ne__",
}
op_name = ufunc.__name__
op_name = aliases.get(op_name, op_name)
def not_implemented(*args, **kwargs):
return NotImplemented
if method == "__call__" and op_name in special and kwargs.get("out") is None:
if isinstance(inputs[0], type(self)):
name = "__{}__".format(op_name)
return getattr(self, name, not_implemented)(inputs[1])
else:
name = flipped.get(op_name, "__r{}__".format(op_name))
return getattr(self, name, not_implemented)(inputs[0])
else:
return NotImplemented
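# Example (illustrative sketch; SomeArray is a hypothetical ExtensionArray
# whose __array_ufunc__ defers to this helper). A binary ufunc call is routed
# to the matching dunder, flipping comparisons when the array is on the right:
#
#   def __array_ufunc__(self, ufunc, method, *inputs, **kwargs):
#       result = maybe_dispatch_ufunc_to_dunder_op(
#           self, ufunc, method, *inputs, **kwargs)
#       ...
#
#   np.add(arr, other)   # -> arr.__add__(other)
#   np.less(other, arr)  # -> arr.__gt__(other), i.e. the flipped comparison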
| apache-2.0 | 6,046,639,829,361,250,000 | 30.796562 | 88 | 0.572749 | false |
debalance/hp | hp/core/constants.py | 1 | 1561 | # -*- coding: utf-8 -*-
#
# This file is part of the jabber.at homepage (https://github.com/jabber-at/hp).
#
# This project is free software: you can redistribute it and/or modify it under the terms of the
# GNU General Public License as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This project is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with this project.
# If not, see <http://www.gnu.org/licenses/>.
from django.utils.translation import ugettext_lazy as _
REGISTRATION_WEBSITE = 0
REGISTRATION_INBAND = 1
REGISTRATION_MANUAL = 2
REGISTRATION_UNKNOWN = 9
REGISTRATION_CHOICES = (
(REGISTRATION_WEBSITE, _('Via Website')),
(REGISTRATION_INBAND, _('In-Band Registration')),
(REGISTRATION_MANUAL, _('Manually')),
(REGISTRATION_UNKNOWN, _('Unknown')),
)
TARGET_URL = 0
TARGET_NAMED_URL = 1
TARGET_MODEL = 2
TARGET_CHOICES = {
TARGET_URL: _('URL'),
TARGET_NAMED_URL: _('URL Name'),
TARGET_MODEL: _('Model'),
}
# Logged user activities
ACTIVITY_REGISTER = 0
ACTIVITY_RESET_PASSWORD = 1
ACTIVITY_SET_PASSWORD = 2
ACTIVITY_SET_EMAIL = 3
ACTIVITY_FAILED_LOGIN = 4
ACTIVITY_CONTACT = 5 # used for ratelimiting
ACTIVITY_RESEND_CONFIRMATION = 6 # When the user wants to resend the confirmation
| gpl-3.0 | -4,503,386,072,711,604,000 | 32.934783 | 99 | 0.728379 | false |
conan-io/conan | conans/test/functional/generators/visual_studio_multi_test.py | 1 | 3731 | # coding=utf-8
import os
import platform
import unittest
import pytest
from parameterized import parameterized
from conans import MSBuild, tools
from conans.client.runner import ConanRunner
from conans.test.utils.mocks import MockSettings, MockConanfile, TestBufferConanOutput
from conans.test.utils.tools import TestClient
from conans.test.assets.visual_project_files import get_vs_project_files
main_cpp = r"""#include <hello.h>
int main(){
hello();
}
"""
conanfile_txt = r"""[requires]
Hello1/0.1@lasote/testing
[generators]
{generator}
"""
hello_conanfile_py = r"""from conans import ConanFile
class HelloConan(ConanFile):
name = "Hello1"
version = "0.1"
exports = '*'
def package(self):
self.copy("*.h", dst="include")
def package_info(self):
self.cpp_info.debug.defines = ["CONAN_DEBUG"]
self.cpp_info.release.defines = ["CONAN_RELEASE"]
"""
hello_h = r"""#include <iostream>
#ifdef CONAN_DEBUG
void hello(){
std::cout << "Hello Debug!!!" << std::endl;
}
#endif
#ifdef CONAN_RELEASE
void hello(){
std::cout << "Hello Release!!!" << std::endl;
}
#endif
"""
@pytest.mark.slow
@pytest.mark.tool_visual_studio
@pytest.mark.skipif(platform.system() != "Windows", reason="Requires MSBuild")
class VisualStudioMultiTest(unittest.TestCase):
@parameterized.expand([("visual_studio", "conanbuildinfo.props"),
("visual_studio_multi", "conanbuildinfo_multi.props")])
def test_build_vs_project(self, generator, props):
client = TestClient()
client.save({"conanfile.py": hello_conanfile_py,
"hello.h": hello_h})
client.run("create . lasote/testing")
files = get_vs_project_files()
files["MyProject/main.cpp"] = main_cpp
files["conanfile.txt"] = conanfile_txt.format(generator=generator)
props = os.path.join(client.current_folder, props)
old = r'<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />'
new = old + '<Import Project="{props}" />'.format(props=props)
files["MyProject/MyProject.vcxproj"] = files["MyProject/MyProject.vcxproj"].replace(old, new)
client.save(files, clean_first=True)
for build_type in ["Debug", "Release"]:
arch = "x86"
runner = ConanRunner(print_commands_to_output=True,
generate_run_log_file=False,
log_run_to_output=True,
output=TestBufferConanOutput())
settings = MockSettings({"os": "Windows",
"build_type": build_type,
"arch": arch,
"compiler": "Visual Studio",
"compiler.version": "15",
"compiler.toolset": "v141"})
conanfile = MockConanfile(settings, runner=runner)
settings = " -s os=Windows " \
" -s build_type={build_type} " \
" -s arch={arch}" \
" -s compiler=\"Visual Studio\"" \
" -s compiler.toolset=v141" \
" -s compiler.version=15".format(build_type=build_type, arch=arch)
client.run("install . %s" % settings)
with tools.chdir(client.current_folder):
msbuild = MSBuild(conanfile)
msbuild.build(project_file="MyProject.sln", build_type=build_type, arch=arch)
output = TestBufferConanOutput()
client.run_command(r"%s\MyProject.exe" % build_type)
self.assertIn("Hello %s!!!" % build_type, client.out)
| mit | -7,941,849,674,955,083,000 | 33.869159 | 101 | 0.576253 | false |
joychugh/pgoapi | pgoapi/pgoapi.py | 1 | 31870 | """
pgoapi - Pokemon Go API
Copyright (c) 2016 tjado <https://github.com/tejado>
Modifications Copyright (c) 2016 j-e-k <https://github.com/j-e-k>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
Author: tjado <https://github.com/tejado>
Modifications by: j-e-k <https://github.com/j-e-k>
"""
from __future__ import absolute_import
import json
import logging
import os.path
import pickle
import random
from collections import defaultdict
from itertools import chain, imap
from time import sleep
from expiringdict import ExpiringDict
from pgoapi.auth_google import AuthGoogle
from pgoapi.auth_ptc import AuthPtc
from pgoapi.exceptions import AuthException, ServerBusyOrOfflineException
from pgoapi.inventory import Inventory as Player_Inventory
from pgoapi.location import *
from pgoapi.poke_utils import *
from pgoapi.protos.POGOProtos import Enums_pb2
from pgoapi.protos.POGOProtos import Inventory_pb2 as Inventory
from pgoapi.protos.POGOProtos.Networking.Requests_pb2 import RequestType
from pgoapi.rpc_api import RpcApi
from .utilities import f2i
logger = logging.getLogger(__name__)
class PGoApi:
API_ENTRY = 'https://pgorelease.nianticlabs.com/plfe/rpc'
def __init__(self, config, pokemon_names):
self.log = logging.getLogger(__name__)
self._auth_provider = None
self._api_endpoint = None
self.config = config
        self._position_lat = 0  # int coords
self._position_lng = 0
self._position_alt = 0
self._posf = (0, 0, 0) # this is floats
self._origPosF = (0, 0, 0) # this is original position in floats
self._req_method_list = []
self._heartbeat_number = 5
self._firstRun = True
self.pokemon_caught = 0
self.inventory = Player_Inventory([])
self.pokemon_names = pokemon_names
self.MIN_ITEMS = {}
for k, v in config.get("MIN_ITEMS", {}).items():
self.MIN_ITEMS[getattr(Inventory, k)] = v
self.POKEMON_EVOLUTION = {}
self.POKEMON_EVOLUTION_FAMILY = {}
for k, v in config.get("POKEMON_EVOLUTION", {}).items():
self.POKEMON_EVOLUTION[getattr(Enums_pb2, k)] = v
self.POKEMON_EVOLUTION_FAMILY[getattr(Enums_pb2, k)] = getattr(Enums_pb2, "FAMILY_" + k)
self.MIN_KEEP_IV = config.get("MIN_KEEP_IV", 0) # release anything under this if we don't have it already
self.KEEP_CP_OVER = config.get("KEEP_CP_OVER", 0) # release anything under this if we don't have it already
        self.MIN_SIMILAR_POKEMON = config.get("MIN_SIMILAR_POKEMON", 1)  # Keep at least one of everything.
self.STAY_WITHIN_PROXIMITY = config.get("STAY_WITHIN_PROXIMITY", False) # Stay within proximity
self.visited_forts = ExpiringDict(max_len=120, max_age_seconds=config.get("SKIP_VISITED_FORT_DURATION", 600))
self.experimental = config.get("EXPERIMENTAL", False)
self.spin_all_forts = config.get("SPIN_ALL_FORTS", False)
self.keep_pokemon_ids = map(lambda x: getattr(Enums_pb2, x), config.get("KEEP_POKEMON_NAMES", []))
self.throw_pokemon_ids = map(lambda x: getattr(Enums_pb2, x), config.get("THROW_POKEMON_NAMES", []))
self.max_catch_attempts = config.get("MAX_CATCH_ATTEMPTS", 10)
self.game_master = parse_game_master()
def call(self):
if not self._req_method_list:
return False
if self._auth_provider is None or not self._auth_provider.is_login():
self.log.info('Not logged in')
return False
player_position = self.get_position()
request = RpcApi(self._auth_provider)
if self._api_endpoint:
api_endpoint = self._api_endpoint
else:
api_endpoint = self.API_ENTRY
self.log.debug('Execution of RPC')
response = None
try:
response = request.request(api_endpoint, self._req_method_list, player_position)
except ServerBusyOrOfflineException as e:
self.log.info('Server seems to be busy or offline - try again!')
# cleanup after call execution
self.log.debug('Cleanup of request!')
self._req_method_list = []
return response
def list_curr_methods(self):
for i in self._req_method_list:
print("{} ({})".format(RequestType.Name(i), i))
def set_logger(self, logger):
        self.log = logger or logging.getLogger(__name__)
def get_position(self):
return (self._position_lat, self._position_lng, self._position_alt)
def set_position(self, lat, lng, alt):
self.log.debug('Set Position - Lat: %s Long: %s Alt: %s', lat, lng, alt)
self._posf = (lat, lng, alt)
if self._firstRun:
self._firstRun = False
self._origPosF = self._posf
self._position_lat = f2i(lat)
self._position_lng = f2i(lng)
self._position_alt = f2i(alt)
def __getattr__(self, func):
def function(**kwargs):
if not self._req_method_list:
self.log.debug('Create new request...')
name = func.upper()
if kwargs:
self._req_method_list.append({RequestType.Value(name): kwargs})
self.log.debug("Adding '%s' to RPC request including arguments", name)
self.log.debug("Arguments of '%s': \n\r%s", name, kwargs)
else:
self._req_method_list.append(RequestType.Value(name))
self.log.debug("Adding '%s' to RPC request", name)
return self
if func.upper() in RequestType.keys():
return function
else:
raise AttributeError
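    # Example (illustrative sketch, not part of the original source). The
    # __getattr__ hook above turns any RequestType name into a chainable
    # request builder, so:
    #
    #   api.get_player().get_inventory().call()
    #
    # queues GET_PLAYER and GET_INVENTORY on _req_method_list and executes
    # both in a single RPC batch.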
def update_player_inventory(self):
self.get_inventory()
res = self.call()
if 'GET_INVENTORY' in res['responses']:
self.inventory = Player_Inventory(res['responses']['GET_INVENTORY']['inventory_delta']['inventory_items'])
self.log.info("Player Items: %s", self.inventory)
def heartbeat(self):
# making a standard call to update position, etc
self.get_player()
if self._heartbeat_number % 10 == 0:
self.check_awarded_badges()
self.get_inventory()
# self.download_settings(hash="4a2e9bc330dae60e7b74fc85b98868ab4700802e")
res = self.call()
if not res or res.get("direction", -1) == 102:
            self.log.error("There was a problem with the response for api call: %s. Restarting!!!", res)
raise AuthException("Token probably expired?")
self.log.debug('Heartbeat dictionary: \n\r{}'.format(json.dumps(res, indent=2)))
if 'GET_PLAYER' in res['responses']:
player_data = res['responses'].get('GET_PLAYER', {}).get('player_data', {})
currencies = player_data.get('currencies', [])
currency_data = ",".join(
map(lambda x: "{0}: {1}".format(x.get('name', 'NA'), x.get('amount', 'NA')), currencies))
self.log.info("Username: %s, Currencies: %s, Pokemon Caught in this run: %s",
player_data.get('username', 'NA'), currency_data, self.pokemon_caught)
if 'GET_INVENTORY' in res['responses']:
with open("data_dumps/%s.json" % self.config['username'], "w") as f:
res['responses']['lat'] = self._posf[0]
res['responses']['lng'] = self._posf[1]
f.write(json.dumps(res['responses'], indent=2))
self.log.info(get_inventory_data(res, self.pokemon_names))
self.log.info("Player Items: %s", self.inventory)
self.inventory = Player_Inventory(res['responses']['GET_INVENTORY']['inventory_delta']['inventory_items'])
self.log.debug(self.cleanup_inventory(self.inventory.inventory_items))
self.attempt_evolve(self.inventory.inventory_items)
self.cleanup_pokemon(self.inventory.inventory_items)
self._heartbeat_number += 1
return res
def walk_to(self, loc, waypoints=[]): # location in floats of course...
steps = get_route(self._posf, loc, self.config.get("USE_GOOGLE", False), self.config.get("GMAPS_API_KEY", ""),
self.experimental and self.spin_all_forts, waypoints)
catch_attempt = 0
for step in steps:
for i, next_point in enumerate(get_increments(self._posf, step, self.config.get("STEP_SIZE", 200))):
self.set_position(*next_point)
self.heartbeat()
if self.experimental and self.spin_all_forts:
self.spin_nearest_fort()
self.log.info("On my way to the next fort! :)")
sleep(1)
while self.catch_near_pokemon() and catch_attempt <= self.max_catch_attempts:
sleep(1)
catch_attempt += 1
catch_attempt = 0
def walk_back_to_origin(self):
self.walk_to(self._origPosF)
def spin_nearest_fort(self):
map_cells = self.nearby_map_objects()['responses'].get('GET_MAP_OBJECTS', {}).get('map_cells', [])
forts = PGoApi.flatmap(lambda c: c.get('forts', []), map_cells)
destinations = filtered_forts(self._origPosF, self._posf, forts, self.STAY_WITHIN_PROXIMITY, self.visited_forts)
if destinations:
nearest_fort = destinations[0][0]
nearest_fort_dis = destinations[0][1]
if nearest_fort_dis <= 40.00:
                self.fort_search_pgoapi(nearest_fort, player_position=self.get_position(),
fort_distance=nearest_fort_dis)
if 'lure_info' in nearest_fort:
self.disk_encounter_pokemon(nearest_fort['lure_info'])
else:
self.log.info('No spinnable forts within proximity. Or server returned no map objects.')
    def fort_search_pgoapi(self, fort, player_position, fort_distance):
res = self.fort_search(fort_id=fort['id'], fort_latitude=fort['latitude'],
fort_longitude=fort['longitude'],
                               player_latitude=player_position[0],
                               player_longitude=player_position[1]).call()['responses']['FORT_SEARCH']
if res['result'] == 1:
            self.log.debug("Fort spun: %s", res)
            self.log.info("Fort spun: http://maps.google.com/maps?q=%s,%s", fort['latitude'], fort['longitude'])
            self.visited_forts[fort['id']] = fort
        elif res['result'] == 4:
            self.log.debug("Fort spun but your inventory is full: %s", res)
            self.log.info("Fort spun but your inventory is full.")
self.visited_forts[fort['id']] = fort
elif res['result'] == 2:
self.log.debug("Could not spin fort - fort not in range %s", res)
self.log.info("Could not spin fort http://maps.google.com/maps?q=%s,%s, Not in Range %s", fort['latitude'],
fort['longitude'], fort_distance)
else:
self.log.debug("Could not spin fort %s", res)
self.log.info("Could not spin fort http://maps.google.com/maps?q=%s,%s, Error id: %s", fort['latitude'],
fort['longitude'], res['result'])
return False
return True
def spin_all_forts_visible(self):
res = self.nearby_map_objects()
map_cells = res['responses'].get('GET_MAP_OBJECTS', {}).get('map_cells', [])
forts = PGoApi.flatmap(lambda c: c.get('forts', []), map_cells)
destinations = filtered_forts(self._origPosF, self._posf, forts, self.STAY_WITHIN_PROXIMITY, self.visited_forts,
self.experimental)
if not destinations:
self.log.debug("No fort to walk to! %s", res)
self.log.info('No more spinnable forts within proximity. Or server error')
self.walk_back_to_origin()
return False
if len(destinations) >= 20:
destinations = destinations[:20]
furthest_fort = destinations[0][0]
self.log.info("Walking to fort at http://maps.google.com/maps?q=%s,%s", furthest_fort['latitude'],
furthest_fort['longitude'])
self.walk_to((furthest_fort['latitude'], furthest_fort['longitude']),
map(lambda x: "via:%f,%f" % (x[0]['latitude'], x[0]['longitude']), destinations[1:]))
return True
def return_to_start(self):
self.set_position(*self._origPosF)
def spin_near_fort(self):
res = self.nearby_map_objects()
map_cells = res['responses'].get('GET_MAP_OBJECTS', {}).get('map_cells', [])
forts = PGoApi.flatmap(lambda c: c.get('forts', []), map_cells)
destinations = filtered_forts(self._origPosF, self._posf, forts, self.STAY_WITHIN_PROXIMITY, self.visited_forts,
self.experimental)
if not destinations:
self.log.debug("No fort to walk to! %s", res)
self.log.info('No more spinnable forts within proximity. Returning back to origin')
self.walk_back_to_origin()
return False
for fort_data in destinations:
fort = fort_data[0]
self.log.info("Walking to fort at http://maps.google.com/maps?q=%s,%s", fort['latitude'],
fort['longitude'])
self.walk_to((fort['latitude'], fort['longitude']))
self.fort_search_pgoapi(fort, self.get_position(), fort_data[1])
if 'lure_info' in fort:
self.disk_encounter_pokemon(fort['lure_info'])
return True
def catch_near_pokemon(self):
map_cells = self.nearby_map_objects()['responses']['GET_MAP_OBJECTS']['map_cells']
pokemons = PGoApi.flatmap(lambda c: c.get('catchable_pokemons', []), map_cells)
# catch first pokemon:
origin = (self._posf[0], self._posf[1])
pokemon_distances = [(pokemon, distance_in_meters(origin, (pokemon['latitude'], pokemon['longitude']))) for
pokemon
in pokemons]
if pokemons:
self.log.debug("Nearby pokemon: : %s", pokemon_distances)
self.log.info("Nearby Pokemon: %s",
", ".join(map(lambda x: self.pokemon_names[str(x['pokemon_id'])], pokemons)))
else:
self.log.info("No nearby pokemon")
catches_successful = False
for pokemon_distance in pokemon_distances:
target = pokemon_distance
self.log.debug("Catching pokemon: : %s, distance: %f meters", target[0], target[1])
self.log.info("Catching Pokemon: %s", self.pokemon_names[str(target[0]['pokemon_id'])])
catches_successful &= self.encounter_pokemon(target[0])
sleep(random.randrange(4, 8))
return catches_successful
def nearby_map_objects(self):
position = self.get_position()
neighbors = getNeighbors(self._posf)
return self.get_map_objects(latitude=position[0], longitude=position[1],
since_timestamp_ms=[0] * len(neighbors),
cell_id=neighbors).call()
def attempt_catch(self, encounter_id, spawn_point_guid):
catch_status = -1
catch_attempts = 0
ret = {}
# Max 4 attempts to catch pokemon
while catch_status != 1 and self.inventory.can_attempt_catch() and catch_attempts < 5:
pokeball = self.inventory.take_next_ball()
r = self.catch_pokemon(
normalized_reticle_size=1.950,
pokeball=pokeball,
spin_modifier=0.850,
hit_pokemon=True,
normalized_hit_position=1,
encounter_id=encounter_id,
spawn_point_guid=spawn_point_guid,
).call()['responses']['CATCH_POKEMON']
catch_attempts += 1
if "status" in r:
catch_status = r['status']
                # fled or error
if catch_status == 3 or catch_status == 0:
break
ret = r
if 'status' in ret:
return ret
return {}
def cleanup_inventory(self, inventory_items=None):
if not inventory_items:
inventory_items = self.get_inventory().call()['responses']['GET_INVENTORY']['inventory_delta'][
'inventory_items']
for inventory_item in inventory_items:
if "item" in inventory_item['inventory_item_data']:
item = inventory_item['inventory_item_data']['item']
if item['item_id'] in self.MIN_ITEMS and "count" in item and item['count'] > self.MIN_ITEMS[
item['item_id']]:
recycle_count = item['count'] - self.MIN_ITEMS[item['item_id']]
self.log.info("Recycling Item_ID {0}, item count {1}".format(item['item_id'], recycle_count))
res = self.recycle_inventory_item(item_id=item['item_id'], count=recycle_count).call()['responses'][
'RECYCLE_INVENTORY_ITEM']
response_code = res['result']
if response_code == 1:
self.log.info("Recycled Item %s, New Count: %s", item['item_id'], res.get('new_count', 0))
else:
self.log.info("Failed to recycle Item %s, Code: %s", item['item_id'], response_code)
sleep(2)
return self.update_player_inventory()
def get_caught_pokemons(self, inventory_items):
caught_pokemon = defaultdict(list)
for inventory_item in inventory_items:
if "pokemon_data" in inventory_item['inventory_item_data']:
# is a pokemon:
pokemon = Pokemon(inventory_item['inventory_item_data']['pokemon_data'], self.pokemon_names)
pokemon.pokemon_additional_data = self.game_master.get(pokemon.pokemon_id, PokemonData())
if not pokemon.is_egg:
caught_pokemon[pokemon.pokemon_id].append(pokemon)
return caught_pokemon
def cleanup_pokemon(self, inventory_items=None):
if not inventory_items:
inventory_items = self.get_inventory().call()['responses']['GET_INVENTORY']['inventory_delta'][
'inventory_items']
caught_pokemon = self.get_caught_pokemons(inventory_items)
for pokemons in caught_pokemon.values():
# Only if we have more than MIN_SIMILAR_POKEMON
if len(pokemons) > self.MIN_SIMILAR_POKEMON:
pokemons = sorted(pokemons, key=lambda x: (x.cp, x.iv), reverse=True)
# keep the first pokemon....
for pokemon in pokemons[self.MIN_SIMILAR_POKEMON:]:
if self.is_pokemon_eligible_for_transfer(pokemon):
self.log.info("Releasing pokemon: %s", pokemon)
self.release_pokemon(pokemon_id=pokemon.id)
release_res = self.call()['responses']['RELEASE_POKEMON']
status = release_res.get('result', -1)
if status == 1:
self.log.info("Successfully Released Pokemon %s", pokemon)
else:
self.log.debug("Failed to release pokemon %s, %s", pokemon, release_res)
self.log.info("Failed to release Pokemon %s", pokemon)
sleep(3)
def is_pokemon_eligible_for_transfer(self, pokemon):
return (pokemon.pokemon_id in self.throw_pokemon_ids and not pokemon.is_favorite) \
or (not pokemon.is_favorite and
pokemon.iv < self.MIN_KEEP_IV and
pokemon.cp < self.KEEP_CP_OVER and
pokemon.is_valid_pokemon() and
pokemon.pokemon_id not in self.keep_pokemon_ids)
def attempt_evolve(self, inventory_items=None):
if not inventory_items:
inventory_items = self.get_inventory().call()['responses']['GET_INVENTORY']['inventory_delta'][
'inventory_items']
caught_pokemon = self.get_caught_pokemons(inventory_items)
self.inventory = Player_Inventory(inventory_items)
for pokemons in caught_pokemon.values():
if len(pokemons) > self.MIN_SIMILAR_POKEMON:
pokemons = sorted(pokemons, key=lambda x: (x.cp, x.iv), reverse=True)
for pokemon in pokemons[self.MIN_SIMILAR_POKEMON:]:
# If we can't evolve this type of pokemon anymore, don't check others.
if not self.attempt_evolve_pokemon(pokemon):
break
return False
def attempt_evolve_pokemon(self, pokemon):
if self.is_pokemon_eligible_for_evolution(pokemon=pokemon):
self.log.info("Evolving pokemon: %s", pokemon)
evo_res = self.evolve_pokemon(pokemon_id=pokemon.id).call()['responses']['EVOLVE_POKEMON']
status = evo_res.get('result', -1)
sleep(3)
if status == 1:
evolved_pokemon = Pokemon(evo_res.get('evolved_pokemon_data', {}), self.pokemon_names,
self.game_master.get(str(pokemon.pokemon_id), PokemonData()))
                # I don't think we need additional stats for an evolved pokemon, since we do not do anything with them.
# evolved_pokemon.pokemon_additional_data = self.game_master.get(pokemon.pokemon_id, PokemonData())
self.log.info("Evolved to %s", evolved_pokemon)
self.update_player_inventory()
return True
else:
self.log.debug("Could not evolve Pokemon %s", evo_res)
self.log.info("Could not evolve pokemon %s | Status %s", pokemon, status)
self.update_player_inventory()
return False
else:
return False
def is_pokemon_eligible_for_evolution(self, pokemon):
return self.inventory.pokemon_candy.get(self.POKEMON_EVOLUTION_FAMILY.get(pokemon.pokemon_id, None),
-1) > self.POKEMON_EVOLUTION.get(pokemon.pokemon_id, None) \
and pokemon.pokemon_id not in self.keep_pokemon_ids \
and not pokemon.is_favorite \
and pokemon.pokemon_id in self.POKEMON_EVOLUTION
def disk_encounter_pokemon(self, lureinfo, retry=False):
try:
self.update_player_inventory()
if not self.inventory.can_attempt_catch():
self.log.info("No balls to catch %s, exiting disk encounter", self.inventory)
return False
encounter_id = lureinfo['encounter_id']
fort_id = lureinfo['fort_id']
position = self._posf
self.log.debug("At Fort with lure %s".encode('ascii', 'ignore'), lureinfo)
self.log.info("At Fort with Lure AND Active Pokemon %s",
self.pokemon_names.get(str(lureinfo.get('active_pokemon_id', 0)), "NA"))
resp = self.disk_encounter(encounter_id=encounter_id, fort_id=fort_id, player_latitude=position[0],
player_longitude=position[1]).call()['responses']['DISK_ENCOUNTER']
if resp['result'] == 1:
capture_status = -1
while capture_status != 0 and capture_status != 3:
catch_attempt = self.attempt_catch(encounter_id, fort_id)
capture_status = catch_attempt['status']
if capture_status == 1:
self.log.debug("(LURE) Caught Pokemon: : %s", catch_attempt)
self.log.info("(LURE) Caught Pokemon: %s",
self.pokemon_names.get(str(lureinfo.get('active_pokemon_id', 0)), "NA"))
self.pokemon_caught += 1
sleep(2)
return True
elif capture_status != 2:
self.log.debug("(LURE) Failed Catch: : %s", catch_attempt)
self.log.info("(LURE) Failed to catch Pokemon: %s",
self.pokemon_names.get(str(lureinfo.get('active_pokemon_id', 0)), "NA"))
return False
sleep(2)
elif resp['result'] == 5:
self.log.info("Couldn't catch %s Your pokemon bag was full, attempting to clear and re-try",
self.pokemon_names.get(str(lureinfo.get('active_pokemon_id', 0)), "NA"))
self.cleanup_pokemon()
if not retry:
return self.disk_encounter_pokemon(lureinfo, retry=True)
else:
self.log.info("Could not start Disk (lure) encounter for pokemon: %s",
self.pokemon_names.get(str(lureinfo.get('active_pokemon_id', 0)), "NA"))
except Exception as e:
self.log.error("Error in disk encounter %s", e)
return False
def encounter_pokemon(self, pokemon, retry=False): # take in a MapPokemon from MapCell.catchable_pokemons
# Update Inventory to make sure we can catch this mon
self.update_player_inventory()
if not self.inventory.can_attempt_catch():
self.log.info("No balls to catch %s, exiting encounter", self.inventory)
return False
encounter_id = pokemon['encounter_id']
spawn_point_id = pokemon['spawn_point_id']
# begin encounter_id
position = self.get_position()
encounter = self.encounter(encounter_id=encounter_id,
spawn_point_id=spawn_point_id,
player_latitude=position[0],
player_longitude=position[1]).call()['responses']['ENCOUNTER']
self.log.debug("Attempting to Start Encounter: %s", encounter)
if encounter['status'] == 1:
capture_status = -1
# while capture_status != RpcEnum.CATCH_ERROR and capture_status != RpcEnum.CATCH_FLEE:
while capture_status != 0 and capture_status != 3:
catch_attempt = self.attempt_catch(encounter_id, spawn_point_id)
capture_status = catch_attempt.get('status', -1)
# if status == RpcEnum.CATCH_SUCCESS:
if capture_status == 1:
self.log.debug("Caught Pokemon: : %s", catch_attempt)
self.log.info("Caught Pokemon: %s", self.pokemon_names.get(str(pokemon['pokemon_id']), "NA"))
self.pokemon_caught += 1
sleep(2)
return True
elif capture_status != 2:
self.log.debug("Failed Catch: : %s", catch_attempt)
self.log.info("Failed to Catch Pokemon: %s",
self.pokemon_names.get(str(pokemon['pokemon_id']), "NA"))
return False
sleep(2)
elif encounter['status'] == 7:
self.log.info("Couldn't catch %s Your pokemon bag was full, attempting to clear and re-try",
self.pokemon_names.get(str(pokemon['pokemon_id']), "NA"))
self.cleanup_pokemon()
if not retry:
return self.encounter_pokemon(pokemon, retry=True)
else:
self.log.info("Could not start encounter for pokemon: %s",
self.pokemon_names.get(str(pokemon['pokemon_id']), "NA"))
return False
def login(self, provider, username, password, cached=False):
if not isinstance(username, basestring) or not isinstance(password, basestring):
raise AuthException("Username/password not correctly specified")
if provider == 'ptc':
self._auth_provider = AuthPtc()
elif provider == 'google':
self._auth_provider = AuthGoogle()
else:
raise AuthException("Invalid authentication provider - only ptc/google available.")
self.log.debug('Auth provider: %s', provider)
if not self._auth_provider.login(username, password):
self.log.info('Login process failed')
return False
self.log.info('Starting RPC login sequence (app simulation)')
# making a standard call, like it is also done by the client
self.get_player()
self.get_hatched_eggs()
self.get_inventory()
self.check_awarded_badges()
self.download_settings(hash="05daf51635c82611d1aac95c0b051d3ec088a930")
response = self.call()
if not response:
self.log.info('Login failed!')
if os.path.isfile("auth_cache") and cached:
response = pickle.load(open("auth_cache"))
fname = "auth_cache_%s" % username
if os.path.isfile(fname) and cached:
response = pickle.load(open(fname))
else:
response = self.heartbeat()
f = open(fname, "w")
pickle.dump(response, f)
if not response:
self.log.info('Login failed!')
return False
if 'api_url' in response:
self._api_endpoint = ('https://{}/rpc'.format(response['api_url']))
self.log.debug('Setting API endpoint to: %s', self._api_endpoint)
else:
self.log.error('Login failed - unexpected server response!')
return False
if 'auth_ticket' in response:
self._auth_provider.set_ticket(response['auth_ticket'].values())
self.log.info('Finished RPC login sequence (app simulation)')
self.log.info('Login process completed')
return True
def main_loop(self):
catch_attempt = 0
self.heartbeat()
self.cleanup_inventory()
while True:
self.heartbeat()
sleep(1)
if self.experimental and self.spin_all_forts:
self.spin_all_forts_visible()
else:
self.spin_near_fort()
            # if catching fails 10 times, maybe you are softbanned.
while self.catch_near_pokemon() and catch_attempt <= self.max_catch_attempts:
sleep(4)
catch_attempt += 1
pass
if catch_attempt > self.max_catch_attempts:
self.log.warn("Your account may be softbaned Or no Pokeballs. Failed to catch pokemon %s times",
catch_attempt)
catch_attempt = 0
@staticmethod
def flatmap(f, items):
return list(chain.from_iterable(imap(f, items)))
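# Minimal usage sketch (hedged: the enclosing class name and the credential
# values below are illustrative, not taken from this file):
#
#   api = PGoApi()
#   if api.login('ptc', 'some_user', 'some_password', cached=False):
#       api.main_loop()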
| mit | -1,503,508,750,063,969,000 | 47.43465 | 120 | 0.577063 | false |
smartforceplus/SmartForceplus | openerp/addons/tag_website_noodoo/__openerp__.py | 1 | 1221 | # -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright JLaloux
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Website no Odoo references',
'version': '0.0.1',
'author': 'JLaloux',
'category': 'Website',
'description': 'Removes references to Odoo in the website',
'depends': ['website'],
'data': [
'views/website_templates.xml'
],
'active': False,
'installable': True,
'auto_install': True,
} | agpl-3.0 | -6,927,963,381,047,777,000 | 36.030303 | 78 | 0.579853 | false |
Masood-M/yalih | honeypot.py | 1 | 6809 | #! /usr/bin/env python
import time
import threading
import os, sys, Queue
from time import gmtime, strftime
from itertools import groupby
from operator import itemgetter
import os.path
import imapfile
import logging
import honeypotconfig
import scan
import bing
import executemechanize
import malwebsites
import normalize
import updateantivirus
import yaradetection
import unquote
import argparse
import extraction
try:
import signal
from signal import SIGPIPE, SIG_IGN
signal.signal(signal.SIGPIPE, signal.SIG_IGN)
except ImportError:
pass
queue=Queue.Queue()
logger = logging.getLogger()
def worker():
urldict = queue.get()
#this is for the normal visitor output (no error)
logger.info(str(urldict["counter"]) + ",\t" + urldict["url"]+",\t"+ "Visiting")
executemechanize.executemechanize(urldict)
queue.task_done()
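# Work-item sketch: every queue entry is a plain dict carrying the target URL
# and a progressive counter, exactly as built in main() below, e.g.:
#
#   queue.put({"url": "http://example.com", "counter": 1})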
def threadmaker():
while True:
threadstomake = honeypotconfig.threadnum - threading.active_count()
for i in range(threadstomake):
thread = threading.Thread(target=worker)
thread.setDaemon(True)
thread.start()
time.sleep(5)
def readurl():
url = sys.argv[2]
return url
def main():
#Create the threads
thread = threading.Thread(target=threadmaker)
thread.setDaemon(True)
thread.start()
script_path = os.path.dirname(os.path.abspath( __file__ ))
parser = argparse.ArgumentParser(description="Examples:\n/honeypot.py --url www.yahoo.com\nhoneypot.py --file <file path>\n./honeypot.py --blacklist\n./honeypot.py --email\n./honeypot.py --update\n./honeypot.py --search <warez>\n./honeypot.py --local <file/directory path>", formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument("--email", help="Retrieves your Spam emails from your mail server and crawls the extracted URLS. Enter your email credentials in honeypotconfig.py file!", action="store_true")
parser.add_argument("--update", help="Updates the anti-virus signatures", action="store_true")
parser.add_argument("--blacklist", help="Downloads list of suspicious malicious websites from three databases and retrieves/scans them accordingly", action="store_true")
parser.add_argument("--file", nargs=1, help="Provide an input file", action="store")
parser.add_argument("--url", nargs=1, help="Provide a url", action="store")
parser.add_argument("--search", nargs=1, help="searches Bing search engine for a keyword (1 single keyword at the moment) and returns 100 results starting from the 20th result.", action="store")
parser.add_argument("--local", nargs=1, help="scans a local file or directory for malicious signatures.", action="store")
parser.add_argument("--debug", help="Include http header", action="store_true")
parser.add_argument("--crawler", help="Crawl the sites and save any executables found", action="store_true")
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
path = script_path+"/tmp"
print path
#create the tmp folder
if not os.path.isdir(path):
os.makedirs("tmp")
#Crawler
if args.crawler:
executemechanize.crawler = True
#Logging
"""Initialize logger."""
command = "mkdir -p debug/" #create a temporary folder in your working space folder
os.system(command)
sys.stdin=open("debug/" + time.asctime(time.localtime(time.time())) +".log", "a")
logger = logging.getLogger()
sh = logging.StreamHandler()
sh.setFormatter(SpecialFormatter())
sh2 = logging.StreamHandler(sys.stdin)
sh2.setFormatter(SpecialFormatter())
logger.addHandler(sh)
logger.addHandler(sh2)
logger.setLevel(logging.INFO)
if args.debug:
logger.setLevel(logging.DEBUG)
executemechanize.set_logging_level(logging.DEBUG)
#Update antivirus signatures
if args.update:
updateantivirus.updateantivirus()
#Blacklist Databases
if args.blacklist:
try:
if not os.path.exists("list"):
os.mkdir("list")
except OSError as e:
logger.error(e)
malwebsites.domaindownload()
malwebsites.duplicateremover()
urls = open("list/malwebsites.txt", "r")
counter = 0
for line in urls:
dict={}
counter += 1
dict["url"] = line.strip()
dict["counter"] = counter
queue.put(dict)
queue.join()
scan.scanning(path)
yaradetection.listandscan(path)
unquote.unquoteDirectory(path)
#Email
if args.email:
imapfile.imap()
extraction.extracturl()#extracts urls from emails.txt file
extraction.duplicateremover() #removes the duplicate urls from crawler.txt files (which now contain extracted urls from emails.txt)
os.remove("emails.txt")
urls = open('crawler.txt', "r")
counter = 0
for line in urls:
dict={}
counter += 1
dict["url"] = line.rstrip()
dict["counter"] = counter
queue.put(dict)
queue.join()
scan.scanning(path)
yaradetection.listandscan(path)
unquote.unquoteDirectory(path)
#File
if args.file:
mylist = list()
mylist2 = list()
counter =0
fopen3 = open(sys.argv[2],"r")
for line in fopen3:
dict={}
line = line.strip()
counter += 1
if not (line.startswith("http://")) and not (line.startswith("https://")):
line = "http://"+line
dict["url"] = line
dict["counter"] = counter
queue.put(dict)
queue.join()
fopen3.close()
scan.scanning(path)
yaradetection.listandscan(path)
unquote.unquoteDirectory(path)
#URL
if args.url:
url = readurl()
url = normalize.normalizeurl(url)
dict={}
counter = 1
if not (url.startswith("http://")) and not (url.startswith("https://")):
url = "http://"+url
dict["url"] = url
dict["counter"] = counter
queue.put(dict)
queue.join()
# executemechanize.executemechanize(url)
scan.scanning(path)
yaradetection.listandscan(path)
unquote.unquoteDirectory(path)
#Search
if args.search:
keyword = sys.argv[2]
bing.searchBing(keyword)
mylist = list()
fopen = open("list/searchresult.txt","r")
for line in fopen:
line = line.strip()
if not line:
continue
mylist.append(line)
fopen.close()
counter = 0
for line in mylist:
dict={}
counter += 1
dict["url"] = line
dict["counter"] = counter
queue.put(dict)
queue.join()
scan.scanning(path)
yaradetection.listandscan(path)
unquote.unquoteDirectory(path)
#Local Scan
if args.local:
path = sys.argv[2]
scan.scanning(path)
yaradetection.listandscan(path)
unquote.unquoteDirectory(path)
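# Invocation sketches, mirroring the examples in the argument parser epilog
# above (the file name is a placeholder):
#
#   ./honeypot.py --url www.example.com
#   ./honeypot.py --file urls.txt --debug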
class SpecialFormatter(logging.Formatter):
FORMATS = {logging.INFO : "%(name)s,\t%(levelname)s,\t%(message)s", 'DEFAULT' : "%(name)s,\t%(levelname)s,\t%(message)s"}
def formatTime(self, record, datefmt=None):
self._datefmt = time.strftime("%Y-%m-%d %H:%M:%S")
return logging.Formatter.formatTime(self, record, self._datefmt)
def format(self, record):
self._fmt = self.FORMATS.get(record.levelno, self.FORMATS['DEFAULT'])
return logging.Formatter.format(self, record)
if __name__ == "__main__":
main()
| apache-2.0 | 7,934,120,239,446,023,000 | 25.597656 | 322 | 0.708474 | false |
proximate/proximate | userpresence.py | 1 | 1863 | #
# Proximate - Peer-to-peer social networking
#
# Copyright (c) 2008-2011 Nokia Corporation
#
# All rights reserved.
#
# This software is licensed under The Clear BSD license.
# See the LICENSE file for more details.
#
from plugins import Plugin, get_plugin_by_type
from proximateprotocol import PLUGIN_TYPE_COMMUNITY, PLUGIN_TYPE_NOTIFICATION, PLUGIN_TYPE_USER_PRESENCE
from userpresence_gui import User_Presence_GUI
community = None
notify = None
class Pattern:
def __init__(self, dict):
self.dict = dict
def match(self, user):
for (key, value) in self.dict.iteritems():
if user.get(key).find(value) < 0:
return False
return True
def __str__(self):
return str(self.dict)
class User_Presence_Plugin(Plugin):
def __init__(self):
global community, notify
self.register_plugin(PLUGIN_TYPE_USER_PRESENCE)
community = get_plugin_by_type(PLUGIN_TYPE_COMMUNITY)
notify = get_plugin_by_type(PLUGIN_TYPE_NOTIFICATION)
self.patterns = []
def user_appears(self, user):
nick = user.get('nick')
for p in self.patterns:
if p.match(user):
notify.notify_with_response('User %s appeared' % nick, self.response_handler, None)
    def user_changes(self, user, what=None):
        nick = user.get('nick')
        for p in self.patterns:
            if p.match(user):
                notify.notify_with_response('User %s changed' % nick, self.response_handler, None)
def response_handler(self, response, msg, ctx):
return False
def add_pattern(self, pdict):
p = Pattern(pdict)
self.patterns.append(p)
def delete_pattern(self, pattern):
        self.patterns.remove(pattern)
def get_patterns(self):
return self.patterns
def init(options):
if options.presence:
User_Presence_Plugin()
| bsd-3-clause | 5,973,490,620,666,871,000 | 27.227273 | 104 | 0.640902 | false |
desarrollosimagos/svidb | administrativo/actores/bancoaudio.py | 1 | 5678 | # -*- coding: utf8
from gestion.models import *
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext, loader
from actores.models import *
from areas.models import *
from especies.models import *
from plantillas.models import *
from menu.models import *
from perfil.models import PerfilPublico,ModulosPublicos,PerfilModulos
from django.db.models import Q
from forms import *
from inicio.models import Tipocolaboracion
from mapas.models import Colaboradorespersonas,contriBiblioteca,contriAudio,contriAvistamiento
def index(request):
_username = request.user.username
id_usuario = User.objects.get(username=_username)
id_persona = get_object_or_404(PerfilPublico, user=id_usuario.id)
persona = get_object_or_404(Directorios, pk=id_persona.persona.id)
return render_to_response('actores/bancoaudio/index.html', {'persona':persona})
def lista(request):
_username = request.user.username
id_usuario = User.objects.get(username=_username)
id_persona = get_object_or_404(PerfilPublico, user=id_usuario.id)
persona = get_object_or_404(Directorios, pk=id_persona.persona.id)
try:
biblioteca = Bancoaudiovisuals.objects.filter(directorio=persona).order_by('fecha')
except Bancoaudiovisuals.DoesNotExist:
biblioteca = None
return render_to_response('actores/bancoaudio/tus_aportes.html', {'persona':persona,'biblioteca':biblioteca})
#@csrf_protect
def agregar(request):
_username = request.user.username
id_usuario = User.objects.get(username=_username)
id_persona = get_object_or_404(PerfilPublico, user=id_usuario.id)
persona = get_object_or_404(Directorios, pk=id_persona.persona.id)
    # if the POST method is received
if request.method == 'POST':
        # form submitted
banco_form = AgregarBancoaudiovisualsPublic(request.POST, request.FILES)
        # validate the form
if banco_form.is_valid():
banco = banco_form.save()
today = datetime.now()
dateFormat = today.strftime("%Y-%m-%d")
tipocolaboracion = Tipocolaboracion.objects.get(id=15)
titulo = u'Colaboracion en Banco Audiovisual: %s' %(banco.id)
contribucion = Colaboradorespersonas(fecha=dateFormat,tipoColaboracion=tipocolaboracion,titulo=titulo,userupdate=id_usuario,persona=persona,estatu=3)
contribucion.save()
aporte = contriAudio(contribucion=contribucion,audio=banco)
aporte.save()
mensaje=True
return render_to_response('actores/bancoaudio/index.html',{'msm':mensaje,'persona':persona,'id_usuario':id_usuario})
else:
mensaje=True
return render_to_response('actores/bancoaudio/nuevo.html',{'form':banco_form,'msm':mensaje,'persona':persona,'usuario':id_usuario}, context_instance=RequestContext(request))
else:
        # initial form
mensaje=False
banco_form = AgregarBancoaudiovisualsPublic()
return render_to_response('actores/bancoaudio/nuevo.html',{'form':banco_form,'persona':persona,'usuario':id_usuario}, context_instance=RequestContext(request))
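# Routing sketch (hypothetical urls.py entry, not shown in this file): the
# view above would typically be wired up as something like
#
#   url(r'^bancoaudio/agregar/$', 'actores.bancoaudio.agregar')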
def galeria(request,elemento,id):
if elemento == 'actor':
try:
datos = Actores.objects.get(id=id)
except Actores.DoesNotExist:
datos = None
if elemento == 'areas':
try:
datos = Areas.objects.get(id=id)
except Areas.DoesNotExist:
datos = None
if elemento == 'taxon':
try:
datos = DetalleTaxon.objects.get(id=id)
except DetalleTaxon.DoesNotExist:
datos = None
return render_to_response('actores/bancoaudio/galeria.html', {'elemento':datos})
def PaginadorGaleria(request,elemento,id,pagina):
if elemento == 'actor':
try:
datos = Actores.objects.get(id=id)
except Actores.DoesNotExist:
datos = None
if elemento == 'areas':
try:
datos = Areas.objects.get(id=id)
except Areas.DoesNotExist:
datos = None
if elemento == 'taxon':
try:
datos = DetalleTaxon.objects.get(id=id)
except DetalleTaxon.DoesNotExist:
datos = None
image_list = datos.bancoaudio.all()
paginator = Paginator(image_list, 4)
page = pagina
try:
mesn = paginator.page(page)
except PageNotAnInteger:
mesn = paginator.page(1)
return render_to_response('actores/bancoaudio/paginadorGaleria.html', {'elemento':elemento,'mesn':mesn,'id':id})
def PaginadorGaleria2(request,pagina,id):
try:
dat = Bancoaudiovisuals.objects.get(pk=id)
except Bancoaudiovisuals.DoesNotExist:
dat = None
try:
datos = Bancoaudiovisuals.objects.filter()
except Bancoaudiovisuals.DoesNotExist:
datos = None
# image_list = datos.all()
paginator = Paginator(datos, 10)
page = pagina
try:
mesn = paginator.page(page)
except PageNotAnInteger:
mesn = paginator.page(1)
return render_to_response('actores/bancoaudio/paginadorGaleria2.html', {'mesn':mesn,'dat':dat})
def bancoVer(request,id):
try:
datos = Bancoaudiovisuals.objects.get(pk=id)
except Bancoaudiovisuals.DoesNotExist:
datos = None
return render_to_response('actores/bancoaudio/bancover.html', {'datos':datos})
| gpl-3.0 | 5,877,501,885,763,019,000 | 36.364865 | 188 | 0.664142 | false |
aplicatii-romanesti/allinclusive-kodi-pi | .kodi/userdata/addon_data/plugin.video.p2p-streams/acestream/ace/ACEStream/Core/APIImplementation/makeurl.py | 1 | 8330 | #Embedded file name: ACEStream\Core\APIImplementation\makeurl.pyo
import sys
import urlparse
import urllib
import math
#if sys.platform != 'win32':
# import curses.ascii
from types import IntType, LongType
from struct import pack, unpack
from base64 import b64encode, b64decode
from M2Crypto import Rand
from traceback import print_exc, print_stack
from ACEStream.Core.simpledefs import *
from ACEStream.Core.Utilities.TSCrypto import sha
DEBUG = False
def metainfo2p2purl(metainfo):
info = metainfo['info']
bitrate = None
if 'azureus_properties' in metainfo:
azprops = metainfo['azureus_properties']
if 'Content' in azprops:
content = metainfo['azureus_properties']['Content']
if 'Speed Bps' in content:
bitrate = content['Speed Bps']
if 'encoding' not in metainfo:
encoding = 'utf-8'
else:
encoding = metainfo['encoding']
urldict = {}
urldict['s'] = p2purl_encode_piecelength(info['piece length'])
urldict['n'] = p2purl_encode_name2url(info['name'], encoding)
if info.has_key('length'):
urldict['l'] = p2purl_encode_nnumber(info['length'])
else:
raise ValueError('Multi-file torrents currently not supported')
if info.has_key('root hash'):
urldict['r'] = b64urlencode(info['root hash'])
elif info.has_key('live'):
urldict['k'] = b64urlencode(info['live']['pubkey'])
urldict['a'] = info['live']['authmethod']
else:
raise ValueError('url-compat and Merkle torrent must be on to create URL')
if bitrate is not None:
urldict['b'] = p2purl_encode_nnumber(bitrate)
query = ''
for k in ['n',
'r',
'k',
'l',
's',
'a',
'b']:
if k in urldict:
if query != '':
query += '&'
v = urldict[k]
if k == 'n':
s = v
else:
s = k + '=' + v
query += s
sidx = metainfo['announce'].find(':')
hierpart = metainfo['announce'][sidx + 1:]
url = P2PURL_SCHEME + ':' + hierpart + '?' + query
return url
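# Shape sketch, following the code above: the resulting URL is
# P2PURL_SCHEME + ':' + <tracker hierpart> + '?' + <query>, where the query
# is the 'n', 'r', 'k', 'l', 's', 'a' and 'b' fields joined by '&' and the
# name field is emitted bare (without a 'n=' prefix).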
def p2purl2metainfo(url):
if DEBUG:
print >> sys.stderr, 'p2purl2metainfo: URL', url
colidx = url.find(':')
scheme = url[0:colidx]
qidx = url.find('?')
if qidx == -1:
authority = None
path = None
query = url[colidx + 1:]
fragment = None
else:
authoritypath = url[colidx + 3:qidx]
pidx = authoritypath.find('/')
authority = authoritypath[0:pidx]
path = authoritypath[pidx:]
fidx = url.find('#')
if fidx == -1:
query = url[qidx + 1:]
fragment = None
else:
query = url[qidx + 1:fidx]
fragment = url[fidx:]
csbidx = authority.find(']')
if authority.startswith('[') and csbidx != -1:
if csbidx == len(authority) - 1:
port = None
else:
port = authority[csbidx + 1:]
else:
cidx = authority.find(':')
if cidx != -1:
port = authority[cidx + 1:]
else:
port = None
if port is not None and not port.isdigit():
raise ValueError('Port not int')
if scheme != P2PURL_SCHEME:
raise ValueError('Unknown scheme ' + P2PURL_SCHEME)
metainfo = {}
if authority and path:
metainfo['announce'] = 'http://' + authority + path
result = urlparse.urlparse(metainfo['announce'])
if result[0] != 'http':
raise ValueError('Malformed tracker URL')
reqinfo = p2purl_parse_query(query)
metainfo.update(reqinfo)
swarmid = metainfo2swarmid(metainfo)
if DEBUG:
print >> sys.stderr, 'p2purl2metainfo: parsed', `metainfo`
return (metainfo, swarmid)
def metainfo2swarmid(metainfo):
if 'live' in metainfo['info']:
swarmid = pubkey2swarmid(metainfo['info']['live'])
else:
swarmid = metainfo['info']['root hash']
return swarmid
def p2purl_parse_query(query):
if DEBUG:
print >> sys.stderr, 'p2purl_parse_query: query', query
gotname = False
gotkey = False
gotrh = False
gotlen = False
gotps = False
gotam = False
gotbps = False
reqinfo = {}
reqinfo['info'] = {}
kvs = query.split('&')
for kv in kvs:
if '=' not in kv:
reqinfo['info']['name'] = p2purl_decode_name2utf8(kv)
reqinfo['encoding'] = 'UTF-8'
gotname = True
continue
k, v = kv.split('=')
        if k in ('k', 'a') and 'live' not in reqinfo['info']:
reqinfo['info']['live'] = {}
if k == 'n':
reqinfo['info']['name'] = p2purl_decode_name2utf8(v)
reqinfo['encoding'] = 'UTF-8'
gotname = True
elif k == 'r':
reqinfo['info']['root hash'] = p2purl_decode_base64url(v)
gotrh = True
elif k == 'k':
reqinfo['info']['live']['pubkey'] = p2purl_decode_base64url(v)
gotkey = True
elif k == 'l':
reqinfo['info']['length'] = p2purl_decode_nnumber(v)
gotlen = True
elif k == 's':
reqinfo['info']['piece length'] = p2purl_decode_piecelength(v)
gotps = True
elif k == 'a':
reqinfo['info']['live']['authmethod'] = v
gotam = True
elif k == 'b':
bitrate = p2purl_decode_nnumber(v)
reqinfo['azureus_properties'] = {}
reqinfo['azureus_properties']['Content'] = {}
reqinfo['azureus_properties']['Content']['Speed Bps'] = bitrate
gotbps = True
if not gotname:
raise ValueError('Missing name field')
if not gotrh and not gotkey:
raise ValueError('Missing root hash or live pub key field')
if gotrh and gotkey:
raise ValueError('Found both root hash and live pub key field')
if not gotlen:
raise ValueError('Missing length field')
if not gotps:
raise ValueError('Missing piece size field')
if gotkey and not gotam:
raise ValueError('Missing live authentication method field')
if gotrh and gotam:
raise ValueError('Inconsistent: root hash and live authentication method field')
if not gotbps:
raise ValueError('Missing bitrate field')
return reqinfo
def pubkey2swarmid(livedict):
if DEBUG:
print >> sys.stderr, 'pubkey2swarmid:', livedict.keys()
if livedict['authmethod'] == 'None':
return Rand.rand_bytes(20)
else:
return sha(livedict['pubkey']).digest()
def p2purl_decode_name2utf8(v):
if sys.platform != 'win32':
for c in v:
#if not curses.ascii.isascii(c):
# raise ValueError('Name contains unescaped 8-bit value ' + `c`)
pass
return urllib.unquote_plus(v)
def p2purl_encode_name2url(name, encoding):
if encoding.lower() == 'utf-8':
utf8name = name
else:
uname = unicode(name, encoding)
utf8name = uname.encode('utf-8')
return urllib.quote_plus(utf8name)
def p2purl_decode_base64url(v):
return b64urldecode(v)
def p2purl_decode_nnumber(s):
b = b64urldecode(s)
if len(b) == 2:
format = 'H'
elif len(b) == 4:
format = 'l'
else:
format = 'Q'
format = '!' + format
return unpack(format, b)[0]
def p2purl_encode_nnumber(s):
    if type(s) == IntType or type(s) == LongType:
if s < 65536:
format = 'H'
elif s < 4294967296L:
format = 'l'
else:
format = 'Q'
format = '!' + format
return b64urlencode(pack(format, s))
def p2purl_decode_piecelength(s):
return int(math.pow(2.0, float(s)))
def p2purl_encode_piecelength(s):
return str(int(math.log(float(s), 2.0)))
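# Worked example: a piece length is stored as its base-2 exponent, so
# p2purl_encode_piecelength(32768) == '15' and
# p2purl_decode_piecelength('15') == 32768.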
def b64urlencode(input):
output = b64encode(input)
output = output.rstrip('=')
output = output.replace('+', '-')
output = output.replace('/', '_')
return output
def b64urldecode(input):
inter = input[:]
padlen = 4 - (len(inter) - len(inter) / 4 * 4)
padstr = '=' * padlen
inter += padstr
inter = inter.replace('-', '+')
inter = inter.replace('_', '/')
output = b64decode(inter)
return output
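# Round-trip sketch: b64urlencode maps '+' to '-', '/' to '_' and strips the
# '=' padding, while b64urldecode restores all three, so for any byte string:
#
#   assert b64urldecode(b64urlencode('\x00\xff')) == '\x00\xff'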
| apache-2.0 | -7,401,510,805,719,010,000 | 28.964029 | 88 | 0.560624 | false |
teknolab/teknolab-wapiti | wapiti/file/auxtext.py | 1 | 1712 | #!/usr/bin/env python
# XML Report Generator Module for Wapiti Project
# Wapiti Project (http://wapiti.sourceforge.net)
#
# David del Pozo
# Alberto Pastor
# Copyright (C) 2008 Informatica Gesfor
# ICT Romulus (http://www.ict-romulus.eu)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
class AuxText:
"""Class for reading and writing in text files"""
def readLines(self,fileName):
"""returns a array"""
lines = []
f = None
try:
f = open(fileName)
for line in f:
cleanLine = line.strip(" \n")
if cleanLine != "":
lines.append(cleanLine.replace("\\0","\0"))
except IOError,e:
print e
#finally clause do not work with jyton
#finally:
#if f!=None:
#f.close()
return lines
#class
if __name__ == "__main__":
try:
l = AuxText()
ll = l.readLines("./config/execPayloads.txt")
for li in ll:
print li
except SystemExit:
pass
| gpl-2.0 | -7,611,681,490,759,631,000 | 31.301887 | 76 | 0.626752 | false |
liyangbit/liyangbit.github.io | _to-ipynb.py | 1 | 4868 | import os, re
import shutil
import csv
import datetime
# Main
thepath = os.getcwd()
ipynb_path = os.path.join(thepath, 'ipynb')
yaml_csv_path = os.path.join(ipynb_path, r'_post_head.csv')
today = datetime.datetime.today()
today = '{}-{:0>2d}-{:0>2d}'.format(today.year, today.month, today.day)
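# e.g. for March 5th 2017 the format above yields the string '2017-03-05'.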
# Read head string from "_post_head.csv"
with open(yaml_csv_path, 'r', encoding="utf8") as f:
hasPost = False
for row in csv.reader(f):
if len(row) == 1: # First line is the default post name
fname = row[0]
continue
if fname == row[1]:
if not os.path.isfile(os.path.join(ipynb_path, '{}.ipynb'.format(fname))):
print('\n\tWarning: "{}.ipynb" doesn\'t exist.\n\n'.format(fname))
exit()
date = row[0]
updatestr = ""
headstr = '---\n'
headstr += 'layout: posts\n'
headstr += 'title: {}\n'.format(row[2])
# headstr += 'categories: {}\n'.format(row[3])
# if date != today:
# headstr += 'update: {}\n'.format(today)
# headstr += 'tags: {}\n---\n\n'.format(row[4])
headstr += '---\n\n'
hasPost = True
break
if not hasPost:
print('\n\tError: No record relevant to "{}" in csv file.\n\n'.format(fname))
exit()
ipynb_image_path = os.path.join(ipynb_path, r'{}_files'.format(fname))
destination_path = os.path.join(os.path.join(thepath, 'assets'), 'ipynb-images')
post_path = os.path.join(thepath, r'_posts/{}.md').format(date + '-' + fname)
# Convert ipynb to markdown
os.system('jupyter nbconvert --to markdown ipynb/{}.ipynb'.format(fname))
# Move it to "/_posts" and renameit
shutil.move(os.path.join(ipynb_path, '{}.md'.format(fname)), os.path.join(thepath, r'_posts/{}.md').format(fname))
if os.path.isfile(post_path):
os.remove(post_path)
os.rename(os.path.join(thepath, r'_posts/{}.md').format(fname), post_path)
# Move the images under "/ipynb/<fname>_files" to "/assets/ipynb-images"
def moveallfiles(origindir, destinationdir, filename):
if not os.path.exists(origindir):
return
# Delete all image files which contain "fname" in their filename
for file in os.listdir(destinationdir):
if file[:len(filename)] == filename:
os.remove(os.path.join(destinationdir, file))
for file in os.listdir(origindir):
originfile = os.path.join(origindir, file)
destinationfile = os.path.join(destinationdir, file)
# If it exists, then delete it and then conduct the movement
if os.path.isfile(destinationfile):
os.remove(destinationfile)
shutil.move(originfile, destinationfile)
# Delete the origin image path
shutil.rmtree(ipynb_image_path)
moveallfiles(ipynb_image_path, destination_path, fname)
with open(post_path, 'r', encoding='utf8') as f:
fstr = f.read()
# Replace the image link strings
fstr = re.compile(r'{}_files'.format(fname)).sub(r'https://liyangbit.github.io/assets/ipynb-images', fstr)
fstr = headstr + fstr
# Convert HTML table to markdown table
def transfertable(tablehtml):
tablehtml = re.compile(r'<table>').sub('', tablehtml)
tablehtml = re.compile(r'</tbody>[\n]</table>').sub('', tablehtml)
# Table head
tablehtml = re.compile(r'<tr><th>').sub(r'#', tablehtml)
tablehead = re.compile(r'<thead>[\S\s]*?</thead>').findall(tablehtml)
if tablehead:
tablehead = tablehead[0]
# Headline
col_num = len(re.compile(r'</th>').findall(tablehead))
tablehtml = re.compile(r'<tbody>').sub('|' + ' --- |' * col_num, tablehtml)
headcontent = re.compile(r'(?<=>)[\S]*?(?=</th>)').findall(tablehead)
newhead = '| ' + ' | '.join(headcontent) + ' |'
tablehtml = re.compile(tablehead).sub(newhead, tablehtml)
# First column
firstcol = re.compile(r'(?<=\s)<tr>[\S\s]*?<td>').findall(tablehtml)
for cell in firstcol:
origincell = cell
cell = re.compile(r'<tr><th[^>]*?>').sub('| **', cell)
cell = re.compile(r'</th><td>').sub('** | ', cell)
tablehtml = re.compile('\t' + origincell).sub(cell, tablehtml)
# Table body
tablehtml = re.compile(r'<tr><td>').sub('| ', tablehtml)
tablehtml = re.compile(r'</td></tr>').sub(' |', tablehtml)
tablehtml = re.compile(r'</th><td>').sub(' | ', tablehtml)
tablehtml = re.compile(r'</td><td>').sub(' | ', tablehtml)
# Final Check
tablehtml = re.compile(r'<tbody>').sub("", tablehtml)
return tablehtml
tablehtmllst = re.compile(r'<table>[\s\S]*?</table>').findall(fstr)
if tablehtmllst:
for table in tablehtmllst:
fstr = re.compile(table).sub(transfertable(table), fstr)
os.remove(post_path)
fstr = re.sub(r"\n{5,}", "\n", fstr)
with open(post_path, 'w', encoding='utf8') as f:
f.write(fstr)
| mit | 7,849,797,015,370,449,000 | 37.944 | 114 | 0.604766 | false |
Onirik79/aaritmud | src/config.py | 1 | 40606 | # -*- coding: utf-8 -*-
"""
Module that manages the game configuration.
It loads the configuration file passed to the start_mud function in the
engine module.
"""
#= IMPORT ======================================================================
import ConfigParser
import sys
from src.color import check_colors, colors
from src.log import log
#= CONSTANTS ===================================================================
class ConfigOption(object):
def __init__(self, name, section, default, facultative, online, getter, setter, minihelp):
self.name = name
self.section = section
self.default = default
self.facultative = facultative
self.online = online
self.getter = getter
self.setter = setter
self.minihelp = minihelp
if not self.name:
log.bug("(ConfigOption: ?) name non valido: %r" % self.name)
return
if not self.section:
log.bug("(ConfigOption: %s) section non valido: %r" % (self.name, self.section))
if not self.getter:
log.bug("(ConfigOption: %s) getter non valido: %r" % (self.name, self.getter))
if not self.setter:
log.bug("(ConfigOption: %s) setter non valido: %r" % (self.name, self.setter))
if not self.minihelp:
log.bug("(ConfigOption: %s) minihelp non valido: %r" % (self.name, self.minihelp))
    #- End of Initialization -
CONFIG_OPTIONS = (
# name section default facult online, getter setter minihelp
ConfigOption("http_port", "SITE", 0, False, False, "getint", "set", "Porta http con cui i client si collegano al sito. L'opzione non è modificabile online"),
ConfigOption("site_address", "SITE", "http://", False, True, "get", "set", "Indirizzo relativo il sito"),
ConfigOption("allow_web_robots", "SITE", False, False, True, "getboolean", "set", "Indica se lasciare indicizzare o meno le pagine di aarit da parte dei bot dei motori di ricerca"),
ConfigOption("google_analytics_ua", "SITE", "", True, True, "get", "set", "User Application ID per google analytics, se viene inserito verrà creato del codice html nell'header di tutte le pagine web dinamiche, l'opzione è facoltativa"),
ConfigOption("max_google_translate", "SITE", 500, False, True, "getint", "set", "Lunghezza massima gestita dalle api di google translate"),
ConfigOption("max_feedback_len", "SITE", 400, False, True, "getint", "set", "Numero massimo di caratteri inseribili per il feedback riguardo alla compatibilità"),
ConfigOption("max_square_msg_len", "SITE", 100, False, True, "getint", "set", "Lunghezza massima postabile sulla piazzetta, a 0 significa che è disattivata"),
ConfigOption("max_square_messages", "SITE", 100, False, True, "getint", "set", "Numero massimo di messaggi visualizzabili sul sito, se impostato a 0 non mostrerà la piazzetta"),
ConfigOption("sending_interval", "SITE", 5, False, True, "getint", "set", "Secondi di attesa tra un send di nota, messaggio, post ed un altro"),
ConfigOption("text_color", "SITE", "silver", False, True, "get", "set", "Serve ad evitare di inviare codici di colore laddove non serve"),
ConfigOption("game_name", "SERVER", "Mud", False, True, "get", "set", "Nome del gioco"),
ConfigOption("server_name", "SERVER", "Server", False, True, "get", "set", "Nome del server che ospita il gioco"),
ConfigOption("engine_name", "SERVER", "Engine", False, True, "get", "set", "Nome del motore del gioco"),
ConfigOption("engine_version", "SERVER", "0.0", False, True, "get", "set", "Versione del motore del gioco"),
ConfigOption("staff_name", "SERVER", "Staff", False, True, "get", "set", "Nome dello staff del gioco"),
ConfigOption("motto", "SERVER", "Huzza!", False, True, "get", "set", "Motto o frase d'effetto per il gioco"),
ConfigOption("news_to_show", "SERVER", 5, False, True, "getint", "set", "Numero di novità da visualizzare nella homepage e quantità inviata ad ogni richiesta di visualizzazione delle novità più vecchie"),
ConfigOption("allow_player_gaming", "SERVER", True, False, True, "getboolean", "set", "Indica se è permesso far entrare i giocatori nel gioco"),
ConfigOption("save_persistence", "SERVER", True, False, True, "getboolean", "set", "Salva i dati che servono per mantenere la persistenza del mondo del Mud"),
ConfigOption("compression_mode", "SERVER", "bz2", False, True, "get", "set", "Tipologia di archiviazione dei backup creati da aarit"),
ConfigOption("max_output_buffer", "SERVER", 128000, False, True, "getint", "set", "Indica il limite massimo in Kb di buffer di output da inviare al giocatore, una volta sforato tale limite la connessione al client viene chiusa (valori tra 64000 a 128000)"),
ConfigOption("max_execution_time", "SERVER", 0.04, False, True, "getfloat", "set", "Indica il tempo massimo in secondi dell'esecuzione di un comando (al di là del quale le deferred automatiche di group_entities nel metodo split_entity potrebbero fare più danno di quanto non ne facciano normalmente)"),
ConfigOption("mail_on_enter_in_game", "MAIL", True, False, True, "getboolean", "set", "Avvisa se qualche giocatore non admin entra in gioco"),
ConfigOption("email", "MAIL", "?@?.?", False, True, "getemail", "set", "Indirizzo a cui vengono inviate le mail da parte dei giocatori"),
ConfigOption("smtp_host", "MAIL", "smpt.?", False, True, "get", "set", "SMTP con cui inviare le mail"),
ConfigOption("smtp_email", "MAIL", "?@?.?", False, True, "getemail", "set", "Email utilizzata per l'invio delle mail"),
ConfigOption("min_len_name", "GAME", 3, False, True, "getint", "set", "Lunghezza minima per un nome di account o di personaggio"),
ConfigOption("max_len_name", "GAME", 14, False, True, "getint", "set", "Lunghezza massima per un nome di account o di personaggio"),
ConfigOption("min_len_password", "GAME", 6, False, True, "getint", "set", "Lunghezza minima per una password"),
ConfigOption("max_len_password", "GAME", 24, False, True, "getint", "set", "Lunghezza massima per la password"),
ConfigOption("max_aliases", "GAME", 100, False, True, "getint", "set", "Numero massimo di alias creati per personaggio"),
ConfigOption("max_macros", "GAME", 100, False, True, "getint", "set", "Numero massimo di macro creati per personaggio"),
ConfigOption("max_account_players", "GAME", 30, False, True, "getint", "set", "Numero massimo di personaggi creabili in un account"), # (TD) da togliere la limitazione grazie al sistema dell'immaginetta di conferma del codice
ConfigOption("max_account_bugs", "GAME", 1000, False, True, "getint", "set", "Numero massimo di bachi segnalabili"),
ConfigOption("max_account_comments", "GAME", 1000, False, True, "getint", "set", "Numero massimo di commenti segnalabili"),
ConfigOption("max_account_typos", "GAME", 1000, False, True, "getint", "set", "Numero massimo di typo segnalabili"),
ConfigOption("max_account_ideas", "GAME", 1000, False, True, "getint", "set", "Numero massimo di idee segnalabili"),
ConfigOption("max_level", "GAME", 200, False, True, "getint", "set", "Livello massimo consentito dal gioco"),
ConfigOption("max_stat_value", "GAME", 100, False, True, "getint", "set", "Valore massimo per gli attributi come forza, velocità, etc"),
ConfigOption("max_skill_value", "GAME", 100, False, True, "getint", "set", "Valore massimo imparabile per le skill"),
ConfigOption("clumsy_value", "GAME", -100, False, True, "getint", "set", "Valore limite prima di considerare un lancio di dadi per una skill maldestra"),
ConfigOption("failure_value", "GAME", 50, False, True, "getint", "set", "Valore limite prima di considerare un lancio di dadi per una skill fallito"),
ConfigOption("success_value", "GAME", 200, False, True, "getint", "set", "Valore limite prima di considerare un lancio di dadi per una skill un successo"),
ConfigOption("masterly_value", "GAME", 250, False, True, "getint", "set", "Valore limite prima di considerare un lancio di dadi per una skill un magistrale"),
ConfigOption("starting_points", "GAME", 100, False, True, "getint", "set", "Valore iniziale utilizzato in vari modi dei punti vita, mana e vigore"),
ConfigOption("starting_attrs", "GAME", 30, False, True, "getint", "set", "Valore iniziale utilizzato per gli attributi"),
ConfigOption("min_repop_time", "GAME", 0, False, True, "getint", "set", "Minuti di tempo minimo impostabili per un reset di area"),
ConfigOption("max_repop_time", "GAME", 1440, False, True, "getint", "set", "Minuti di tempo massimo impostabili per un reset di area"),
ConfigOption("max_idle_seconds", "GAME", 900, False, True, "getint", "set", "Secondi di inattività massima prima che il mud esegua una sconnessione forzata"),
ConfigOption("chars_for_smile", "GAME", 8, False, True, "getint", "set", "Numero di caratteri controllati alla fine di una frase detta in cui viene cercato uno smile"),
ConfigOption("gift_on_enter", "GAME", None, True, True, "get", "set", "Entità da regalare ai giocatori che non l'hanno ancora, ogni volta che entrano, l'opzione è facoltativa"),
ConfigOption("initial_destination", "GAME", None, False, True, "get", "set", "Destinazione per i pg che entrano nel gioco"),
ConfigOption("min_secret_arg_len", "GAME", 2, False, True, "getint", "set", "Numero minimo da digitare"),
ConfigOption("max_behaviour_probability", "GAME", 300, False, True, "getint", "set", "Probabilità massima impostabile nelle etichette di behaviour"), # (TD) toglierla e farla a 100% fisso e non al 300% come ora
ConfigOption("purification_rpg_hours", "GAME", 24, False, True, "getint", "set", "Ore rpg prima che un'entità prescelta per la purificazione venga estratta"),
ConfigOption("leveling_restore_points", "GAME", False, False, True, "getboolean", "set", "Se impostato a vero indica che ad ogni livello nuovo guadagnato da un giocatore i punteggi di vita, mana e vigore vengono recuperati totalmente"),
ConfigOption("use_visual_grouping", "GAME", True, False, True, "getboolean", "set", "Se impostato a vero indica che gli oggetti verranno ammucchiati visivamente a seconda che la loro long sia uguale o meno"),
ConfigOption("use_physical_grouping", "GAME", True, False, True, "getboolean", "set", "Se impostato a vero indica che gli oggetti verranno ammucchiati fisicamente a seconda che siano tra di loro equivalenti"),
ConfigOption("currency_jump", "GAME", 1, False, True, "getint", "set", "Indica di quante decine le 4 valute rame, argento, oro e mithril si differenziano una dall'altra: 1, 10, 100 o 1000"),
ConfigOption("persistent_act_seconds", "GAME", 2, False, True, "getint", "set", "Indica quanti secondi durano i messaggi della persistenza dell'azione, cioè i messaggi di act utilizzati come long, valori validi tra 4 e 1"),
ConfigOption("running_step_time", "GAME", 1.0, False, True, "getfloat", "set", "Indica i secondi o i centesimi di secondo minimi tra un comando di movimento ed un altro per considerarlo come corsa, valori validi tra 2.0 e 0.1"),
ConfigOption("dam_plr_vs_plr", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra giocatori contro giocatori"),
ConfigOption("dam_plr_vs_mob", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra giocatori contro mob"),
ConfigOption("dam_plr_vs_item", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra giocatori contro oggetti"),
ConfigOption("dam_mob_vs_plr", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra mob contro giocatori"),
ConfigOption("dam_mob_vs_mob", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra mob contro mob"),
ConfigOption("dam_mob_vs_item", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra mob contro item"),
ConfigOption("dam_item_vs_plr", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra item contro giocatori"),
ConfigOption("dam_item_vs_mob", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra item contro mob"),
ConfigOption("dam_item_vs_item", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del danno inferto tra item contro item"),
ConfigOption("exp_modifier", "GAME", 100, False, True, "getint", "set", "Indica il modificatore in percentuale del guadagno dei punti di esperienza"),
ConfigOption("seconds_in_minute", "TIME", 2, False, True, "getint", "set", "Numero di secondi reali che formano un minuto rpg"),
ConfigOption("minutes_in_hour", "TIME", 60, False, True, "getint", "set", "Numero di minuti rpg in un'ora rpg"),
ConfigOption("hours_in_day", "TIME", 24, False, True, "getint", "set", "Numero delle ore rpg in un giorno rpg"),
ConfigOption("days_in_month", "TIME", 30, False, True, "getint", "set", "Numero dei giorni rpg in un mese rpg"),
ConfigOption("months_in_year", "TIME", 10, False, True, "getint", "set", "Numero dei mesi rpg in un anno rpg"),
ConfigOption("dawn_hour", "TIME", 5, False, True, "getint", "set", "Ora dell'aurora"),
ConfigOption("sunrise_hour", "TIME", 6, False, True, "getint", "set", "Ora dell'alba"),
ConfigOption("noon_hour", "TIME", 12, False, True, "getint", "set", "Ora del mezzogiorno"),
ConfigOption("sunset_hour", "TIME", 18, False, True, "getint", "set", "Ora del tramonto"),
ConfigOption("dusk_hour", "TIME", 19, False, True, "getint", "set", "Ora del crepuscolo"),
ConfigOption("midnight_hour", "TIME", 0, False, True, "getint", "set", "Ora relativa alla mezzanotte"),
ConfigOption("aggressiveness_loop_seconds","TIME", 1, False, True, "getfloat", "set", "Tempo in secondi del ciclo relativo all'invio di messaggi di minaccia (impostabile da 0.1 a 10)"),
ConfigOption("blob_loop_seconds", "TIME", 1, False, True, "getfloat", "set", "Tempo in secondi del ciclo relativo la dinamica dei fluidi (impostabile da 0.1 a 10)"),
ConfigOption("decomposer_loop_seconds", "TIME", 120, False, True, "getfloat", "set", "Tempo in secondi del ciclo relativo alla decomposizione dei cadaveri (impostabile da 12 a 1200)"),
ConfigOption("digestion_loop_seconds", "TIME", 60, False, True, "getfloat", "set", "Tempo in secondi del ciclo relativo alla digestione di cibo ingerito (impostabile da 6 a 600)"),
ConfigOption("fight_loop_seconds", "TIME", 0.1, False, True, "getfloat", "set", "Tempo in secondi del ciclo relativo al combattimento (impostabile da 0.01 a 1)"),
ConfigOption("game_loop_seconds", "TIME", 1, False, True, "getfloat", "set", "Tempo in secondi del ciclo relativo al gioco (impostabile da 0.1 a 10)"),
ConfigOption("maintenance_loop_seconds", "TIME", 60, False, True, "getfloat", "set", "Tempo in secondi del ciclo relativo alla manutenzione (impostabile da 0.1 a 10)"),
ConfigOption("room_behaviour_loop_seconds","TIME", 1, False, True, "getfloat", "set", "Tempo in secondi del ciclo relativo ai behaviour delle stanze (impostabile da 0.1 a 10)"),
ConfigOption("log_accents", "LOG", True, False, True, "getboolean", "set", "Logga gli accenti senza convertirli nei, volgarmente chiamati, 'accenti apostrofati'"),
ConfigOption("log_player_output", "LOG", False, False, True, "getboolean", "set", "Esegue un log, per ogni personaggio, di tutto l'output inviato relativo alla pagina del gioco"),
ConfigOption("print_entity_inputs", "LOG", False, False, True, "getboolean", "set", "Esegue il print su console degli input inviati dagli oggetti e dai mob"),
ConfigOption("track_behaviours", "LOG", True, False, True, "getboolean", "set", "Attiva o meno il sistema di tracking delle esecuzioni dei behaviour"),
ConfigOption("track_triggers", "LOG", True, False, True, "getboolean", "set", "Attiva o meno il sistema di tracking delle esecuzioni dei trigger"),
ConfigOption("reload_web_pages", "DEVELOPMENT", True, False, True, "getboolean", "set", "Ricompila i controller delle pagina web ad ogni loro richiesta, utile per modifiche on the fly senza riavviare il server"),
ConfigOption("reload_commands", "DEVELOPMENT", True, False, True, "getboolean", "set", "Ricompila il modulo del comando ad ogni sua chiamata, utile per modifiche on the fly senza riavviare il server"),
ConfigOption("reload_gamescripts", "DEVELOPMENT", True, False, True, "getboolean", "set", "Ricompila i gamescript, utile se si stanno modificando per test o debug"),
ConfigOption("use_subsequent_resets", "DEVELOPMENT", True, False, True, "getboolean", "set", "Abilita i reset successivi al primo, a volte può essere utile disatibilitarli per test"),
ConfigOption("use_behaviours", "DEVELOPMENT", True, False, True, "getboolean", "set", "Abilita o disabilita tutti i behaviour"),
ConfigOption("use_gamescripts", "DEVELOPMENT", True, False, True, "getboolean", "set", "Abilita o disabilita tutti i mudscripts"),
ConfigOption("use_profiler", "DEVELOPMENT", False, False, True, "getboolean", "set", "Attiva il sistema di profiling per analizzare i colli di bottiglia nel codice"),
ConfigOption("check_references", "DEVELOPMENT", False, False, True, "getboolean", "set", "Attiva o meno un sistema di controllo dei riferimenti di tutte le persistenze"),
ConfigOption("delete_pyc_files", "DEVELOPMENT", True, False, True, "getboolean", "set", "Cancella tutti i file py compilati alla chiusura del gioco per maggiore pulizia, soprattutto tra i file dat"),
ConfigOption("time_warp", "DEVELOPMENT", False, False, True, "getboolean", "set", "Se attivata tutte le deferred scatterrano dopo un secondo invece di attendere il loro naturale decorso di tempi, anche alcuni loop scattaranno il prima possibile (dipende dal loop, alcuni dopo un secondo altri dopo un minuto), questa opzione è utile per testare praticamente in real-time il normale flusso del codice senza dovre aspettare minuti e minuti"))
SUPPORTED_COMPRESSIONS = ("tar", "gz", "bz2")
#= CLASSI ======================================================================
class Config(ConfigParser.SafeConfigParser):
"""
    Class whose singleton instance is initialized at the end of the module.
"""
ready = False
filename = ""
def check_option_names(self):
try:
config_file = open(self.filename, "r")
except IOError:
log.bug("Impossibile aprire il file %s in lettura" % self.filename)
return
for line in config_file:
if not line.strip():
continue
if line[0] == "#":
continue
if "=" not in line:
continue
name = line.split("=")[0].strip()
for option in CONFIG_OPTIONS:
if option.name == name:
break
else:
log.bug("Non è stata trovata nessuna opzione dal nome %s nel file di configurazione %s" % (name, self.filename))
continue
    #- End of Method -
def load(self, filename):
self.filename = filename
self.check_option_names()
ConfigParser.SafeConfigParser.read(self, filename)
for option in CONFIG_OPTIONS:
if hasattr(self, option.name):
log.bug("L'opzione di config %s è già stata impostata precedentemente alla sezione %s" % (option.name, option.section))
continue
if not hasattr(self, option.getter):
log.bug("L'opzione di config %s non possiede il getter %s" % (option.name, option.getter))
setattr(self, option.name, option.default)
continue
getter = getattr(self, option.getter)
try:
value = getter(option.section, option.name)
except ConfigParser.NoOptionError:
if not option.facultative:
log.bug("Opzione %s mancante nel file %s, verrà caricata con il suo valore di default: %s" % (option.name, filename, option.default))
setattr(self, option.name, option.default)
else:
setattr(self, option.name, value)
self.ready = True
    #- End of Method -
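    # Usage sketch (the singleton instance is created at the end of the
    # module, outside this excerpt; the file name below is hypothetical):
    #
    #   config = Config()
    #   config.load("config.cfg")
    #   print config.game_name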
def save(self):
for option in reversed(CONFIG_OPTIONS):
value = getattr(self, option.name)
if not value and option.getter == "get" and not option.facultative:
log.bug("valore dell'opzione di config %s non valida e non opzionale: %s" % (option.name, value))
if not hasattr(self, option.setter):
log.bug("L'opzione di config %s non possiede il setter %s" % (option.name, option.getter))
continue
setter = getattr(self, option.setter)
setter(option.section, option.name, value)
try:
config_file = open(self.filename, "w")
except IOError:
log.bug("Impossibile aprire il file %s in scrittura" % self.filename)
return
ConfigParser.SafeConfigParser.write(self, config_file)
config_file.close()
    #- End of Method -
def finalize(self):
        # Convert the string read for the initial destination into
        # a proper Destination object
from room import Destination
destination = Destination()
destination.fread_the_line(None, self.initial_destination, "self.initial_destination")
from src.database import database
if destination.area in database["areas"]:
destination.area = database["areas"][destination.area]
else:
log.bug("Codice d'area %s inesistente nel file %s (sarà impossibile per i pg entrare in gioco)" % (
destination.area, self.filename))
self.initial_destination = destination
first_room = self.initial_destination.get_room()
if not first_room:
log.bug("initial_destination impostata nel file %s non punta ad una stanza valida: %r (first_room: %r)" % (
self.filename, self.initial_destination, first_room))
sys.exit(1)
return
# ---------------------------------------------------------------------
        # Retrieve the reference of the optional gift to put into the game
if self.gift_on_enter == "None":
self.gift_on_enter = None
elif self.gift_on_enter:
if "_item_" in self.gift_on_enter:
table_name = "proto_items"
else:
table_name = "proto_mobs"
if self.gift_on_enter in database[table_name]:
self.gift_on_enter = database[table_name][self.gift_on_enter]
else:
log.bug("Non è stata trovata nessuna entità di prototipo con codice %s nel database %s" % (
self.gift_on_enter, table_name))
    #- End of Method -
def iter_all_error_messages(self):
if not self.game_name:
yield "game_name è vuoto"
if self.game_name and not check_colors(self.game_name):
yield "game_name contiene dei stili errati: %s" % self.game_name
if not self.site_address:
yield "site_address è vuoto"
if self.site_address and not self.site_address.startswith("http://"):
yield "site_address non inizia con 'http://'"
if self.site_address and not self.site_address[-1].isalnum():
yield "site_address non finisce con un numero o con una stringa, dev'essere valido per poterci eventualmente concatenare la porta: %r" % self.site_address
if self.http_port < 1 or self.http_port > 65535:
yield "http_port è un valore errato: %d" % self.http_port
if not self.text_color:
yield "text_color non è valido: %r" % self.text_color
if self.text_color and not self.text_color.lower():
yield "text_color non ha tutti i caratteri minuscoli: %s" % self.text_color
if self.text_color and not self.text_color in colors:
yield "text_color non si trova tra i nomi dei colori: %s" % self.text_color
if not self.email:
yield "email mancante per poter inviare mail tramite l'smtp: %r" % self.email
if not self.smtp_email:
yield "smtp_email mancante per poter inviare mail tramite l'smtp: %r" % self.smtp_email
if not self.smtp_host:
yield "smtp_host non è un valore valido: %r" % self.smtp_host
if self.min_len_name < 2:
yield "min_len_name è minore di 2: %d" % self.min_len_name
if self.max_len_name < 3:
yield "max_len_name è minore di 3: %d" % self.max_len_name
if self.min_len_name >= self.max_len_name:
yield "min_len_name %d supera quella di max_len_name %d" % (self.min_len_name, self.max_len_name)
if self.min_len_password < 5:
yield "min_len_password è minore di 5: %d" % self.min_len_password
if self.max_len_password < 5:
yield "max_len_password è minore di 5: %d" % self.max_len_password
        if self.min_len_password >= self.max_len_password:
            yield "min_len_password %d exceeds max_len_password %d" % (self.min_len_password, self.max_len_password)
        if self.max_aliases < 0:
            yield "max_aliases cannot be negative: %d" % self.max_aliases
        if self.max_macros < 0:
            yield "max_macros cannot be negative: %d" % self.max_macros
        if self.max_account_players <= 0:
            yield "max_account_players must be greater than zero: %d" % self.max_account_players
        if self.max_account_bugs < 0:
            yield "max_account_bugs cannot be negative: %d" % self.max_account_bugs
        if self.max_account_typos < 0:
            yield "max_account_typos cannot be negative: %d" % self.max_account_typos
        if self.max_account_ideas < 0:
            yield "max_account_ideas cannot be negative: %d" % self.max_account_ideas
        if self.sending_interval < 1 or self.sending_interval > 60:
            yield "sending_interval is too low or too high: %d" % self.sending_interval
        if self.max_level < 50 or self.max_level > 1000:
            yield "max_level is not a valid value: %d (it should stay between 50 and 1000)" % self.max_level
        if self.max_stat_value < 50 or self.max_stat_value > 200:
            yield "max_stat_value is not a valid value: %d (it should stay between 50 and 200)" % self.max_stat_value
        if self.max_skill_value < 50 or self.max_skill_value > 200:
            yield "max_skill_value is not a valid value: %d (it should stay between 50 and 200)" % self.max_skill_value
        if self.clumsy_value < -300 or self.clumsy_value > 300:
            yield "clumsy_value is not a valid value: %d (it should stay between -300 and 300)" % self.clumsy_value
        if self.failure_value < -300 or self.failure_value > 300:
            yield "failure_value is not a valid value: %d (it should stay between -300 and 300)" % self.failure_value
        if self.success_value < -300 or self.success_value > 300:
            yield "success_value is not a valid value: %d (it should stay between -300 and 300)" % self.success_value
        if self.masterly_value < -300 or self.masterly_value > 300:
            yield "masterly_value is not a valid value: %d (it should stay between -300 and 300)" % self.masterly_value
        if self.starting_points < 10 or self.starting_points > 1000:
            yield "starting_points is not valid: %d (it should stay between 10 and 1000)" % self.starting_points
        if self.starting_attrs < 20 or self.starting_attrs > 50:
            yield "starting_attrs is not valid: %d (it should stay between 20 and 50)" % self.starting_attrs
        if self.min_repop_time < 0:
            yield "min_repop_time is less than zero: %d" % self.min_repop_time
        if self.max_repop_time < 0 or self.max_repop_time < self.min_repop_time:
            yield "max_repop_time is less than zero or less than min_repop_time: %d" % self.max_repop_time
        if self.max_idle_seconds < 60 * 5 or self.max_idle_seconds > 60 * 60:
            yield "max_idle_seconds is not a value between 5 minutes and one hour: %d" % self.max_idle_seconds
        if self.chars_for_smile < 0:
            yield "chars_for_smile cannot be negative: %d" % self.chars_for_smile
        if self.initial_destination.get_error_message() != "":
            yield self.initial_destination.get_error_message()
        if self.compression_mode not in SUPPORTED_COMPRESSIONS:
            yield "compression_mode is invalid: %r" % self.compression_mode
        if not self.motto:
            yield "motto is not a valid value: %r" % self.motto
        if not self.staff_name:
            yield "staff_name is not a valid value: %r" % self.staff_name
        if not self.engine_name:
            yield "engine_name is not a valid value: %r" % self.engine_name
        if not self.engine_version:
            yield "engine_version is not a valid value: %r" % self.engine_version
        if not self.server_name:
            yield "server_name is not a valid value: %r" % self.server_name
        if self.news_to_show < 5 or self.news_to_show > 100:
            yield "news_to_show must be between 5 and 100: %d" % self.news_to_show
        if self.max_google_translate < 100:
            yield "max_google_translate is not a valid number of characters: %d" % self.max_google_translate
        if self.max_square_msg_len < 32:
            yield "max_square_msg_len is not a valid number of characters: %d" % self.max_square_msg_len
        if self.max_square_messages < 10:
            yield "max_square_messages is not a valid quantity: %d" % self.max_square_messages
        if self.max_feedback_len < 64:
            yield "max_feedback_len is not a valid number of characters: %d" % self.max_feedback_len
        if self.min_secret_arg_len not in (1, 2, 3):
            yield "min_secret_arg_len must be between 1 and 3 inclusive: %d" % self.min_secret_arg_len
        if self.max_behaviour_probability < 0 or self.max_behaviour_probability > 1000:
            yield "max_behaviour_probability must be a number between 0 and 1000 inclusive: %d" % self.max_behaviour_probability
        if self.purification_rpg_hours < 0 or self.purification_rpg_hours > 720:
            yield "purification_rpg_hours must be a number between 0 and 720 inclusive: %d" % self.purification_rpg_hours
        if self.currency_jump not in (1, 10, 100, 1000):
            yield "currency_jump must be a power of ten between 1 and 1000 inclusive: %d" % self.currency_jump
        if self.persistent_act_seconds < 1 or self.persistent_act_seconds > 4:
            yield "persistent_act_seconds must be between 1 and 4 inclusive: %d" % self.persistent_act_seconds
        if self.running_step_time < 0.1 or self.running_step_time > 2.0:
            yield "running_step_time must be between 0.1 and 2.0 inclusive: %f" % self.running_step_time
        if self.dam_plr_vs_plr < 10 or self.dam_plr_vs_plr > 1000:
            yield "dam_plr_vs_plr must be between 10 and 1000 inclusive: %d" % self.dam_plr_vs_plr
        if self.dam_plr_vs_mob < 10 or self.dam_plr_vs_mob > 1000:
            yield "dam_plr_vs_mob must be between 10 and 1000 inclusive: %d" % self.dam_plr_vs_mob
        if self.dam_plr_vs_item < 10 or self.dam_plr_vs_item > 1000:
            yield "dam_plr_vs_item must be between 10 and 1000 inclusive: %d" % self.dam_plr_vs_item
        if self.dam_mob_vs_plr < 10 or self.dam_mob_vs_plr > 1000:
            yield "dam_mob_vs_plr must be between 10 and 1000 inclusive: %d" % self.dam_mob_vs_plr
        if self.dam_mob_vs_mob < 10 or self.dam_mob_vs_mob > 1000:
            yield "dam_mob_vs_mob must be between 10 and 1000 inclusive: %d" % self.dam_mob_vs_mob
        if self.dam_mob_vs_item < 10 or self.dam_mob_vs_item > 1000:
            yield "dam_mob_vs_item must be between 10 and 1000 inclusive: %d" % self.dam_mob_vs_item
        if self.dam_item_vs_plr < 10 or self.dam_item_vs_plr > 1000:
            yield "dam_item_vs_plr must be between 10 and 1000 inclusive: %d" % self.dam_item_vs_plr
        if self.dam_item_vs_mob < 10 or self.dam_item_vs_mob > 1000:
            yield "dam_item_vs_mob must be between 10 and 1000 inclusive: %d" % self.dam_item_vs_mob
        if self.dam_item_vs_item < 10 or self.dam_item_vs_item > 1000:
            yield "dam_item_vs_item must be between 10 and 1000 inclusive: %d" % self.dam_item_vs_item
        if self.exp_modifier < 10 or self.exp_modifier > 1000:
            yield "exp_modifier must be between 10 and 1000 inclusive: %d" % self.exp_modifier
        if self.max_output_buffer < 64000 or self.max_output_buffer > 256000:
            yield "max_output_buffer must be between 64000 and 256000: %d" % self.max_output_buffer
        if self.max_execution_time < 0.001 or self.max_execution_time > 0.5:
            yield "max_execution_time must be between 0.001 and 0.5: %f" % self.max_execution_time
        if self.seconds_in_minute < 1:
            yield "seconds_in_minute cannot be less than 1: %d" % self.seconds_in_minute
        if self.minutes_in_hour < 1:
            yield "minutes_in_hour cannot be less than 1: %d" % self.minutes_in_hour
        if self.hours_in_day < 1:
            yield "hours_in_day cannot be less than 1: %d" % self.hours_in_day
        if self.days_in_month < 1:
            yield "days_in_month cannot be less than 1: %d" % self.days_in_month
        if self.months_in_year < 1:
            yield "months_in_year cannot be less than 1: %d" % self.months_in_year
        if self.dawn_hour < 0 or self.dawn_hour > self.hours_in_day - 1:
            yield "dawn_hour is invalid: %d (it must be between 0 and %d)" % (self.dawn_hour, self.hours_in_day - 1)
        if self.sunrise_hour < 0 or self.sunrise_hour > self.hours_in_day - 1:
            yield "sunrise_hour is invalid: %d (it must be between 0 and %d)" % (self.sunrise_hour, self.hours_in_day - 1)
        if self.noon_hour < 0 or self.noon_hour > self.hours_in_day - 1:
            yield "noon_hour is invalid: %d (it must be between 0 and %d)" % (self.noon_hour, self.hours_in_day - 1)
        if self.sunset_hour < 0 or self.sunset_hour > self.hours_in_day - 1:
            yield "sunset_hour is invalid: %d (it must be between 0 and %d)" % (self.sunset_hour, self.hours_in_day - 1)
        if self.dusk_hour < 0 or self.dusk_hour > self.hours_in_day - 1:
            yield "dusk_hour is invalid: %d (it must be between 0 and %d)" % (self.dusk_hour, self.hours_in_day - 1)
        if self.midnight_hour < 0 or self.midnight_hour > self.hours_in_day - 1:
            yield "midnight_hour is invalid: %d (it must be between 0 and %d)" % (self.midnight_hour, self.hours_in_day - 1)
        if self.aggressiveness_loop_seconds < 0.1 or self.aggressiveness_loop_seconds > 10:
            yield "aggressiveness_loop_seconds is invalid: %f (it must be between 0.1 and 10)" % self.aggressiveness_loop_seconds
        if self.blob_loop_seconds < 0.1 or self.blob_loop_seconds > 10:
            yield "blob_loop_seconds is invalid: %f (it must be between 0.1 and 10)" % self.blob_loop_seconds
        if self.decomposer_loop_seconds < 12 or self.decomposer_loop_seconds > 1200:
            yield "decomposer_loop_seconds is invalid: %d (it must be between 12 and 1200)" % self.decomposer_loop_seconds
        if self.digestion_loop_seconds < 6 or self.digestion_loop_seconds > 600:
            yield "digestion_loop_seconds is invalid: %d (it must be between 6 and 600)" % self.digestion_loop_seconds
        if self.fight_loop_seconds < 0.01 or self.fight_loop_seconds > 1:
            yield "fight_loop_seconds is invalid: %f (it must be between 0.01 and 1)" % self.fight_loop_seconds
        if self.game_loop_seconds < 0.1 or self.game_loop_seconds > 10:
            yield "game_loop_seconds is invalid: %f (it must be between 0.1 and 10)" % self.game_loop_seconds
        if self.maintenance_loop_seconds < 6 or self.maintenance_loop_seconds > 60:
            yield "maintenance_loop_seconds is invalid: %d (it must be between 6 and 60)" % self.maintenance_loop_seconds
        if self.room_behaviour_loop_seconds < 0.1 or self.room_behaviour_loop_seconds > 10:
            yield "room_behaviour_loop_seconds is invalid: %f (it must be between 0.1 and 10)" % self.room_behaviour_loop_seconds
    #- End of Method -
    def get_error_message(self):
        messages = list(self.iter_all_error_messages())
        if not messages:
            return ""
        log.bug("(Config: filename %s) %s" % (self.filename, messages[0]))
        return messages[0]
    #- End of Method -
    #- Getter and setter methods -----------------------------------------------
    def getemail(self, section_name, option_name):
        return ConfigParser.SafeConfigParser.get(self, section_name, option_name)
    #- End of Method -
    def set(self, section_name, option_name, value):
        # Here even the options holding entities or other objects (gift_on_enter)
        # work without problems thanks to the __str__ method
        ConfigParser.SafeConfigParser.set(self, section_name, option_name, str(value))
    #- End of Method -
#= SINGLETON ===================================================================
config = Config()
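# Example usage (illustrative): validate the loaded configuration and report
# the first problem found, if any:
#   error = config.get_error_message()
#   if error:
#       print(error)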
| gpl-2.0 | 8,767,262,787,988,087,000 | 82.014344 | 467 | 0.603466 | false |
paulo-romano/google-python-exercises | basic/string1.py | 1 | 3596 | #!/usr/bin/python -tt
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
# Basic string exercises
# Fill in the code for the functions below. main() is already set up
# to call the functions with a few different inputs,
# printing 'OK' when each function is correct.
# The starter code for each function includes a 'return'
# which is just a placeholder for your code.
# It's ok if you do not complete all the functions, and there
# are some additional functions to try in string2.py.
# A. donuts
# Given an int count of a number of donuts, return a string
# of the form 'Number of donuts: <count>', where <count> is the number
# passed in. However, if the count is 10 or more, then use the word 'many'
# instead of the actual count.
# So donuts(5) returns 'Number of donuts: 5'
# and donuts(23) returns 'Number of donuts: many'
def donuts(count):
count = count if count < 10 else 'many'
return 'Number of donuts: {0}'.format(count)
# B. both_ends
# Given a string s, return a string made of the first 2
# and the last 2 chars of the original string,
# so 'spring' yields 'spng'. However, if the string length
# is less than 2, return instead the empty string.
def both_ends(s):
  return s[0:2] + s[-2:] if len(s) >= 2 else ''
# C. fix_start
# Given a string s, return a string
# where all occurences of its first char have
# been changed to '*', except do not change
# the first char itself.
# e.g. 'babble' yields 'ba**le'
# Assume that the string is length 1 or more.
# Hint: s.replace(stra, strb) returns a version of string s
# where all instances of stra have been replaced by strb.
def fix_start(s):
return s[0] + s[1:].replace(s[0], '*')
# D. MixUp
# Given strings a and b, return a single string with a and b separated
# by a space '<a> <b>', except swap the first 2 chars of each string.
# e.g.
# 'mix', pod' -> 'pox mid'
# 'dog', 'dinner' -> 'dig donner'
# Assume a and b are length 2 or more.
def mix_up(a, b):
x = b[:2] + a[2:]
y = a[:2] + b[2:]
return '{0} {1}'.format(x, y)
# Provided simple test() function used in main() to print
# what each function returns vs. what it's supposed to return.
def test(got, expected):
if got == expected:
prefix = ' OK '
else:
prefix = ' X '
print('%s got: %s expected: %s' % (prefix, repr(got), repr(expected)))
# Provided main() calls the above functions with interesting inputs,
# using test() to check if each result is correct or not.
def main():
print('donuts')
# Each line calls donuts, compares its result to the expected for that call.
test(donuts(4), 'Number of donuts: 4')
test(donuts(9), 'Number of donuts: 9')
test(donuts(10), 'Number of donuts: many')
test(donuts(99), 'Number of donuts: many')
print()
print('both_ends')
test(both_ends('spring'), 'spng')
test(both_ends('Hello'), 'Helo')
test(both_ends('a'), '')
test(both_ends('xyz'), 'xyyz')
print()
print('fix_start')
test(fix_start('babble'), 'ba**le')
test(fix_start('aardvark'), 'a*rdv*rk')
test(fix_start('google'), 'goo*le')
test(fix_start('donut'), 'donut')
print()
print('mix_up')
test(mix_up('mix', 'pod'), 'pox mid')
test(mix_up('dog', 'dinner'), 'dig donner')
test(mix_up('gnash', 'sport'), 'spash gnort')
test(mix_up('pezzy', 'firm'), 'fizzy perm')
# Standard boilerplate to call the main() function.
if __name__ == '__main__':
main()
| apache-2.0 | 76,194,563,181,987,950 | 31.990826 | 80 | 0.651001 | false |
gzamboni/sdnResilience | loxi/of12/action.py | 1 | 32027 | # Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
# Copyright (c) 2011, 2012 Open Networking Foundation
# Copyright (c) 2012, 2013 Big Switch Networks, Inc.
# See the file LICENSE.pyloxi which should have been included in the source distribution
# Automatically generated by LOXI from template module.py
# Do not modify
import struct
import loxi
import util
import loxi.generic_util
import sys
ofp = sys.modules['loxi.of12']
class action(loxi.OFObject):
subtypes = {}
def __init__(self, type=None):
if type != None:
self.type = type
else:
self.type = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 0)
subclass = action.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = action()
obj.type = reader.read("!H")[0]
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.type != other.type: return False
return True
def pretty_print(self, q):
q.text("action {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
class experimenter(action):
subtypes = {}
type = 65535
def __init__(self, experimenter=None, data=None):
if experimenter != None:
self.experimenter = experimenter
else:
self.experimenter = 0
if data != None:
self.data = data
else:
self.data = ''
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(self.data)
length = sum([len(x) for x in packed])
packed.append(loxi.generic_util.pad_to(8, length))
length += len(packed[-1])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 4)
subclass = experimenter.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = experimenter()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.experimenter = reader.read("!L")[0]
obj.data = str(reader.read_all())
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.experimenter != other.experimenter: return False
if self.data != other.data: return False
return True
def pretty_print(self, q):
q.text("experimenter {")
with q.group():
with q.indent(2):
q.breakable()
q.text("data = ");
q.pp(self.data)
q.breakable()
q.text('}')
action.subtypes[65535] = experimenter
class bsn(experimenter):
subtypes = {}
type = 65535
experimenter = 6035143
def __init__(self, subtype=None):
if subtype != None:
self.subtype = subtype
else:
self.subtype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!L', 8)
subclass = bsn.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = bsn()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
obj.subtype = reader.read("!L")[0]
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.subtype != other.subtype: return False
return True
def pretty_print(self, q):
q.text("bsn {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
experimenter.subtypes[6035143] = bsn
class bsn_checksum(bsn):
type = 65535
experimenter = 6035143
subtype = 4
def __init__(self, checksum=None):
if checksum != None:
self.checksum = checksum
else:
self.checksum = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append(util.pack_checksum_128(self.checksum))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = bsn_checksum()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
_subtype = reader.read("!L")[0]
assert(_subtype == 4)
obj.checksum = util.unpack_checksum_128(reader)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.checksum != other.checksum: return False
return True
def pretty_print(self, q):
q.text("bsn_checksum {")
with q.group():
with q.indent(2):
q.breakable()
q.text("checksum = ");
q.pp(self.checksum)
q.breakable()
q.text('}')
bsn.subtypes[4] = bsn_checksum
class bsn_mirror(bsn):
type = 65535
experimenter = 6035143
subtype = 1
def __init__(self, dest_port=None, vlan_tag=None, copy_stage=None):
if dest_port != None:
self.dest_port = dest_port
else:
self.dest_port = 0
if vlan_tag != None:
self.vlan_tag = vlan_tag
else:
self.vlan_tag = 0
if copy_stage != None:
self.copy_stage = copy_stage
else:
self.copy_stage = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append(struct.pack("!L", self.dest_port))
packed.append(struct.pack("!L", self.vlan_tag))
packed.append(struct.pack("!B", self.copy_stage))
packed.append('\x00' * 3)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = bsn_mirror()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
_subtype = reader.read("!L")[0]
assert(_subtype == 1)
obj.dest_port = reader.read("!L")[0]
obj.vlan_tag = reader.read("!L")[0]
obj.copy_stage = reader.read("!B")[0]
reader.skip(3)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.dest_port != other.dest_port: return False
if self.vlan_tag != other.vlan_tag: return False
if self.copy_stage != other.copy_stage: return False
return True
def pretty_print(self, q):
q.text("bsn_mirror {")
with q.group():
with q.indent(2):
q.breakable()
q.text("dest_port = ");
q.text("%#x" % self.dest_port)
q.text(","); q.breakable()
q.text("vlan_tag = ");
q.text("%#x" % self.vlan_tag)
q.text(","); q.breakable()
q.text("copy_stage = ");
q.text("%#x" % self.copy_stage)
q.breakable()
q.text('}')
bsn.subtypes[1] = bsn_mirror
class bsn_set_tunnel_dst(bsn):
type = 65535
experimenter = 6035143
subtype = 2
def __init__(self, dst=None):
if dst != None:
self.dst = dst
else:
self.dst = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!L", self.subtype))
packed.append(struct.pack("!L", self.dst))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = bsn_set_tunnel_dst()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 6035143)
_subtype = reader.read("!L")[0]
assert(_subtype == 2)
obj.dst = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.dst != other.dst: return False
return True
def pretty_print(self, q):
q.text("bsn_set_tunnel_dst {")
with q.group():
with q.indent(2):
q.breakable()
q.text("dst = ");
q.text("%#x" % self.dst)
q.breakable()
q.text('}')
bsn.subtypes[2] = bsn_set_tunnel_dst
class copy_ttl_in(action):
type = 12
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = copy_ttl_in()
_type = reader.read("!H")[0]
assert(_type == 12)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("copy_ttl_in {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[12] = copy_ttl_in
class copy_ttl_out(action):
type = 11
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = copy_ttl_out()
_type = reader.read("!H")[0]
assert(_type == 11)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("copy_ttl_out {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[11] = copy_ttl_out
class dec_mpls_ttl(action):
type = 16
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = dec_mpls_ttl()
_type = reader.read("!H")[0]
assert(_type == 16)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("dec_mpls_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[16] = dec_mpls_ttl
class dec_nw_ttl(action):
type = 24
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = dec_nw_ttl()
_type = reader.read("!H")[0]
assert(_type == 24)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("dec_nw_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[24] = dec_nw_ttl
class group(action):
type = 22
def __init__(self, group_id=None):
if group_id != None:
self.group_id = group_id
else:
self.group_id = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.group_id))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = group()
_type = reader.read("!H")[0]
assert(_type == 22)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.group_id = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.group_id != other.group_id: return False
return True
def pretty_print(self, q):
q.text("group {")
with q.group():
with q.indent(2):
q.breakable()
q.text("group_id = ");
q.text("%#x" % self.group_id)
q.breakable()
q.text('}')
action.subtypes[22] = group
class nicira(experimenter):
subtypes = {}
type = 65535
experimenter = 8992
def __init__(self, subtype=None):
if subtype != None:
self.subtype = subtype
else:
self.subtype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!H", self.subtype))
packed.append('\x00' * 2)
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
subtype, = reader.peek('!H', 8)
subclass = nicira.subtypes.get(subtype)
if subclass:
return subclass.unpack(reader)
obj = nicira()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 8992)
obj.subtype = reader.read("!H")[0]
reader.skip(2)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.subtype != other.subtype: return False
return True
def pretty_print(self, q):
q.text("nicira {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
experimenter.subtypes[8992] = nicira
class nicira_dec_ttl(nicira):
type = 65535
experimenter = 8992
subtype = 18
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.experimenter))
packed.append(struct.pack("!H", self.subtype))
packed.append('\x00' * 2)
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = nicira_dec_ttl()
_type = reader.read("!H")[0]
assert(_type == 65535)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
_experimenter = reader.read("!L")[0]
assert(_experimenter == 8992)
_subtype = reader.read("!H")[0]
assert(_subtype == 18)
reader.skip(2)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("nicira_dec_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
nicira.subtypes[18] = nicira_dec_ttl
class output(action):
type = 0
def __init__(self, port=None, max_len=None):
if port != None:
self.port = port
else:
self.port = 0
if max_len != None:
self.max_len = max_len
else:
self.max_len = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(util.pack_port_no(self.port))
packed.append(struct.pack("!H", self.max_len))
packed.append('\x00' * 6)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = output()
_type = reader.read("!H")[0]
assert(_type == 0)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.port = util.unpack_port_no(reader)
obj.max_len = reader.read("!H")[0]
reader.skip(6)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.port != other.port: return False
if self.max_len != other.max_len: return False
return True
def pretty_print(self, q):
q.text("output {")
with q.group():
with q.indent(2):
q.breakable()
q.text("port = ");
q.text(util.pretty_port(self.port))
q.text(","); q.breakable()
q.text("max_len = ");
q.text("%#x" % self.max_len)
q.breakable()
q.text('}')
action.subtypes[0] = output
class pop_mpls(action):
type = 20
def __init__(self, ethertype=None):
if ethertype != None:
self.ethertype = ethertype
else:
self.ethertype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!H", self.ethertype))
packed.append('\x00' * 2)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = pop_mpls()
_type = reader.read("!H")[0]
assert(_type == 20)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.ethertype = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.ethertype != other.ethertype: return False
return True
def pretty_print(self, q):
q.text("pop_mpls {")
with q.group():
with q.indent(2):
q.breakable()
q.text("ethertype = ");
q.text("%#x" % self.ethertype)
q.breakable()
q.text('}')
action.subtypes[20] = pop_mpls
class pop_vlan(action):
type = 18
def __init__(self):
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append('\x00' * 4)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = pop_vlan()
_type = reader.read("!H")[0]
assert(_type == 18)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
reader.skip(4)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
return True
def pretty_print(self, q):
q.text("pop_vlan {")
with q.group():
with q.indent(2):
q.breakable()
q.breakable()
q.text('}')
action.subtypes[18] = pop_vlan
class push_mpls(action):
type = 19
def __init__(self, ethertype=None):
if ethertype != None:
self.ethertype = ethertype
else:
self.ethertype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!H", self.ethertype))
packed.append('\x00' * 2)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = push_mpls()
_type = reader.read("!H")[0]
assert(_type == 19)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.ethertype = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.ethertype != other.ethertype: return False
return True
def pretty_print(self, q):
q.text("push_mpls {")
with q.group():
with q.indent(2):
q.breakable()
q.text("ethertype = ");
q.text("%#x" % self.ethertype)
q.breakable()
q.text('}')
action.subtypes[19] = push_mpls
class push_vlan(action):
type = 17
def __init__(self, ethertype=None):
if ethertype != None:
self.ethertype = ethertype
else:
self.ethertype = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!H", self.ethertype))
packed.append('\x00' * 2)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = push_vlan()
_type = reader.read("!H")[0]
assert(_type == 17)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.ethertype = reader.read("!H")[0]
reader.skip(2)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.ethertype != other.ethertype: return False
return True
def pretty_print(self, q):
q.text("push_vlan {")
with q.group():
with q.indent(2):
q.breakable()
q.text("ethertype = ");
q.text("%#x" % self.ethertype)
q.breakable()
q.text('}')
action.subtypes[17] = push_vlan
class set_field(action):
type = 25
def __init__(self, field=None):
if field != None:
self.field = field
else:
self.field = None
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(self.field.pack())
length = sum([len(x) for x in packed])
packed.append(loxi.generic_util.pad_to(8, length))
length += len(packed[-1])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = set_field()
_type = reader.read("!H")[0]
assert(_type == 25)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.field = ofp.oxm.oxm.unpack(reader)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.field != other.field: return False
return True
def pretty_print(self, q):
q.text("set_field {")
with q.group():
with q.indent(2):
q.breakable()
q.text("field = ");
q.pp(self.field)
q.breakable()
q.text('}')
action.subtypes[25] = set_field
class set_mpls_ttl(action):
type = 15
def __init__(self, mpls_ttl=None):
if mpls_ttl != None:
self.mpls_ttl = mpls_ttl
else:
self.mpls_ttl = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!B", self.mpls_ttl))
packed.append('\x00' * 3)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = set_mpls_ttl()
_type = reader.read("!H")[0]
assert(_type == 15)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.mpls_ttl = reader.read("!B")[0]
reader.skip(3)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.mpls_ttl != other.mpls_ttl: return False
return True
def pretty_print(self, q):
q.text("set_mpls_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.text("mpls_ttl = ");
q.text("%#x" % self.mpls_ttl)
q.breakable()
q.text('}')
action.subtypes[15] = set_mpls_ttl
class set_nw_ttl(action):
type = 23
def __init__(self, nw_ttl=None):
if nw_ttl != None:
self.nw_ttl = nw_ttl
else:
self.nw_ttl = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!B", self.nw_ttl))
packed.append('\x00' * 3)
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = set_nw_ttl()
_type = reader.read("!H")[0]
assert(_type == 23)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.nw_ttl = reader.read("!B")[0]
reader.skip(3)
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.nw_ttl != other.nw_ttl: return False
return True
def pretty_print(self, q):
q.text("set_nw_ttl {")
with q.group():
with q.indent(2):
q.breakable()
q.text("nw_ttl = ");
q.text("%#x" % self.nw_ttl)
q.breakable()
q.text('}')
action.subtypes[23] = set_nw_ttl
class set_queue(action):
type = 21
def __init__(self, queue_id=None):
if queue_id != None:
self.queue_id = queue_id
else:
self.queue_id = 0
return
def pack(self):
packed = []
packed.append(struct.pack("!H", self.type))
packed.append(struct.pack("!H", 0)) # placeholder for len at index 1
packed.append(struct.pack("!L", self.queue_id))
length = sum([len(x) for x in packed])
packed[1] = struct.pack("!H", length)
return ''.join(packed)
@staticmethod
def unpack(reader):
obj = set_queue()
_type = reader.read("!H")[0]
assert(_type == 21)
_len = reader.read("!H")[0]
orig_reader = reader
reader = orig_reader.slice(_len, 4)
obj.queue_id = reader.read("!L")[0]
return obj
def __eq__(self, other):
if type(self) != type(other): return False
if self.queue_id != other.queue_id: return False
return True
def pretty_print(self, q):
q.text("set_queue {")
with q.group():
with q.indent(2):
q.breakable()
q.text("queue_id = ");
q.text("%#x" % self.queue_id)
q.breakable()
q.text('}')
action.subtypes[21] = set_queue
| gpl-2.0 | 3,260,647,571,613,940,000 | 27.317418 | 88 | 0.518843 | false |
smallyear/linuxLearn | salt/salt/utils/find.py | 1 | 22291 | # -*- coding: utf-8 -*-
'''
Approximate the Unix find(1) command and return a list of paths that
meet the specified criteria.
The options include match criteria:
name = file-glob # case sensitive
iname = file-glob # case insensitive
regex = file-regex # case sensitive
iregex = file-regex # case insensitive
type = file-types # match any listed type
user = users # match any listed user
group = groups # match any listed group
size = [+-]number[size-unit] # default unit = byte
mtime = interval # modified since date
grep = regex # search file contents
and/or actions:
delete [= file-types] # default type = 'f'
exec = command [arg ...] # where {} is replaced by pathname
print [= print-opts]
and/or depth criteria:
    maxdepth = maximum depth to traverse in path
    mindepth = minimum depth to traverse before checking files or directories
The default action is 'print=path'.
file-glob:
* = match zero or more chars
? = match any char
[abc] = match a, b, or c
[!abc] or [^abc] = match anything except a, b, and c
[x-y] = match chars x through y
[!x-y] or [^x-y] = match anything except chars x through y
{a,b,c} = match a or b or c
file-regex:
a Python re (regular expression) pattern
file-types: a string of one or more of the following:
a: all file types
b: block device
c: character device
d: directory
p: FIFO (named pipe)
f: plain file
l: symlink
s: socket
users:
a space and/or comma separated list of user names and/or uids
groups:
a space and/or comma separated list of group names and/or gids
size-unit:
b: bytes
k: kilobytes
m: megabytes
g: gigabytes
t: terabytes
interval:
[<num>w] [<num>[d]] [<num>h] [<num>m] [<num>s]
where:
w: week
d: day
h: hour
m: minute
s: second
print-opts: a comma and/or space separated list of one or more of
the following:
group: group name
md5: MD5 digest of file contents
        mode: file permissions (as an integer)
mtime: last modification time (as time_t)
name: file basename
path: file absolute path
size: file size in bytes
type: file type
user: user name
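example:
    a criteria dict such as {'name': '*.txt', 'size': '+1k', 'print': 'path size'}
    matches plain files of at least one kilobyte whose names end in .txt and
    prints their path and size (illustrative).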
'''
# Import python libs
from __future__ import absolute_import, print_function
import logging
import os
import re
import stat
import shutil
import sys
import time
import shlex
from subprocess import Popen, PIPE
try:
import grp
import pwd
# TODO: grp and pwd are both used in the code, we better make sure that
# that code never gets run if importing them does not succeed
except ImportError:
pass
# Import 3rd-party libs
import salt.ext.six as six
# Import salt libs
import salt.utils
import salt.defaults.exitcodes
from salt.utils.filebuffer import BufferedReader
# Set up logger
log = logging.getLogger(__name__)
_REQUIRES_PATH = 1
_REQUIRES_STAT = 2
_REQUIRES_CONTENTS = 4
_FILE_TYPES = {'b': stat.S_IFBLK,
'c': stat.S_IFCHR,
'd': stat.S_IFDIR,
'f': stat.S_IFREG,
'l': stat.S_IFLNK,
'p': stat.S_IFIFO,
's': stat.S_IFSOCK,
stat.S_IFBLK: 'b',
stat.S_IFCHR: 'c',
stat.S_IFDIR: 'd',
stat.S_IFREG: 'f',
stat.S_IFLNK: 'l',
stat.S_IFIFO: 'p',
stat.S_IFSOCK: 's'}
_INTERVAL_REGEX = re.compile(r'''
^\s*
(?P<modifier>[+-]?)
(?: (?P<week> \d+ (?:\.\d*)? ) \s* [wW] )? \s*
(?: (?P<day> \d+ (?:\.\d*)? ) \s* [dD] )? \s*
(?: (?P<hour> \d+ (?:\.\d*)? ) \s* [hH] )? \s*
(?: (?P<minute> \d+ (?:\.\d*)? ) \s* [mM] )? \s*
(?: (?P<second> \d+ (?:\.\d*)? ) \s* [sS] )? \s*
$
''',
flags=re.VERBOSE)
def _parse_interval(value):
'''
    Convert an interval string like 1w3d6h into the number of seconds, time
    resolution (1 unit of the smallest specified time unit) and the modifier
    ('+', '-', or '').
w = week
d = day
h = hour
m = minute
s = second
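    Example (illustrative): _parse_interval('1w3d6h') == (885600.0, 3600, '')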
'''
match = _INTERVAL_REGEX.match(str(value))
if match is None:
raise ValueError('invalid time interval: {0!r}'.format(value))
result = 0
resolution = None
for name, multiplier in [('second', 1),
('minute', 60),
('hour', 60 * 60),
('day', 60 * 60 * 24),
('week', 60 * 60 * 24 * 7)]:
if match.group(name) is not None:
result += float(match.group(name)) * multiplier
if resolution is None:
resolution = multiplier
return result, resolution, match.group('modifier')
def _parse_size(value):
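    '''
    Parse a size specification into an inclusive (min_size, max_size) byte
    range. Examples (illustrative): '1k' -> (1024, 2047),
    '+1k' -> (1024, MAXSIZE), '-1k' -> (0, 1024).
    '''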
scalar = value.strip()
if scalar.startswith(('-', '+')):
style = scalar[0]
scalar = scalar[1:]
else:
style = '='
if len(scalar) > 0:
multiplier = {'b': 2 ** 0,
'k': 2 ** 10,
'm': 2 ** 20,
'g': 2 ** 30,
't': 2 ** 40}.get(scalar[-1].lower())
if multiplier:
scalar = scalar[:-1].strip()
else:
multiplier = 1
else:
multiplier = 1
try:
num = int(scalar) * multiplier
except ValueError:
try:
num = int(float(scalar) * multiplier)
except ValueError:
raise ValueError('invalid size: "{0}"'.format(value))
if style == '-':
min_size = 0
max_size = num
elif style == '+':
min_size = num
max_size = six.MAXSIZE
else:
min_size = num
max_size = num + multiplier - 1
return min_size, max_size
class Option(object):
'''
Abstract base class for all find options.
'''
def requires(self):
return _REQUIRES_PATH
class NameOption(Option):
'''
Match files with a case-sensitive glob filename pattern.
Note: this is the 'basename' portion of a pathname.
The option name is 'name', e.g. {'name' : '*.txt'}.
'''
def __init__(self, key, value):
self.regex = re.compile(value.replace('.', '\\.')
.replace('?', '.?')
.replace('*', '.*') + '$')
def match(self, dirname, filename, fstat):
return self.regex.match(filename)
class InameOption(Option):
'''
Match files with a case-insensitive glob filename pattern.
Note: this is the 'basename' portion of a pathname.
The option name is 'iname', e.g. {'iname' : '*.TXT'}.
'''
def __init__(self, key, value):
self.regex = re.compile(value.replace('.', '\\.')
.replace('?', '.?')
.replace('*', '.*') + '$',
re.IGNORECASE)
def match(self, dirname, filename, fstat):
return self.regex.match(filename)
class RegexOption(Option):
'''
Match files with a case-sensitive regular expression.
Note: this is the 'basename' portion of a pathname.
The option name is 'regex', e.g. {'regex' : '.*\\.txt'}.
'''
def __init__(self, key, value):
try:
self.regex = re.compile(value)
except re.error:
raise ValueError('invalid regular expression: "{0}"'.format(value))
def match(self, dirname, filename, fstat):
return self.regex.match(filename)
class IregexOption(Option):
'''
Match files with a case-insensitive regular expression.
Note: this is the 'basename' portion of a pathname.
The option name is 'iregex', e.g. {'iregex' : '.*\\.txt'}.
'''
def __init__(self, key, value):
try:
self.regex = re.compile(value, re.IGNORECASE)
except re.error:
raise ValueError('invalid regular expression: "{0}"'.format(value))
def match(self, dirname, filename, fstat):
return self.regex.match(filename)
class TypeOption(Option):
'''
Match files by their file type(s).
The file type(s) are specified as an optionally comma and/or space
separated list of letters.
b = block device
c = character device
d = directory
f = regular (plain) file
l = symbolic link
p = FIFO (named pipe)
s = socket
The option name is 'type', e.g. {'type' : 'd'} or {'type' : 'bc'}.
'''
def __init__(self, key, value):
# remove whitespace and commas
value = "".join(value.strip().replace(',', '').split())
self.ftypes = set()
for ftype in value:
try:
self.ftypes.add(_FILE_TYPES[ftype])
except KeyError:
raise ValueError('invalid file type "{0}"'.format(ftype))
def requires(self):
return _REQUIRES_STAT
def match(self, dirname, filename, fstat):
return stat.S_IFMT(fstat[stat.ST_MODE]) in self.ftypes
class OwnerOption(Option):
'''
Match files by their owner name(s) and/or uid(s), e.g. 'root'.
The names are a space and/or comma separated list of names and/or integers.
A match occurs when the file's uid matches any user specified.
The option name is 'owner', e.g. {'owner' : 'root'}.
'''
def __init__(self, key, value):
self.uids = set()
for name in value.replace(',', ' ').split():
if name.isdigit():
self.uids.add(int(name))
else:
try:
                    self.uids.add(pwd.getpwnam(name).pw_uid)
except KeyError:
raise ValueError('no such user "{0}"'.format(name))
def requires(self):
return _REQUIRES_STAT
def match(self, dirname, filename, fstat):
return fstat[stat.ST_UID] in self.uids
class GroupOption(Option):
'''
Match files by their group name(s) and/or uid(s), e.g. 'admin'.
The names are a space and/or comma separated list of names and/or integers.
A match occurs when the file's gid matches any group specified.
The option name is 'group', e.g. {'group' : 'admin'}.
'''
def __init__(self, key, value):
self.gids = set()
for name in value.replace(',', ' ').split():
if name.isdigit():
self.gids.add(int(name))
else:
try:
                    self.gids.add(grp.getgrnam(name).gr_gid)
except KeyError:
raise ValueError('no such group "{0}"'.format(name))
def requires(self):
return _REQUIRES_STAT
def match(self, dirname, filename, fstat):
return fstat[stat.ST_GID] in self.gids
class SizeOption(Option):
'''
Match files by their size.
Prefix the size with '-' to find files the specified size and smaller.
Prefix the size with '+' to find files the specified size and larger.
Without the +/- prefix, match the exact file size.
The size can be suffixed with (case-insensitive) suffixes:
b = bytes
k = kilobytes
m = megabytes
g = gigabytes
t = terabytes
The option name is 'size', e.g. {'size' : '+1G'}.
'''
def __init__(self, key, value):
self.min_size, self.max_size = _parse_size(value)
def requires(self):
return _REQUIRES_STAT
def match(self, dirname, filename, fstat):
return self.min_size <= fstat[stat.ST_SIZE] <= self.max_size
class MtimeOption(Option):
'''
Match files modified since the specified time.
The option name is 'mtime', e.g. {'mtime' : '3d'}.
The value format is [<num>w] [<num>[d]] [<num>h] [<num>m] [<num>s]
where num is an integer or float and the case-insensitive suffixes are:
w = week
d = day
h = hour
m = minute
s = second
Whitespace is ignored in the value.
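    For example (illustrative), {'mtime': '-1d'} matches files modified
    within the last day, while {'mtime': '1d'} matches files not modified
    within the last day.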
'''
def __init__(self, key, value):
secs, resolution, modifier = _parse_interval(value)
self.mtime = time.time() - int(secs / resolution) * resolution
self.modifier = modifier
def requires(self):
return _REQUIRES_STAT
def match(self, dirname, filename, fstat):
if self.modifier == '-':
return fstat[stat.ST_MTIME] >= self.mtime
else:
return fstat[stat.ST_MTIME] <= self.mtime
class GrepOption(Option):
    '''
    Match files when a pattern occurs within the file.
    The option name is 'grep', e.g. {'grep' : '(foo)|(bar)'}.
    '''
def __init__(self, key, value):
try:
self.regex = re.compile(value)
except re.error:
raise ValueError('invalid regular expression: "{0}"'.format(value))
def requires(self):
return _REQUIRES_CONTENTS | _REQUIRES_STAT
def match(self, dirname, filename, fstat):
if not stat.S_ISREG(fstat[stat.ST_MODE]):
return None
dfilename = os.path.join(dirname, filename)
with BufferedReader(dfilename, mode='rb') as bread:
for chunk in bread:
if self.regex.search(chunk):
return dfilename
return None
class PrintOption(Option):
'''
Return information about a matched file.
Print options are specified as a comma and/or space separated list of
one or more of the following:
group = group name
md5 = MD5 digest of file contents
mode = file mode (as integer)
mtime = last modification time (as time_t)
name = file basename
path = file absolute path
size = file size in bytes
type = file type
user = user name
'''
def __init__(self, key, value):
self.need_stat = False
self.print_title = False
self.fmt = []
for arg in value.replace(',', ' ').split():
self.fmt.append(arg)
if arg not in ['name', 'path']:
self.need_stat = True
if len(self.fmt) == 0:
self.fmt.append('path')
def requires(self):
return _REQUIRES_STAT if self.need_stat else _REQUIRES_PATH
def execute(self, fullpath, fstat, test=False):
result = []
for arg in self.fmt:
if arg == 'path':
result.append(fullpath)
elif arg == 'name':
result.append(os.path.basename(fullpath))
elif arg == 'size':
result.append(fstat[stat.ST_SIZE])
elif arg == 'type':
result.append(
_FILE_TYPES.get(stat.S_IFMT(fstat[stat.ST_MODE]), '?')
)
elif arg == 'mode':
result.append(int(oct(fstat[stat.ST_MODE])[-3:]))
elif arg == 'mtime':
result.append(fstat[stat.ST_MTIME])
elif arg == 'user':
uid = fstat[stat.ST_UID]
try:
result.append(pwd.getpwuid(uid).pw_name)
except KeyError:
result.append(uid)
elif arg == 'group':
gid = fstat[stat.ST_GID]
try:
result.append(grp.getgrgid(gid).gr_name)
except KeyError:
result.append(gid)
elif arg == 'md5':
if stat.S_ISREG(fstat[stat.ST_MODE]):
md5digest = salt.utils.get_hash(fullpath, 'md5')
result.append(md5digest)
else:
result.append('')
if len(result) == 1:
return result[0]
else:
return result
class DeleteOption(TypeOption):
'''
Deletes matched file.
Delete options are one or more of the following:
a: all file types
b: block device
c: character device
d: directory
p: FIFO (named pipe)
f: plain file
l: symlink
s: socket
'''
def __init__(self, key, value):
if 'a' in value:
value = 'bcdpfls'
super(self.__class__, self).__init__(key, value)
def execute(self, fullpath, fstat, test=False):
if test:
return fullpath
try:
if os.path.isfile(fullpath) or os.path.islink(fullpath):
os.remove(fullpath)
elif os.path.isdir(fullpath):
shutil.rmtree(fullpath)
        except (OSError, IOError) as exc:
            log.error('Failed to delete {0}: {1}'.format(fullpath, exc))
            return None
return fullpath
class ExecOption(Option):
'''
Execute the given command, {} replaced by filename.
Quote the {} if commands might include whitespace.
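    The option name is 'exec', e.g. {'exec' : 'ls -l {}'} (illustrative).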
'''
def __init__(self, key, value):
self.command = value
def execute(self, fullpath, fstat, test=False):
try:
command = self.command.replace('{}', fullpath)
            log.debug('Running find exec command: {0}'.format(command))
p = Popen(shlex.split(command),
stdout=PIPE,
stderr=PIPE)
(out, err) = p.communicate()
if err:
log.error(
'Error running command: {0}\n\n{1}'.format(
command,
salt.utils.to_str(err)))
return "{0}:\n{1}\n".format(command, salt.utils.to_str(out))
except Exception as e:
log.error(
'Exception while executing command "{0}":\n\n{1}'.format(
command,
e))
return '{0}: Failed'.format(fullpath)
class Finder(object):
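    '''
    Walk a directory tree, match entries against the criteria given in the
    options dict and apply the configured actions to every match.
    Illustrative example: Finder({'name': '*.log', 'print': 'path'}).
    '''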
def __init__(self, options):
self.actions = []
self.maxdepth = None
self.mindepth = 0
self.test = False
criteria = {_REQUIRES_PATH: list(),
_REQUIRES_STAT: list(),
_REQUIRES_CONTENTS: list()}
if 'mindepth' in options:
self.mindepth = options['mindepth']
del options['mindepth']
if 'maxdepth' in options:
self.maxdepth = options['maxdepth']
del options['maxdepth']
if 'test' in options:
self.test = options['test']
del options['test']
for key, value in six.iteritems(options):
if key.startswith('_'):
# this is a passthrough object, continue
continue
if value is None or len(str(value)) == 0:
raise ValueError('missing value for "{0}" option'.format(key))
try:
obj = globals()[key.title() + "Option"](key, value)
except KeyError:
raise ValueError('invalid option "{0}"'.format(key))
if hasattr(obj, 'match'):
requires = obj.requires()
if requires & _REQUIRES_CONTENTS:
criteria[_REQUIRES_CONTENTS].append(obj)
elif requires & _REQUIRES_STAT:
criteria[_REQUIRES_STAT].append(obj)
else:
criteria[_REQUIRES_PATH].append(obj)
if hasattr(obj, 'execute'):
self.actions.append(obj)
if len(self.actions) == 0:
self.actions.append(PrintOption('print', ''))
# order criteria so that least expensive checks are done first
self.criteria = criteria[_REQUIRES_PATH] + \
criteria[_REQUIRES_STAT] + \
criteria[_REQUIRES_CONTENTS]
def find(self, path):
'''
Generate filenames in path that satisfy criteria specified in
the constructor.
This method is a generator and should be repeatedly called
until there are no more results.
'''
for dirpath, dirs, files in os.walk(path):
depth = dirpath[len(path) + len(os.path.sep):].count(os.path.sep)
if depth == self.maxdepth:
dirs[:] = []
if depth >= self.mindepth:
for name in dirs + files:
fstat = None
matches = True
fullpath = None
for criterion in self.criteria:
if fstat is None and criterion.requires() & _REQUIRES_STAT:
fullpath = os.path.join(dirpath, name)
fstat = os.stat(fullpath)
if not criterion.match(dirpath, name, fstat):
matches = False
break
if matches:
if fullpath is None:
fullpath = os.path.join(dirpath, name)
for action in self.actions:
if (fstat is None and
action.requires() & _REQUIRES_STAT):
fstat = os.stat(fullpath)
result = action.execute(fullpath, fstat, test=self.test)
if result is not None:
yield result
def find(path, options):
'''
    Generate results for paths under ``path`` that satisfy the criteria and
    actions given in the ``options`` dict (see the module docstring for the
    available option names).
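    Example (illustrative):
        for result in find('/var/log', {'name': '*.log', 'print': 'path'}):
            print(result)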
'''
finder = Finder(options)
    for result in finder.find(path):
        yield result
def _main():
if len(sys.argv) < 2:
sys.stderr.write('usage: {0} path [options]\n'.format(sys.argv[0]))
sys.exit(salt.defaults.exitcodes.EX_USAGE)
path = sys.argv[1]
criteria = {}
for arg in sys.argv[2:]:
key, value = arg.split('=')
criteria[key] = value
try:
finder = Finder(criteria)
except ValueError as ex:
sys.stderr.write('error: {0}\n'.format(ex))
sys.exit(salt.defaults.exitcodes.EX_GENERIC)
for result in finder.find(path):
print(result)
if __name__ == '__main__':
_main()
| apache-2.0 | -1,515,526,065,301,240,800 | 31.165945 | 84 | 0.518864 | false |
Juanlu001/CBC.Solve | cbc/flow/saddlepointsolver.py | 1 | 7125 | __author__ = "Marie E. Rognes"
__copyright__ = "Copyright (C) 2012 Simula Research Laboratory and %s" % __author__
__license__ = "GNU GPL Version 3 or any later version"
# Last changed: 2012-05-01
__all__ = ["TaylorHoodSolver"]
from dolfin import *
from cbc.common.utils import *
from cbc.common import *
class TaylorHoodSolver(CBCSolver):
"""Navier-Stokes solver using a plain saddle point
formulation. This should be ridiculously robust. No boundary
forces allowed."""
def __init__(self, problem):
"Initialize Navier-Stokes solver"
# Initialize base class
CBCSolver.__init__(self)
# Set up parameters
self.parameters = Parameters("solver_parameters")
self.parameters.add("plot_solution", False)
self.parameters.add("save_solution", False)
self.parameters.add("store_solution_data", False)
zero_average_pressure = False
# Get mesh and time step range
mesh = problem.mesh()
dt, t_range = timestep_range_cfl(problem, mesh)
info("Using time step dt = %g" % dt)
# Function spaces
V1 = VectorFunctionSpace(mesh, "CG", 1)
V = VectorFunctionSpace(mesh, "CG", 2)
Q = FunctionSpace(mesh, "CG", 1)
if zero_average_pressure:
R = FunctionSpace(mesh, "R", 0)
W = MixedFunctionSpace([V, Q, R])
else:
W = V*Q
# Coefficients
mu = Constant(problem.viscosity()) # Dynamic viscosity [Ps x s]
rho = Constant(problem.density()) # Density [kg/m^3]
n = FacetNormal(mesh)
k = Constant(dt)
f = problem.body_force(V1)
g = problem.boundary_traction(V1)
w = problem.mesh_velocity(V1)
# If no body forces are specified, assume it is 0
if f == []:
f = Constant((0,)*V1.mesh().geometry().dim())
if g == []:
g = Constant((0,)*V1.mesh().geometry().dim())
# Create boundary conditions
bcu = create_dirichlet_conditions(problem.velocity_dirichlet_values(),
problem.velocity_dirichlet_boundaries(),
W.sub(0))
# Allow this just to be able to set all values directly
bcp = create_dirichlet_conditions(problem.pressure_dirichlet_values(),
problem.pressure_dirichlet_boundaries(),
W.sub(1))
# Create initial conditions
u0 = create_initial_condition(problem.velocity_initial_condition(), V)
u0 = interpolate(u0, V)
p0 = create_initial_condition(problem.pressure_initial_condition(), Q)
p0 = interpolate(p0, Q)
# Create initial function
upr0 = Function(W)
upr0.vector()[:V.dim()] = u0.vector()
upr0.vector()[V.dim():V.dim()+Q.dim()] = p0.vector()
# Create function for solution at previous time
upr_ = Function(W)
upr_.assign(upr0)
if zero_average_pressure:
(u_, p_, r_) = split(upr_)
else:
(u_, p_) = split(upr_)
#u0 = Function(V)
#p0 = Function(Q)
# Test and trial functions
upr = Function(W)
if zero_average_pressure:
(u, p, r) = split(upr)
(v, q, s) = TestFunctions(W)
else:
(u, p) = split(upr)
(v, q) = TestFunctions(W)
u1 = Function(V)
p1 = Function(Q)
# Define Cauchy stress tensor
def sigma(v, p):
return 2.0*mu*sym(grad(v)) - p*Identity(v.cell().d)
# Mixed formulation
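        # The nonlinear residual F below encodes a midpoint (Crank-Nicolson)
        # time discretisation of the incompressible Navier-Stokes equations in
        # ALE form (illustrative summary):
        #   rho (u - u_)/k . v + rho (grad U)(U - w) . v
        #       + sigma(U, p) : sym(grad v) + div(U) q = f . v + g . v|_ds
        # with U = (u + u_)/2 and w the prescribed mesh velocity.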
U = 0.5*(u_ + u)
F = (rho*(1/k)*inner(u - u_, v)*dx
+ rho*inner(grad(U)*(U - w), v)*dx
+ inner(sigma(U, p), sym(grad(v)))*dx
+ div(U)*q*dx
- inner(f, v)*dx
- inner(g, v)*ds)
if zero_average_pressure:
F += p*s*dx + q*r*dx
# Store variables needed for time-stepping
self.mesh_velocity = w
self.W = W
self.dt = dt
self.k = k
self.t_range = t_range
self.bcu = bcu
self.bcp = bcp
self.f = f
self.g = g
self.upr_ = upr_
self.upr = upr
self.u0 = u0
self.u1 = u1
self.p0 = p0
self.p1 = p1
self.F = F
# Empty file handlers / time series
self.velocity_file = None
self.pressure_file = None
        self.velocity_series = None
        self.pressure_series = None
        self.series = None
# Assemble matrices
self.reassemble()
def solve(self):
"Solve problem and return computed solution (u, p)"
# Time loop
for t in self.t_range:
# Solve for current time step
self.step(self.dt)
# Update
self.update(t)
self._end_time_step(t, self.t_range[-1])
return self.u1, self.p1
def step(self, dt):
"Compute solution for new time step"
# Always do this
self.dt = dt
self.k.assign(dt)
self.reassemble()
# Allow pressure boundary conditions for debugging
        bcs = list(self.bcu)
if self.bcp != []:
info_green("Including pressure DirichletBC at your risk")
bcs += self.bcp
# Compute solution
begin("Computing velocity and pressure and multiplier")
solve(self.F == 0, self.upr, bcs)
self.u1.assign(self.upr.split()[0])
self.p1.assign(self.upr.split()[1])
end()
return (self.u1, self.p1)
def update(self, t):
# This is hardly robust
# Update the time on the body force
self.f.t = t
self.g.t = t
# Propagate values
self.upr_.assign(self.upr)
self.u0.assign(self.u1)
self.p0.assign(self.p1)
# Plot solution
if self.parameters["plot_solution"]:
plot(self.p1, title="Pressure", rescale=True)
plot(self.u1, title="Velocity", rescale=True)
# Store solution (for plotting)
if self.parameters["save_solution"]:
if self.velocity_file is None: self.velocity_file = File("velocity.pvd")
if self.pressure_file is None: self.pressure_file = File("pressure.pvd")
self.velocity_file << self.u1
self.pressure_file << self.p1
# Store solution data
if self.parameters["store_solution_data"]:
if self.series is None:
self.series = TimeSeries("velocity-pressure-multiplier")
self.series.store(self.upr.vector(), t)
return self.u1, self.p1
def reassemble(self):
"Reassemble matrices, needed when mesh or time step has changed"
info("(Re)assembling matrices")
info("No action taken here in this solver")
def solution(self):
"Return current solution values"
return self.u1, self.p1
def solution_values(self):
"Return solution values at t_{n-1} and t_n"
return (self.u0, self.u1, self.p0, self.p1)
| gpl-3.0 | 742,052,942,741,142,500 | 30.113537 | 84 | 0.543298 | false |
GaretJax/irco | irco/migrations/env.py | 1 | 2676 | from __future__ import with_statement
from alembic import context
from sqlalchemy import engine_from_config, pool
from logging.config import dictConfig
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
dictConfig({
'version': 1,
'formatters': {
'generic': {
'format': '%(levelname)-5.5s [%(name)s] %(message)s',
'datefmt': '%H:%M:%S',
},
},
'handlers': {
'console': {
'class': 'logging.StreamHandler',
'formatter': 'generic',
}
},
'loggers': {
'root': {
'level': 'WARN',
'handlers': ['console'],
},
'sqlalchemy': {
'level': 'WARN',
'handlers': ['console'],
'qualname': 'sqlalchemy.engine',
},
'alembic': {
'level': 'INFO',
'handlers': ['console'],
'qualname': 'alembic',
},
}
})
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(url=url, target_metadata=target_metadata)
with context.begin_transaction():
context.run_migrations()
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
engine = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool
)
connection = engine.connect()
context.configure(
connection=connection,
target_metadata=target_metadata
)
try:
with context.begin_transaction():
context.run_migrations()
finally:
connection.close()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
| mit | 2,895,014,743,879,245,000 | 24.730769 | 69 | 0.610613 | false |