commit (stringlengths 40-40) | subject (stringlengths 1-3.25k) | old_file (stringlengths 4-311) | new_file (stringlengths 4-311) | old_contents (stringlengths 0-26.3k) | lang (stringclasses 3 values) | proba (float64 0-1) | diff (stringlengths 0-7.82k)
---|---|---|---|---|---|---|---|
1bd52a7019eb0ec47fb3594f2d67f2e36a02b823
|
Fix bug in the test which was causing a test failure in some scenarios when running tests in standalone fashion (the compute test didn't set the token value which this test relies on).
|
libcloud/test/loadbalancer/test_brightbox.py
|
libcloud/test/loadbalancer/test_brightbox.py
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.loadbalancer.base import Member, Algorithm
from libcloud.loadbalancer.drivers.brightbox import BrightboxLBDriver
from libcloud.loadbalancer.types import State
from libcloud.test import MockHttp
from libcloud.test.secrets import LB_BRIGHTBOX_PARAMS
from libcloud.test.file_fixtures import LoadBalancerFileFixtures
class BrightboxLBTests(unittest.TestCase):
def setUp(self):
BrightboxLBDriver.connectionCls.conn_class = BrightboxLBMockHttp
BrightboxLBMockHttp.type = None
self.driver = BrightboxLBDriver(*LB_BRIGHTBOX_PARAMS)
def test_list_protocols(self):
protocols = self.driver.list_protocols()
self.assertEqual(len(protocols), 2)
self.assertTrue('tcp' in protocols)
self.assertTrue('http' in protocols)
def test_list_balancers(self):
balancers = self.driver.list_balancers()
self.assertEqual(len(balancers), 1)
self.assertEqual(balancers[0].id, 'lba-1235f')
self.assertEqual(balancers[0].name, 'lb1')
def test_get_balancer(self):
balancer = self.driver.get_balancer(balancer_id='lba-1235f')
self.assertEqual(balancer.id, 'lba-1235f')
self.assertEqual(balancer.name, 'lb1')
self.assertEqual(balancer.state, State.RUNNING)
def test_destroy_balancer(self):
balancer = self.driver.get_balancer(balancer_id='lba-1235f')
self.assertTrue(self.driver.destroy_balancer(balancer))
def test_create_balancer(self):
members = [Member('srv-lv426', None, None)]
balancer = self.driver.create_balancer(name='lb2', port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'lb2')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.state, State.PENDING)
def test_balancer_list_members(self):
balancer = self.driver.get_balancer(balancer_id='lba-1235f')
members = balancer.list_members()
self.assertEqual(len(members), 1)
self.assertEqual(members[0].balancer, balancer)
self.assertEqual('srv-lv426', members[0].id)
def test_balancer_attach_member(self):
balancer = self.driver.get_balancer(balancer_id='lba-1235f')
member = balancer.attach_member(Member('srv-kg983', ip=None,
port=None))
self.assertEqual(member.id, 'srv-kg983')
def test_balancer_detach_member(self):
balancer = self.driver.get_balancer(balancer_id='lba-1235f')
member = Member('srv-lv426', None, None)
self.assertTrue(balancer.detach_member(member))
class BrightboxLBMockHttp(MockHttp):
fixtures = LoadBalancerFileFixtures('brightbox')
def _token(self, method, url, body, headers):
if method == 'POST':
return (httplib.OK, self.fixtures.load('token.json'), {'content-type': 'application/json'},
httplib.responses[httplib.OK])
def _1_0_load_balancers(self, method, url, body, headers):
if method == 'GET':
return (httplib.OK, self.fixtures.load('load_balancers.json'), {'content-type': 'application/json'},
httplib.responses[httplib.OK])
elif method == 'POST':
body = self.fixtures.load('load_balancers_post.json')
return (httplib.ACCEPTED, body, {'content-type': 'application/json'},
httplib.responses[httplib.ACCEPTED])
def _1_0_load_balancers_lba_1235f(self, method, url, body, headers):
if method == 'GET':
body = self.fixtures.load('load_balancers_lba_1235f.json')
return (httplib.OK, body, {'content-type': 'application/json'},
httplib.responses[httplib.OK])
elif method == 'DELETE':
return (httplib.ACCEPTED, '', {'content-type': 'application/json'},
httplib.responses[httplib.ACCEPTED])
def _1_0_load_balancers_lba_1235f_add_nodes(self, method, url, body,
headers):
if method == 'POST':
return (httplib.ACCEPTED, '', {'content-type': 'application/json'},
httplib.responses[httplib.ACCEPTED])
def _1_0_load_balancers_lba_1235f_remove_nodes(self, method, url, body,
headers):
if method == 'POST':
return (httplib.ACCEPTED, '', {'content-type': 'application/json'},
httplib.responses[httplib.ACCEPTED])
if __name__ == "__main__":
sys.exit(unittest.main())
|
Python
| 0 |
@@ -1349,16 +1349,71 @@
= None%0A
+ BrightboxLBDriver.connectionCls.token = 'test'%0A
|
434f565dc7b2b1c037b36096ad10978cfc0eaa49
|
add yaml dump encode utf8
|
lmdo/resolvers/templates_resolver.py
|
lmdo/resolvers/templates_resolver.py
|
import os
import tempfile
import json
import yaml
from lmdo.resolvers import Resolver
from lmdo.convertors.env_var_convertor import EnvVarConvertor
from lmdo.convertors.stack_var_convertor import StackVarConvertor
from lmdo.convertors.nested_template_url_convertor import NestedTemplateUrlConvertor
from lmdo.file_loader import FileLoader
from lmdo.config import FILE_LOADER_TEMPLATE_ALLOWED_EXT
from lmdo.oprint import Oprint
class TemplatesResolver(Resolver):
"""
Resolve templates
"""
YAML_TO = {'!': '^'}
TO_YAML = {'^': '!'}
def __init__(self, template_path, repo_path=None):
if not os.path.isfile(template_path):
Oprint.err('Template not found by given path {}'.format(templated_path), 'cloudformation')
self._template_path = template_path
# Default to project root if not given
self._repo_path = repo_path or './'
self._temp_dir = tempfile.mkdtemp()
def resolve(self):
return self.get_templates()
def get_templates(self):
"""Get all the nested stacks"""
# Create master template
templates = {
"tmp_dir": self._temp_dir,
"master": None,
"children": []
}
def yaml_tmp_ctor(loader, tag_suffix, node):
if tag.suffix.startswith('!'):
return node
master_tpl = FileLoader(file_path=self._template_path, allowed_ext=FILE_LOADER_TEMPLATE_ALLOWED_EXT, yaml_replacements=self.YAML_TO).process()
template_urls = []
for name, resource in master_tpl['Resources'].iteritems():
if resource['Type'] == 'AWS::CloudFormation::Stack':
template_urls.append(resource['Properties']['TemplateURL'])
if template_urls:
for url in template_urls:
if url.startswith('$template'):
header, template_name = url.split("|")
templates['children'].append(self.create_template(template_name))
templates['master'] = self.create_template(self._template_path)
return templates
def create_template(self, template_name):
"""Create shadow template for upload"""
# Setup convertor chain
env_var_convertor = EnvVarConvertor()
stack_var_convertor = StackVarConvertor()
nested_template_convertor = NestedTemplateUrlConvertor()
env_var_convertor.successor = stack_var_convertor
stack_var_convertor.successor = nested_template_convertor
if not os.path.isfile(template_name):
file_path = self.find_template(template_name)
else:
file_path = template_name
if not file_path:
Oprint.err('Cannot find template {} in {}'.format(template_name, self._repo_path))
file_loader = FileLoader(file_path=file_path, allowed_ext=FILE_LOADER_TEMPLATE_ALLOWED_EXT, yaml_replacements=self.YAML_TO)
file_loader.successor = env_var_convertor
result = file_loader.process()
if file_loader.is_yaml():
result = yaml.safe_dump(result, default_flow_style=False)
for key, value in self.TO_YAML.iteritems():
result = result.replace(key, value)
template_name = os.path.basename(file_path)
new_file_path = os.path.join(self._temp_dir, template_name)
with open(new_file_path, 'w+') as f:
f.write(json.dumps(result))
f.close()
return new_file_path
def find_template(self, template_name):
"""Get list of params files"""
findings = []
if os.path.isdir(self._repo_path):
findings = FileLoader.find_files_by_names(search_path=self._repo_path, only_files=[template_name])
# Only return the first found
return findings[0] if findings else None
|
Python
| 0.000002 |
@@ -3158,16 +3158,36 @@
le=False
+, encoding=('utf-8')
)%0A
|
c2ca2932ba6aa29e002317ce7775611a2ed39d42
|
fix template resolver for yaml file
|
lmdo/resolvers/templates_resolver.py
|
lmdo/resolvers/templates_resolver.py
|
import os
import tempfile
import json
from lmdo.resolvers import Resolver
from lmdo.convertors.env_var_convertor import EnvVarConvertor
from lmdo.convertors.stack_var_convertor import StackVarConvertor
from lmdo.convertors.nested_template_url_convertor import NestedTemplateUrlConvertor
from lmdo.file_loader import FileLoader
from lmdo.config import FILE_LOADER_TEMPLATE_ALLOWED_EXT
from lmdo.oprint import Oprint
class TemplatesResolver(Resolver):
"""
Resolve templates
"""
def __init__(self, template_path, repo_path=None):
if not os.path.isfile(template_path):
Oprint.err('Template not found by given path {}'.format(templated_path), 'cloudformation')
self._template_path = template_path
# Default to project root if not given
self._repo_path = repo_path or './'
self._temp_dir = tempfile.mkdtemp()
def resolve(self):
return self.get_templates()
def get_templates(self):
"""Get all the nested stacks"""
# Create master template
templates = {
"tmp_dir": self._temp_dir,
"master": None,
"children": []
}
templates['master'] = self._template_path
with open(templates['master'], 'r') as outfile:
master_tpl = json.loads(outfile.read())
template_urls = []
for name, resource in master_tpl['Resources'].iteritems():
if resource['Type'] == 'AWS::CloudFormation::Stack':
template_urls.append(resource['Properties']['TemplateURL'])
if template_urls:
for url in template_urls:
if url.startswith('$template'):
header, template_name = url.split("|")
templates['children'].append(self.create_template(template_name))
return templates
def create_template(self, template_name):
"""Create shadow template for upload"""
# Setup convertor chain
env_var_convertor = EnvVarConvertor()
stack_var_convertor = StackVarConvertor()
nested_template_convertor = NestedTemplateUrlConvertor()
env_var_convertor.successor = stack_var_convertor
stack_var_convertor.successor = nested_template_convertor
file_path = self.find_template(template_name)
if not file_path:
Oprint.err('Cannot find template {} in {}'.format(template_name, self._repo_path))
file_loader = FileLoader(file_path=file_path, allowed_ext=FILE_LOADER_TEMPLATE_ALLOWED_EXT)
file_loader.successor = env_var_convertor
result = file_loader.process()
template_name = os.path.basename(file_path)
new_file_path = os.path.join(self._temp_dir, template_name)
with open(new_file_path, 'w+') as f:
f.write(unicode(result))
f.close()
return new_file_path
def find_template(self, template_name):
"""Get list of params files"""
findings = []
if os.path.isdir(self._repo_path):
findings = FileLoader.find_files_by_names(search_path=self._repo_path, only_files=[template_name])
print(findings)
# Only return the first found
return findings[0] if findings else None
|
Python
| 0 |
@@ -1171,158 +1171,127 @@
-templates%5B'master'%5D = self._template_path%0A with open(templates%5B'master'%5D, 'r') as outfile:%0A master_tpl = json.loads(outfile.read())%0A
+master_tpl = FileLoader(file_path=self._template_path, allowed_ext=FILE_LOADER_TEMPLATE_ALLOWED_EXT).process()%0A
%0A
@@ -1787,16 +1787,97 @@
e_name))
+%0A %0A templates%5B'master'%5D = self.create_template(self._template_path)
%0A%0A
@@ -2312,62 +2312,175 @@
tor%0A
-%0A file_path = self.find_template(template_name)
+ %0A if not os.path.isfile(template_name):%0A file_path = self.find_template(template_name)%0A else: %0A file_path = template_name%0A
%0A
@@ -3310,32 +3310,8 @@
%0A
- print(findings)%0A
|
4974eda0ffb690368fde95e0b3987bc4dabf9c2b
|
use nonshuffled dataset
|
vpdataset.py
|
vpdataset.py
|
from torchtext import data
import os
import pdb
import random
import math
import re
import torch
class VP(data.Dataset):
"""modeled after Shawn1993 github user's Pytorch implementation of Kim2014 - cnn for text categorization"""
filename = "wilkins_corrected.shuffled.tsv"
@staticmethod
def sort_key(ex):
return len(ex.text)
def __init__(self, text_field, label_field, path=None, examples=None, **kwargs):
"""Create a virtual patient (VP) dataset instance given a path and fields.
Arguments:
text_field: The field that will be used for text data.
label_field: The field that will be used for label data.
path: Path to the data file.
examples: The examples contain all the data.
Remaining keyword arguments: Passed to the constructor of
data.Dataset.
"""
#no preprocessing needed
fields = [('text', text_field), ('label', label_field)]
if examples is None:
path = self.dirname if path is None else path
examples = []
with open(os.path.join(path, self.filename)) as f:
lines = f.readlines()
#pdb.set_trace()
for line in lines:
label, text = line.split("\t")
this_example = data.Example.fromlist([text, label], fields)
examples += [this_example]
#assume "target \t source", one instance per line
# print(examples[0].text)
super(VP, self).__init__(examples, fields, **kwargs)
@classmethod
#def splits(cls, text_field, label_field, dev_ratio=.1, shuffle=True ,root='.', **kwargs):
def splits(cls, text_field, label_field, numfolds=10, foldid=None, dev_ratio=.1, shuffle=False, root='.',
num_experts=0, **kwargs):
"""Create dataset objects for splits of the VP dataset.
Arguments:
text_field: The field that will be used for the sentence.
label_field: The field that will be used for label data.
dev_ratio: The ratio that will be used to get split validation dataset.
shuffle: Whether to shuffle the data before split.
root: The root directory that the dataset's zip archive will be
expanded into; therefore the directory in whose trees
subdirectory the data files will be stored.
train: The filename of the train data. Default: 'train.txt'.
Remaining keyword arguments: Passed to the splits method of
Dataset.
"""
#path = cls.download_or_unzip(root)
#examples = cls(text_field, label_field, path=path, **kwargs).examples
examples = cls(text_field, label_field, path=root, **kwargs).examples
if shuffle: random.shuffle(examples)
fields = [('text', text_field), ('label', label_field)]
label_examples = []
label_filename = 'labels.txt'
with open(label_filename) as f:
lines = f.readlines()
# pdb.set_trace()
for line in lines:
label, text = line.split("\t")
this_example = data.Example.fromlist([text, label], fields)
label_examples += [this_example]
if foldid==None:
dev_index = -1 * int(dev_ratio*len(examples))
return (cls(text_field, label_field, examples=examples[:dev_index]),
cls(text_field, label_field, examples=examples[dev_index:]))
else:
#get all folds
fold_size = math.ceil(len(examples)/numfolds)
folds = []
for fold in range(numfolds):
startidx = fold*fold_size
endidx = startidx+fold_size if startidx+fold_size < len(examples) else len(examples)
folds += [examples[startidx:endidx]]
#take all folds except foldid as training/dev
traindev = [fold for idx, fold in enumerate(folds) if idx != foldid]
traindev = [item for sublist in traindev for item in sublist]
dev_index = -1 * int(dev_ratio*len(traindev))
#test will be entire held out section (foldid)
test = folds[foldid]
# print(len(traindev[:dev_index]), 'num_experts', num_experts)
if num_experts > 0:
assert num_experts <= 5
trains = []
devs = []
dev_length = math.floor(len(traindev) * dev_ratio)
# print(dev_length)
for i in range(num_experts):
devs.append(cls(text_field, label_field, examples=traindev[dev_length*i:dev_length*(i+1)]))
trains.append(cls(text_field, label_field, examples=traindev[:dev_length*i]+traindev[dev_length*(i+1):]+label_examples))
return (trains, devs, cls(text_field, label_field, examples=test))
else:
return (cls(text_field, label_field, examples=traindev[:dev_index]+label_examples),
cls(text_field, label_field, examples=traindev[dev_index:]),
cls(text_field, label_field, examples=test))
def clean_str(string):
"""
Tokenization/string cleaning for all datasets except for SST.
"""
string = re.sub("[^A-Za-z0-9(),!?\'\`]", " ", string)
string = re.sub("\'s", " \'s", string)
string = re.sub("\'m", " \'m", string)
string = re.sub("\'ve", " \'ve", string)
string = re.sub("n\'t", " n\'t", string)
string = re.sub("\'re", " \'re", string)
string = re.sub("\'d", " \'d", string)
string = re.sub("\'ll", " \'ll", string)
string = re.sub(",", " , ", string)
string = re.sub("!", " ! ", string)
string = re.sub("\(", " ( ", string)
string = re.sub("\)", " ) ", string)
string = re.sub("\?", " ? ", string)
string = re.sub("\s{2,}", " ", string)
return pad2(string.strip().lower().split(" "))
def pad2(x):
x = ['<pad>', '<pad>', '<pad>', '<pad>'] + x
return x
|
Python
| 0.00001 |
@@ -266,17 +266,8 @@
ted.
-shuffled.
tsv%22
|
93168e36773e9c4c52db7336ccea9ecf9614b12f
|
remove main block
|
src/robot/libraries/ProcessLibrary.py
|
src/robot/libraries/ProcessLibrary.py
|
# Copyright 2008-2013 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals
import subprocess
import tempfile
from robot.utils import ConnectionCache
class ProcessData(object):
def __init__(self, stdout, stderr):
self.stdout = stdout
self.stderr = stderr
class ProcessLibrary(object):
ROBOT_LIBRARY_SCOPE='GLOBAL'
def __init__(self):
self._started_processes = ConnectionCache()
self._logs = dict()
self._tempdir = tempfile.mkdtemp(suffix="processlib")
def run_process(self, command, *args, **conf):
active_process_index = self._started_processes.current_index
try:
p = self.start_new_process(command, *args, **conf)
return self.wait_for_process(p)
finally:
self._started_processes.switch(active_process_index)
def start_new_process(self, command, *args, **conf):
cmd = [command]+[str(i) for i in args]
config = _NewProcessConfig(conf, self._tempdir)
stdout_stream = config.stdout_stream
stderr_stream = config.stderr_stream
pd = ProcessData(stdout_stream.name, stderr_stream.name)
use_shell = config.use_shell
if use_shell and args:
cmd = subprocess.list2cmdline(cmd)
p = subprocess.Popen(cmd, stdout=stdout_stream, stderr=stderr_stream,
shell=use_shell, cwd=config.cwd)
index = self._started_processes.register(p, alias=config.alias)
self._logs[index] = pd
return index
def process_is_alive(self, handle=None):
if handle:
self._started_processes.switch(handle)
return self._started_processes.current.poll() is None
def process_should_be_alive(self, handle=None):
if not self.process_is_alive(handle):
raise AssertionError('Process is not alive')
def process_should_be_dead(self, handle=None):
if self.process_is_alive(handle):
raise AssertionError('Process is alive')
def wait_for_process(self, handle=None):
if handle:
self._started_processes.switch(handle)
exit_code = self._started_processes.current.wait()
logs = self._logs[handle]
return ExecutionResult(logs.stdout, logs.stderr, exit_code)
def kill_process(self, handle=None):
if handle:
self._started_processes.switch(handle)
self._started_processes.current.kill()
def terminate_process(self, handle=None):
if handle:
self._started_processes.switch(handle)
self._started_processes.current.terminate()
def kill_all_processes(self):
for handle in range(len(self._started_processes._connections)):
if self.process_is_alive(handle):
self.kill_process(handle)
def get_process_id(self, handle=None):
if handle:
self._started_processes.switch(handle)
return self._started_processes.current.pid
def input_to_process(self, handle, msg):
if not msg:
return
alog = self._logs[handle]
self._started_processes.switch(handle)
self._started_processes.current.wait()
with open(alog.stdout,'a') as f:
f.write(msg.encode('UTF-8'))
def switch_active_process(self, handle):
self._started_processes.switch(handle)
class ExecutionResult(object):
_stdout = _stderr = None
def __init__(self, stdout_name, stderr_name, exit_code=None):
self._stdout_name = stdout_name
self._stderr_name = stderr_name
self.exit_code = exit_code
@property
def stdout(self):
if self._stdout is None:
with open(self._stdout_name,'r') as f:
self._stdout = f.read()
return self._stdout
@property
def stderr(self):
if self._stderr is None:
with open(self._stderr_name,'r') as f:
self._stderr = f.read()
return self._stderr
if __name__ == '__main__':
r = ProcessLibrary().run_process('python', '-c', "print \'hello\'")
print repr(r.stdout)
class _NewProcessConfig(object):
def __init__(self, conf, tempdir):
self._tempdir = tempdir
self._conf = conf
self.stdout_stream = open(conf['stdout'], 'w') if 'stdout' in conf else self._get_temp_file("stdout")
self.stderr_stream = open(conf['stderr'], 'w') if 'stderr' in conf else self._get_temp_file("stderr")
self.use_shell = (conf.get('shell', 'False') != 'False')
self.cwd = conf.get('cwd', None)
self.alias = conf.get('alias', None)
def _get_temp_file(self, suffix):
return tempfile.NamedTemporaryFile(delete=False,
prefix='tmp_logfile_',
suffix="_%s" % suffix,
dir=self._tempdir)
|
Python
| 0.000096 |
@@ -4550,132 +4550,8 @@
rr%0A%0A
-if __name__ == '__main__':%0A r = ProcessLibrary().run_process('python', '-c', %22print %5C'hello%5C'%22)%0A print repr(r.stdout)%0A
%0Acla
|
4ab211d6dd50c043cacd24db93a6bc64cfdb9ed5
|
update tools/validate_runtests_log.py for pytest
|
tools/validate_runtests_log.py
|
tools/validate_runtests_log.py
|
#!/usr/bin/env python
"""
Take the test runner log output from the stdin, looking for
the magic line nose runner prints when the test run was successful.
In an ideal world, this should be done directly in runtests.py using the
nose API, some failure modes are fooling nose to terminate the python process
with zero exit code, see, eg, https://github.com/scipy/scipy/issues/4736
In short, lapack's xerbla can terminate the process with a fortran level STOP
command, which (i) aborts the py process so that runtests.py does not finish,
and (ii) the exit code is implementation-defined.
Also check that the number of tests run is larger than some baseline number
(taken from the state of the master branch at some random point in time.)
This probably could/should be made less brittle.
"""
from __future__ import print_function
import sys
import re
if __name__ == "__main__":
# full or fast test suite?
try:
testmode = sys.argv[1]
if testmode not in ('fast', 'full'):
raise IndexError
except IndexError:
raise ValueError("Usage: validate.py {full|fast} < logfile.")
# fetch the expected number of tests
# these numbers are for 6abad09
# XXX: this should probably track the commit hash or commit date
expected_size = {'full': 19055,
'fast': 17738}
# read in the log, parse for the nose printout:
# Ran NNN tests in MMMs
# <blank line>
# OK (SKIP=X, KNOWNFAIL=Y) or FAILED (errors=X, failures=Y)
r = re.compile("Ran (?P<num_tests>\d+) tests in (?P<time>\d+\S+)")
status, found_it = False, False
while True:
line = sys.stdin.readline()
if not line:
break
m = r.search(line)
if m:
found_it = True
sys.stdin.readline() # skip the next one
line = sys.stdin.readline()
if "OK" in line:
status = True
break
if found_it:
# did it errored or failed?
if not status:
print("*** Looks like some tests failed.")
sys.exit(-1)
# now check that the number of tests run is reasonable
expected = expected_size[testmode]
actual = int(m.group('num_tests'))
if actual < expected:
print("*** Too few tests: expected %s, run %s" % (expected, actual))
sys.exit(1)
else:
sys.exit(0)
else:
print('*** Test runner validation errored: did the run really finish?')
sys.exit(-1)
|
Python
| 0.000001 |
@@ -1368,20 +1368,22 @@
for the
-nose
+pytest
printou
@@ -1387,126 +1387,116 @@
tout
-:
%0A
-# Ran NNN tests in MMMs%0A # %3Cblank line%3E%0A # OK (SKIP=X, KNOWNFAIL=Y) or FAILED (errors=X, failures=Y
+r1 = re.compile(%22(?P%3Cnum_failed%3E%5Cd+) failed, (?P%3Cnum_passed%3E%5Cd+) passed,.* in (?P%3Ctime%3E%5Cd+%5CS+)%22
)%0A r
+2
= r
@@ -1510,20 +1510,16 @@
le(%22
-Ran
(?P%3Cnum_
test
@@ -1518,24 +1518,29 @@
num_
-tests%3E%5Cd+) tests
+passed%3E%5Cd+) passed,.*
in
@@ -1566,16 +1566,8 @@
%0A
- status,
fou
@@ -1577,15 +1577,8 @@
it =
- False,
Fal
@@ -1684,16 +1684,17 @@
m = r
+1
.search(
@@ -1710,16 +1710,20 @@
if
+not
m:%0A
@@ -1733,249 +1733,261 @@
-found_it = True%0A sys.stdin.read
+m = r2.search(
line
-(
)
+%0A
-# skip the next one%0A line = sys.stdin.readline()%0A if %22OK%22 in line:%0A status = True%0A break%0A%0A if found_it:%0A # did it errored or
+ if m:%0A found_it = True%0A break%0A%0A if found_it:%0A passed = int(m.group('num_passed'))%0A try:%0A failed = int(m.group('num_failed'))%0A except IndexError:%0A
failed
-?
+ = 0%0A
%0A
@@ -1998,18 +1998,14 @@
if
-not status
+failed
:%0A
@@ -2210,33 +2210,23 @@
l =
-int(m.group('num_tests'))
+passed + failed
%0A
|
1e819b6a003ac1c5094f08146a4c364d4112a9ef
|
Fix import
|
calvinextras/calvinsys/io/dht11temphumidity/raspberry_pi/DHT11.py
|
calvinextras/calvinsys/io/dht11temphumidity/raspberry_pi/DHT11.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2017 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from calvin.runtime.south.calvinsys.io.dht11temphumidity.BaseDHT11 import BaseDHT11
from calvin.utilities.calvinlogger import get_logger
from calvin.runtime.south.plugins.async import async
import pigpio
# FIXME: Allow faster operation by queing results and don't wait for readout
# before resetting _in_progress flag?
_log = get_logger(__name__)
class DHT11(BaseDHT11):
"""
Calvinsys object handling DHT11 temperature and humidity sensor
The temparature readout is not used.
"""
def init(self, pin, **kwargs):
self._pin = pin
self._in_progress = False
self._humidity = None
self._gpio = pigpio.pi()
self._gpio.set_pull_up_down(self._pin, pigpio.PUD_OFF)
self._gpio.set_mode(self._pin, pigpio.INPUT)
self._read_data_handle = None
self._edge_ticks = None
self._listen_timeout = 1.0
self._listen_in_progress = None
self._humidity_last_reading = 0.0
def can_write(self):
return self._in_progress is False
def write(self, measure):
# self._in_progress = async.DelayedCall(0.0, self._read_temp)
# 0 Switch to OUT ___ 18000 ___
# 1. Pull down for 20ms trigger \___ //___/
# 2. Switch to IN ___ 80 ____
# 3. Wait for 80us ack \____/
# 4. Read 40 bits ___ 50 ____________ 50 ____ ....
# Format is \____/ 27=0, 70=1 \____/
# +----+------------+
# and stop bit ___ 50 ___ ....
# \____/
# (just read 41 falling edges...)
self._in_progress = True
self._gpio.set_mode(self._pin, pigpio.OUTPUT)
self._gpio.write(self._pin, 0)
async.DelayedCall(0.025, self._switch_to_listen_cb)
self._listen_in_progress = async.DelayedCall(self._listen_timeout, self._listen_failed)
def _switch_to_listen_cb(self):
self._gpio.write(self._pin, 1)
self._edge_ticks = []
self._gpio.set_mode(self._pin, pigpio.INPUT)
self._read_data_handle = self._gpio.callback(self._pin, pigpio.FALLING_EDGE, self._read_data_cb)
def _listen_failed(self):
_log.info("DHT11 read timeout, returning {}".format(self._humidity_last_reading))
self._read_data_handle.cancel()
self._read_data_handle = None
self._humidity = self._humidity_last_reading
self.scheduler_wakeup()
def _read_data_cb(self, pin, edge, tick):
self._edge_ticks.append(tick)
if len(self._edge_ticks) < 41:
return
async.call_from_thread(self._listen_in_progress.cancel)
self._read_data_handle.cancel()
self._read_data_handle = None
self._parse_ticks()
def _parse_ticks(self):
res = []
t0 = self._edge_ticks.pop(0)
for t in self._edge_ticks:
res.append("1" if t-t0 > 99 else "0")
t0 = t
longbin = ''.join(res)
rhint = int(longbin[0:8], 2)
# rhdec = int(longbin[8:16], 2) # Decimal part is always 0, see DHT11 docs (resolution)
tint = int(longbin[16:24], 2)
# tdec = int(longbin[24:32], 2) # Decimal part is always 0, see DHT11 docs (resolution)
chksum = int(longbin[32:40], 2)
bytesum = rhint + tint
# print "RH={}.{}, T={}.{}, CS={}, BS={}, OK={}".format(rhint, 0, tint, 0, chksum, bytesum, chksum == bytesum)
self._humidity = rhint
async.call_from_thread(self.scheduler_wakeup)
def can_read(self):
return self._humidity is not None
def read(self):
self._in_progress = False
humidity = self._humidity
self._humidity_last_reading = humidity
self._humidity = None
return humidity
def close(self):
if self._read_data_handle:
self._read_data_handle.cancel()
del self._gpio
self._gpio = None
|
Python
| 0.000002 |
@@ -609,30 +609,22 @@
m calvin
-.runtime.south
+extras
.calvins
|
ab3088f7413cc605d9cd852d243726e5bcb353da
|
Disable shtest-timeout on Windows
|
utils/lit/tests/shtest-timeout.py
|
utils/lit/tests/shtest-timeout.py
|
# REQUIRES: python-psutil
# FIXME: This test is fragile because it relies on time which can
# be affected by system performance. In particular we are currently
# assuming that `short.py` can be successfully executed within 2
# seconds of wallclock time.
# Test per test timeout using external shell
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: -j 1 -v --debug --timeout 2 --param external=1 > %t.extsh.out 2> %t.extsh.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.extsh.out %s
# RUN: FileCheck --check-prefix=CHECK-EXTSH-ERR < %t.extsh.err %s
#
# CHECK-EXTSH-ERR: Using external shell
# Test per test timeout using internal shell
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: -j 1 -v --debug --timeout 2 --param external=0 > %t.intsh.out 2> %t.intsh.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.intsh.out %s
# RUN: FileCheck --check-prefix=CHECK-INTSH-OUT < %t.intsh.out %s
# RUN: FileCheck --check-prefix=CHECK-INTSH-ERR < %t.intsh.err %s
# CHECK-INTSH-OUT: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-INTSH-OUT: command output:
# CHECK-INTSH-OUT: command reached timeout: True
# CHECK-INTSH-ERR: Using internal shell
# Test per test timeout set via a config file rather than on the command line
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: -j 1 -v --debug --param external=0 \
# RUN: --param set_timeout=2 > %t.cfgset.out 2> %t.cfgset.err
# RUN: FileCheck --check-prefix=CHECK-OUT-COMMON < %t.cfgset.out %s
# RUN: FileCheck --check-prefix=CHECK-CFGSET-ERR < %t.cfgset.err %s
#
# CHECK-CFGSET-ERR: Using internal shell
# CHECK-OUT-COMMON: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-OUT-COMMON: Timeout: Reached timeout of 2 seconds
# CHECK-OUT-COMMON: Command {{([0-9]+ )?}}Output
# CHECK-OUT-COMMON: PASS: per_test_timeout :: short.py
# CHECK-OUT-COMMON: Expected Passes{{ *}}: 1
# CHECK-OUT-COMMON: Individual Timeouts{{ *}}: 1
# Test per test timeout via a config file and on the command line.
# The value set on the command line should override the config file.
# RUN: not %{lit} \
# RUN: %{inputs}/shtest-timeout/infinite_loop.py \
# RUN: %{inputs}/shtest-timeout/short.py \
# RUN: -j 1 -v --debug --param external=0 \
# RUN: --param set_timeout=1 --timeout=2 > %t.cmdover.out 2> %t.cmdover.err
# RUN: FileCheck --check-prefix=CHECK-CMDLINE-OVERRIDE-OUT < %t.cmdover.out %s
# RUN: FileCheck --check-prefix=CHECK-CMDLINE-OVERRIDE-ERR < %t.cmdover.err %s
# CHECK-CMDLINE-OVERRIDE-ERR: Forcing timeout to be 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: TIMEOUT: per_test_timeout :: infinite_loop.py
# CHECK-CMDLINE-OVERRIDE-OUT: Timeout: Reached timeout of 2 seconds
# CHECK-CMDLINE-OVERRIDE-OUT: Command {{([0-9]+ )?}}Output
# CHECK-CMDLINE-OVERRIDE-OUT: PASS: per_test_timeout :: short.py
# CHECK-CMDLINE-OVERRIDE-OUT: Expected Passes{{ *}}: 1
# CHECK-CMDLINE-OVERRIDE-OUT: Individual Timeouts{{ *}}: 1
|
Python
| 0.000429 |
@@ -20,16 +20,44 @@
psutil%0A%0A
+# PR33944%0A# XFAIL: windows%0A%0A
# FIXME:
|
d4cbcfff04738d8fea0070f724540345a04d517a
|
Fix NameError bug: s/Devirtualizer/SpeculativeDevirtualizer/
|
utils/pass-pipeline/src/passes.py
|
utils/pass-pipeline/src/passes.py
|
from pass_pipeline import Pass
# TODO: This should not be hard coded. Create a tool in the compiler that knows
# how to dump the passes and the pipelines themselves.
AADumper = Pass('AADumper')
ABCOpt = Pass('ABCOpt')
AllocBoxToStack = Pass('AllocBoxToStack')
CFGPrinter = Pass('CFGPrinter')
COWArrayOpts = Pass('COWArrayOpts')
CSE = Pass('CSE')
CapturePromotion = Pass('CapturePromotion')
CapturePropagation = Pass('CapturePropagation')
ClosureSpecializer = Pass('ClosureSpecializer')
CodeMotion = Pass('CodeMotion')
CopyForwarding = Pass('CopyForwarding')
DCE = Pass('DCE')
DeadFunctionElimination = Pass('DeadFunctionElimination')
DeadObjectElimination = Pass('DeadObjectElimination')
DefiniteInitialization = Pass('DefiniteInitialization')
Devirtualizer = Pass('SpeculativeDevirtualizer')
DiagnoseUnreachable = Pass('DiagnoseUnreachable')
DiagnosticConstantPropagation = Pass('DiagnosticConstantPropagation')
EarlyInliner = Pass('EarlyInliner')
EmitDFDiagnostics = Pass('EmitDFDiagnostics')
FunctionSignatureOpts = Pass('FunctionSignatureOpts')
GlobalARCOpts = Pass('GlobalARCOpts')
GlobalLoadStoreOpts = Pass('GlobalLoadStoreOpts')
GlobalOpt = Pass('GlobalOpt')
IVInfoPrinter = Pass('IVInfoPrinter')
InOutDeshadowing = Pass('InOutDeshadowing')
InstCount = Pass('InstCount')
LICM = Pass('LICM')
LateInliner = Pass('LateInliner')
LoopInfoPrinter = Pass('LoopInfoPrinter')
LoopRotate = Pass('LoopRotate')
LowerAggregateInstrs = Pass('LowerAggregateInstrs')
MandatoryInlining = Pass('MandatoryInlining')
Mem2Reg = Pass('Mem2Reg')
NoReturnFolding = Pass('NoReturnFolding')
PerfInliner = Pass('PerfInliner')
PerformanceConstantPropagation = Pass('PerformanceConstantPropagation')
PredictableMemoryOptimizations = Pass('PredictableMemoryOptimizations')
SILCleanup = Pass('SILCleanup')
SILCombine = Pass('SILCombine')
SILLinker = Pass('SILLinker')
SROA = Pass('SROA')
SimplifyCFG = Pass('SimplifyCFG')
SplitAllCriticalEdges = Pass('SplitAllCriticalEdges')
SplitNonCondBrCriticalEdges = Pass('SplitNonCondBrCriticalEdges')
StripDebugInfo = Pass('StripDebugInfo')
SwiftArrayOpts = Pass('SwiftArrayOpts')
PASSES = [
AADumper,
ABCOpt,
AllocBoxToStack,
CFGPrinter,
COWArrayOpts,
CSE,
CapturePromotion,
CapturePropagation,
ClosureSpecializer,
CodeMotion,
CopyForwarding,
DCE,
DeadFunctionElimination,
DeadObjectElimination,
DefiniteInitialization,
SpeculativeDevirtualizer,
DiagnoseUnreachable,
DiagnosticConstantPropagation,
EarlyInliner,
EmitDFDiagnostics,
FunctionSignatureOpts,
GlobalARCOpts,
GlobalLoadStoreOpts,
GlobalOpt,
IVInfoPrinter,
InOutDeshadowing,
InstCount,
LICM,
LateInliner,
LoopInfoPrinter,
LoopRotate,
LowerAggregateInstrs,
MandatoryInlining,
Mem2Reg,
NoReturnFolding,
PerfInliner,
PerformanceConstantPropagation,
PredictableMemoryOptimizations,
SILCleanup,
SILCombine,
SILLinker,
SROA,
SimplifyCFG,
SplitAllCriticalEdges,
SplitNonCondBrCriticalEdges,
StripDebugInfo,
SwiftArrayOpts,
]
|
Python
| 0.000001 |
@@ -739,16 +739,27 @@
ation')%0A
+Speculative
Devirtua
|
992b9c46dd432ad409025a3cbaeb1c06f880526c
|
Resolve readline/ncurses dependency when building Lua
|
var/spack/packages/lua/package.py
|
var/spack/packages/lua/package.py
|
from spack import *
import os
class Lua(Package):
""" The Lua programming language interpreter and library """
homepage = "http://www.lua.org"
url = "http://www.lua.org/ftp/lua-5.1.5.tar.gz"
version('5.3.1', '797adacada8d85761c079390ff1d9961')
version('5.3.0', 'a1b0a7e92d0c85bbff7a8d27bf29f8af')
version('5.2.4', '913fdb32207046b273fdb17aad70be13')
version('5.2.3', 'dc7f94ec6ff15c985d2d6ad0f1b35654')
version('5.2.2', 'efbb645e897eae37cad4344ce8b0a614')
version('5.2.1', 'ae08f641b45d737d12d30291a5e5f6e3')
version('5.2.0', 'f1ea831f397214bae8a265995ab1a93e')
version('5.1.5', '2e115fe26e435e33b0d5c022e4490567')
version('5.1.4', 'd0870f2de55d59c1c8419f36e8fac150')
version('5.1.3', 'a70a8dfaa150e047866dc01a46272599')
depends_on('ncurses')
def install(self, spec, prefix):
make('INSTALL_TOP=%s' % prefix,
'MYLDFLAGS="-L%s/lib -Wl,-rpath,%s"' % (spec['ncurses'].prefix,spec['ncurses'].prefix),
'linux')
make('INSTALL_TOP=%s' % prefix,
'MYLDFLAGS="-L%s/lib -Wl,-rpath,%s"' % (spec['ncurses'].prefix,spec['ncurses'].prefix),
'install')
|
Python
| 0 |
@@ -799,16 +799,43 @@
curses')
+%0A depends_on('readline')
%0A%0A de
@@ -933,37 +933,26 @@
AGS=
-%22
-L%25s
-/lib -Wl,-rpath,%25s%22
+ -lncurses
' %25
-(
spec
@@ -973,32 +973,12 @@
efix
-,spec%5B'ncurses'%5D.prefix)
+.lib
,%0A
@@ -1065,37 +1065,26 @@
AGS=
-%22
-L%25s
-/lib -Wl,-rpath,%25s%22
+ -lncurses
' %25
-(
spec
@@ -1105,32 +1105,12 @@
efix
-,spec%5B'ncurses'%5D.prefix)
+.lib
,%0A
|
971a1dd12c11c66ea3a42cddd693e7f20c45846c
|
Adding a blank line
|
mysite/missions/git/controllers.py
|
mysite/missions/git/controllers.py
|
# This file is part of OpenHatch.
# Copyright (C) 2010 Jack Grigg
# Copyright (C) 2010 John Stumpo
# Copyright (C) 2010, 2011 OpenHatch, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from mysite.missions.base.controllers import *
class GitRepository(object):
def __init__(self, username):
self.username = username
self.repo_path = os.path.join(settings.GIT_REPO_PATH, username)
self.file_url = 'file://' + self.repo_path
self.public_url = settings.GIT_REPO_URL_PREFIX + username
def reset(self):
if os.path.isdir(self.repo_path):
shutil.rmtree(self.repo_path)
subprocess.check_call(['git', 'init', self.repo_path])
subprocess.check_call(['git', 'config', 'user.name', '"The Brain"'], cwd=self.repo_path)
subprocess.check_call(['cp', '../../../missions/git/data/hello.py', '.'], cwd=self.repo_path)
subprocess.check_call(['git', 'add', '.'], cwd=self.repo_path)
subprocess.check_call(['git', 'commit', '-m', '"Initial commit"'], cwd=self.repo_path)
# Touch the git-daemon-export-ok file
file_obj = file(os.path.join(self.repo_path, '.git', 'git-daemon-export-ok'), 'w')
file_obj.close()
person = Person.objects.get(user__username=self.username)
def exists(self):
return os.path.isdir(self.repo_path)
class GitDiffMission(object):
@classmethod
def commit_if_ok(cls, username, diff):
repo = GitRepository(username)
commit_diff = subprocess.Popen(['git', 'am'], cwd=repo.repo_path, stdin=subprocess.PIPE)
commit_diff.communicate(str(diff))
if commit_diff.returncode == 0: # for shell commands, success is 0
commit_msg = """Fixed a terrible mistake. Thanks for reporting this %s.
Come to my house for a dinner party.
Knock 3 times and give the secret password: Pinky.""" % username
subprocess.Popen(['git', 'commit', '--allow-empty', '-m', commit_msg], cwd=repo.repo_path)
return True
else:
subprocess.check_call(['git', 'am', '--abort'], cwd=repo.repo_path)
return False
|
Python
| 0.999999 |
@@ -2409,16 +2409,17 @@
his %25s.%0A
+%0A
|
81e9bf3a9cca702af11f0d7ed17d25611b9a1380
|
Fix for creating node instance set
|
vcloud_plugin_common/workflows.py
|
vcloud_plugin_common/workflows.py
|
# Copyright (c) 2015 GigaSpaces Technologies Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cloudify.decorators import workflow
from cloudify.manager import update_node_instance
import cloudify.plugins.workflows as default_workflow
import vcloud_plugin_common
def update(ctx, instance, token, org_url):
"""update token and url in instance"""
node_instance = instance._node_instance
rt_properties = node_instance['runtime_properties']
rt_properties.update({
vcloud_plugin_common.SESSION_TOKEN: token,
vcloud_plugin_common.ORG_URL: org_url
})
version = node_instance['version']
node_instance['version'] = version if version else 0
if ctx.local:
version = node_instance['version']
state = node_instance.get('state')
node_id = instance.id
storage = ctx.internal.handler.storage
storage.update_node_instance(node_id, version, rt_properties, state)
else:
update_node_instance(node_instance)
def _get_all_nodes_instances(ctx, token, org_url):
"""return all instances from context nodes"""
node_instances = set()
for node in ctx.nodes:
if vcloud_plugin_common.VCLOUD_CONFIG in node.properties:
for instance in node.instances:
if token and org_url:
update(ctx, instance, token, org_url)
node_instances.add(instance)
return node_instances
@workflow
def install(ctx, **kwargs):
"""Score install workflow"""
default_workflow._install_node_instances(
ctx,
_get_all_nodes_instances(ctx, kwargs.get('session_token'),
kwargs.get('org_url')),
set(),
default_workflow.NodeInstallationTasksSequenceCreator(),
default_workflow.InstallationTasksGraphFinisher
)
@workflow
def uninstall(ctx, **kwargs):
"""Score uninstall workflow"""
default_workflow._uninstall_node_instances(
ctx,
_get_all_nodes_instances(ctx, kwargs.get('session_token'),
kwargs.get('org_url')),
set(),
default_workflow.NodeUninstallationTasksSequenceCreator(),
default_workflow.UninstallationTasksGraphFinisher
)
|
Python
| 0 |
@@ -1665,24 +1665,68 @@
ctx.nodes:%0A
+ for instance in node.instances:%0A
if v
@@ -1724,16 +1724,17 @@
if
+(
vcloud_p
@@ -1778,17 +1778,16 @@
operties
-:
%0A
@@ -1795,39 +1795,20 @@
-for instance in node.instances:
+ and token
%0A
@@ -1823,18 +1823,8 @@
- if token
and
@@ -1830,22 +1830,19 @@
org_url
+)
:%0A
-
|
f23a0c344a8683e47709d4fe940fe0298202f017
|
remove unused code in has_token decorator
|
api/src/api/views/decorators.py
|
api/src/api/views/decorators.py
|
# Copyright 2017 the Isard-vdi project authors:
# Josep Maria Viñolas Auquer
# Alberto Larraz Dalmases
# License: AGPLv3
import json
import os
from functools import wraps
from flask import request
from rethinkdb import RethinkDB
from api import app
r = RethinkDB()
import logging
import traceback
from flask import Flask, _request_ctx_stack, jsonify, request
from jose import jwt
from rethinkdb.errors import ReqlTimeoutError
from ..libv2.flask_rethink import RDB
db = RDB(app)
db.init_app(app)
from ..auth.tokens import Error, get_auto_register_jwt_payload, get_header_jwt_payload
from ..libv2.apiv2_exc import DesktopNotFound, TemplateNotFound
def has_token(f):
@wraps(f)
def decorated(*args, **kwargs):
payload = get_header_jwt_payload()
kwargs["payload"] = payload
return f(*args, **kwargs)
raise Error(
{"error": "not_allowed", "description": "Not enough rights" " token."}, 401
)
return decorated
def is_register(f):
@wraps(f)
def decorated(*args, **kwargs):
payload = get_header_jwt_payload()
if payload.get("type", "") == "register":
kwargs["payload"] = payload
return f(*args, **kwargs)
raise Error(
{"error": "not_allowed", "description": "Not register" " token."}, 401
)
return decorated
def is_auto_register(f):
@wraps(f)
def decorated(*args, **kwargs):
payload = get_auto_register_jwt_payload()
if payload.get("type", "") == "register":
kwargs["payload"] = payload
return f(*args, **kwargs)
raise Error(
{"error": "not_allowed", "description": "Not register" " token."}, 401
)
return decorated
def is_admin(f):
@wraps(f)
def decorated(*args, **kwargs):
payload = get_header_jwt_payload()
if payload["role_id"] == "admin":
kwargs["payload"] = payload
return f(*args, **kwargs)
raise Error(
{"error": "not_allowed", "description": "Not enough rights" " token."}, 403
)
return decorated
def is_admin_or_manager(f):
@wraps(f)
def decorated(*args, **kwargs):
payload = get_header_jwt_payload()
if payload["role_id"] == "admin" or payload["role_id"] == "manager":
kwargs["payload"] = payload
return f(*args, **kwargs)
raise Error(
{"error": "not_allowed", "description": "Not enough rights" " token."}, 403
)
return decorated
def is_admin_user(f):
@wraps(f)
def decorated(*args, **kwargs):
payload = get_header_jwt_payload()
if payload["role_id"] == "admin":
kwargs["payload"] = payload
return f(*args, **kwargs)
raise Error(
{"error": "not_allowed", "description": "Not enough rights" " token."}, 403
)
return decorated
def is_hyper(f):
@wraps(f)
def decorated(*args, **kwargs):
payload = get_header_jwt_payload()
return f(*args, **kwargs)
return decorated
### Helpers
def ownsUserId(payload, user_id):
if payload["role_id"] == "admin":
return True
if (
payload["role_id"] == "manager"
and user_id.split["-"][1] == payload["category_id"]
):
return True
if payload["user_id"] == user_id:
return True
return False
def ownsCategoryId(payload, category_id):
if payload["role_id"] == "admin":
return True
if payload["role_id"] == "manager" and category_id == payload["category_id"]:
return True
return False
def ownsDomainId(payload, desktop_id):
if payload["role_id"] == "admin":
return True
if (
payload["role_id"] == "manager"
and payload["category_id"] == desktop_id.split("-")[1]
):
return True
if payload["role_id"] == "advanced":
with app.app_context():
if str(
r.table("domains")
.get(desktop_id)
.pluck("tag")
.run(db.conn)
.get("tag", False)
).startswith("_" + payload["user_id"]):
return True
if desktop_id.startswith("_" + payload["user_id"]):
return True
return False
def allowedTemplateId(payload, template_id):
try:
with app.app_context():
template = (
r.table("domains")
.get(template_id)
.pluck("user", "allowed", "category")
.run(db.conn)
)
except:
raise Error(
{
"error": "template_not_found",
"msg": "Not found template " + template_id,
},
404,
)
if payload["user_id"] == template["user"]:
return True
alloweds = template["allowed"]
if payload["role_id"] == "admin":
return True
if (
payload["role_id"] == "manager"
and payload["category_id"] == template["category"]
):
return True
if alloweds["roles"] != False:
if alloweds["roles"] == []:
return True
if payload["role_id"] in alloweds["roles"]:
return True
if alloweds["categories"] != False:
if alloweds["categories"] == []:
return True
if payload["category_id"] in alloweds["categories"]:
return True
if alloweds["groups"] != False:
if alloweds["groups"] == []:
return True
if payload["group_id"] in alloweds["groups"]:
return True
if alloweds["users"] != False:
if alloweds["users"] == []:
return True
if payload["user_id"] in alloweds["users"]:
return True
return False
|
Python
| 0 |
@@ -843,127 +843,8 @@
rgs)
-%0A raise Error(%0A %7B%22error%22: %22not_allowed%22, %22description%22: %22Not enough rights%22 %22 token.%22%7D, 401%0A )
%0A%0A
|
2c71d178ae9d6ead586c7f7f3a2c21a21a998714
|
Fix Bed Bath & Beyond
|
locations/spiders/bed_bath_beyond.py
|
locations/spiders/bed_bath_beyond.py
|
# -*- coding: utf-8 -*-
import json
import scrapy
import re
from scrapy.utils.url import urljoin_rfc
from scrapy.utils.response import get_base_url
from locations.items import GeojsonPointItem
class BedBathBeyondSpider(scrapy.Spider):
name = "bed_bath_beyond"
allowed_domains = ["stores.bedbathandbeyond.com"]
start_urls = (
'https://stores.bedbathandbeyond.com/',
)
def store_hours(self, store_hours):
day_groups = []
this_day_group = None
for day_info in store_hours:
day = day_info['day'][:2].title()
hour_intervals = []
for interval in day_info['intervals']:
f_time = str(interval['start']).zfill(4)
t_time = str(interval['end']).zfill(4)
hour_intervals.append('{}:{}-{}:{}'.format(
f_time[0:2],
f_time[2:4],
t_time[0:2],
t_time[2:4],
))
hours = ','.join(hour_intervals)
if not this_day_group:
this_day_group = {
'from_day': day,
'to_day': day,
'hours': hours
}
elif this_day_group['hours'] != hours:
day_groups.append(this_day_group)
this_day_group = {
'from_day': day,
'to_day': day,
'hours': hours
}
elif this_day_group['hours'] == hours:
this_day_group['to_day'] = day
day_groups.append(this_day_group)
opening_hours = ""
if len(day_groups) == 1 and day_groups[0]['hours'] in ('00:00-23:59', '00:00-00:00'):
opening_hours = '24/7'
else:
for day_group in day_groups:
if day_group['from_day'] == day_group['to_day']:
opening_hours += '{from_day} {hours}; '.format(**day_group)
elif day_group['from_day'] == 'Su' and day_group['to_day'] == 'Sa':
opening_hours += '{hours}; '.format(**day_group)
else:
opening_hours += '{from_day}-{to_day} {hours}; '.format(**day_group)
opening_hours = opening_hours[:-2]
return opening_hours
def parse_store(self, response):
properties = {
'addr:full': response.xpath('//address[@itemprop="address"]/span[@itemprop="streetAddress"]/span/text()')[0].extract(),
'addr:city': response.xpath('//span[@itemprop="addressLocality"]/text()')[0].extract(),
'addr:state': response.xpath('//abbr[@itemprop="addressRegion"]/text()')[0].extract(),
'addr:postcode': response.xpath('//span[@itemprop="postalCode"]/text()')[0].extract(),
'ref': response.url,
'website': response.url,
}
phone = response.xpath('//a[@class="c-phone-number-link c-phone-main-number-link"]/text()')[0].extract()
if phone:
properties['phone'] = phone
hours = json.loads(response.xpath('//div[@class="c-location-hours-today js-location-hours"]/@data-days')[0].extract())
opening_hours = self.store_hours(hours)
if opening_hours:
properties['opening_hours'] = opening_hours
lon_lat = [
float(response.xpath('//span/meta[@itemprop="longitude"]/@content')[0].extract()),
float(response.xpath('//span/meta[@itemprop="latitude"]/@content')[0].extract()),
]
yield GeojsonPointItem(
properties=properties,
lon_lat=lon_lat,
)
def parse(self, response):
base_url = get_base_url(response)
urls = response.xpath('//a[@class="c-directory-list-content-item-link"]/@href').extract()
for path in urls:
if path.rsplit('-', 1)[0].isnumeric():
# If there's only one store, the URL will have a store number at the end
yield scrapy.Request(urljoin_rfc(base_url, path), callback=self.parse_store)
else:
yield scrapy.Request(urljoin_rfc(base_url, path))
urls = response.xpath('//a[@class="c-location-grid-item-link"]/@href').extract()
for path in urls:
yield scrapy.Request(urljoin_rfc(base_url, path), callback=self.parse_store)
|
Python
| 0 |
@@ -46,106 +46,8 @@
rapy
-%0Aimport re%0Afrom scrapy.utils.url import urljoin_rfc%0Afrom scrapy.utils.response import get_base_url
%0A%0Afr
@@ -90,16 +90,17 @@
ntItem%0A%0A
+%0A
class Be
@@ -3580,50 +3580,8 @@
e):%0A
- base_url = get_base_url(response)%0A
@@ -3735,17 +3735,18 @@
'-', 1)%5B
-0
+-1
%5D.isnume
@@ -3878,38 +3878,33 @@
Request(
-urljoin_rfc(base_url,
+response.urljoin(
path), c
@@ -3984,38 +3984,33 @@
Request(
-urljoin_rfc(base_url,
+response.urljoin(
path))%0A%0A
@@ -4161,30 +4161,25 @@
est(
-urljoin_rfc(base_url,
+response.urljoin(
path
|
679755a0bae0ce8a34b9907cbc2ff9bd90144822
|
Remove pwd from binding file
|
src/spatialite/deps/iconv/binding.gyp
|
src/spatialite/deps/iconv/binding.gyp
|
{
'variables': { 'target_arch%': 'ia32' },
'target_defaults': {
'default_configuration': 'Debug',
'configurations': {
'Debug': {
'defines': [ 'DEBUG', '_DEBUG' ],
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeLibrary': 1, # static debug
},
},
},
'Release': {
'defines': [ 'NDEBUG' ],
'msvs_settings': {
'VCCLCompilerTool': {
'RuntimeLibrary': 0, # static release
},
},
}
},
'msvs_settings': {
'VCCLCompilerTool': {
},
'VCLibrarianTool': {
},
'VCLinkerTool': {
'GenerateDebugInformation': 'true',
},
},
'include_dirs': [
'config/<(OS)/<(target_arch)',
'.',
'iconv/include',
'iconv/lib',
'iconv/srclib'
],
'conditions': [
['OS == "win"', {
'defines': [
'WIN32'
],
}]
],
},
'targets': [
{
'target_name': 'iconv',
'type': 'static_library',
'sources': [
# 'iconv/extras/iconv_string.c',
'iconv/lib/iconv.c',
# 'iconv/lib/iconv_relocatable.c',
'iconv/libcharset/lib/localcharset.c',
# 'iconv/libcharset/lib/localcharset_relocatable.c',
# 'iconv/srclib/allocator.c',
# 'iconv/srclib/areadlink.c',
# 'iconv/srclib/c-ctype.c',
# 'iconv/srclib/canonicalize-lgpl.c',
# 'iconv/srclib/careadlinkat.c',
# 'iconv/srclib/error.c',
# 'iconv/srclib/lstat.c',
# 'iconv/srclib/malloca.c',
# 'iconv/srclib/memmove.c',
# 'iconv/srclib/progname.c',
# 'iconv/srclib/progreloc.c',
# 'iconv/srclib/read.c',
# 'iconv/srclib/readlink.c',
# 'iconv/srclib/relocatable.c',
# 'iconv/srclib/safe-read.c',
# 'iconv/srclib/setenv.c',
# 'iconv/srclib/stat.c',
# 'iconv/srclib/stdio-write.c',
# 'iconv/srclib/strerror.c',
# 'iconv/srclib/xmalloc.c',
# 'iconv/srclib/xreadlink.c',
# 'iconv/srclib/xstrdup.c'
],
'defines': [
'BUILDING_LIBCHARSET',
'LIBDIR="."',
'INSTALLDIR="<!(pwd)"',
'NO_XMALLOC',
'HAVE_CONFIG_H',
'EXEEXT=""',
'LIBPATHVAR="."'
],
'direct_dependent_settings': {
'include_dirs': [
'config/<(OS)/<(target_arch)',
'.',
'iconv/include'
],
},
},
]
}
|
Python
| 0 |
@@ -2204,15 +2204,9 @@
IR=%22
-%3C!(pwd)
+.
%22',%0A
|
9d60b14e35917e7227a03d53bf6a6e83c25b7e81
|
Correct publication mapping dependencies
|
luigi/tasks/quickgo/load_mappings.py
|
luigi/tasks/quickgo/load_mappings.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from tasks.config import publications
from tasks.utils.pgloader import PGLoader
from tasks.publications.pubmed import PubmedLoader
from .quickgo_data import QuickGoData
CONTROL_FILE = """
LOAD CSV
FROM '{filename}'
WITH ENCODING ISO-8859-14
HAVING FIELDS ({fields})
INTO {db_url}
TARGET COLUMNS ({columns})
SET
search_path = '{search_path}'
WITH
fields escaped by double-quote,
fields terminated by ','
BEFORE LOAD DO
$$
create table if not exists {load_table} (
rna_id varchar(50),
qualifier varchar(30),
assigned_by varchar(50),
ontology_term_id varchar(15),
evidence_code varchar(15),
pubmed_id int
);
$$,
$$
truncate table {load_table};
$$
AFTER LOAD DO
$$ insert into {final_table} (go_term_annotation_id, ref_pubmed_id)
(
select
annotations.go_term_annotation_id,
{load_table}.pubmed_id
from {load_table}
join go_term_annotations annotations
on
annotations.rna_id = {load_table}.rna_id
AND annotations.qualifier = {load_table}.qualifier
AND annotations.assigned_by = {load_table}.assigned_by
AND annotations.ontology_term_id = {load_table}.ontology_term_id
AND annotations.evidence_code = {load_table}.evidence_code
)
ON CONFLICT (go_term_annotation_id, ref_pubmed_id)
DO NOTHING
;
$$,
$$
drop table {load_table};
$$
;
"""
class QuickGoLoadPublicationMapping(PGLoader):
def requires(self):
return [
QuickGoData(),
PubmedLoader(),
]
def control_file(self):
output = self.requires()[0].output()
table = 'go_term_publication_map'
load_table = 'load_' + table
fields = ', '.join(output.publication_mappings.headers)
return CONTROL_FILE.format(
filename=output.publication_mappings.filename,
directory=publications().to_load(),
final_table=table,
load_table=load_table,
db_url=self.db_url(table=load_table),
columns=fields,
fields=fields,
search_path=self.db_search_path(),
)
|
Python
| 0 |
@@ -777,16 +777,69 @@
ckGoData
+%0Afrom .load_annotations import QuickGoLoadAnnotations
%0A%0ACONTRO
@@ -2102,24 +2102,62 @@
edLoader(),%0A
+ QuickGoLoadAnnotations(),%0A
%5D%0A%0A
|
360345abd7e9751f376a79ed0f3b1f5228253875
|
allow metadata in Kernel-side Comm messages.
|
IPython/kernel/comm/comm.py
|
IPython/kernel/comm/comm.py
|
"""Base class for a Comm"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import uuid
from IPython.config import LoggingConfigurable
from IPython.core.getipython import get_ipython
from IPython.utils.traitlets import Instance, Unicode, Bytes, Bool, Dict, Any
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
class Comm(LoggingConfigurable):
shell = Instance('IPython.core.interactiveshell.InteractiveShellABC')
def _shell_default(self):
return get_ipython()
iopub_socket = Any()
def _iopub_socket_default(self):
return self.shell.kernel.iopub_socket
session = Instance('IPython.kernel.zmq.session.Session')
def _session_default(self):
if self.shell is None:
return
return self.shell.kernel.session
target_name = Unicode('comm')
topic = Bytes()
def _topic_default(self):
return ('comm-%s' % self.comm_id).encode('ascii')
_open_data = Dict(help="data dict, if any, to be included in comm_open")
_close_data = Dict(help="data dict, if any, to be included in comm_close")
_msg_callback = Any()
_close_callback = Any()
_closed = Bool(False)
comm_id = Unicode()
def _comm_id_default(self):
return uuid.uuid4().hex
primary = Bool(True, help="Am I the primary or secondary Comm?")
def __init__(self, data=None, **kwargs):
super(Comm, self).__init__(**kwargs)
get_ipython().comm_manager.register_comm(self)
if self.primary:
# I am primary, open my peer.
self.open(data)
def _publish_msg(self, msg_type, data=None, **keys):
"""Helper for sending a comm message on IOPub"""
data = {} if data is None else data
self.session.send(self.iopub_socket, msg_type,
dict(data=data, comm_id=self.comm_id, **keys),
parent=self.shell.get_parent(),
ident=self.topic,
)
def __del__(self):
"""trigger close on gc"""
self.close()
# publishing messages
def open(self, data=None):
"""Open the frontend-side version of this comm"""
if data is None:
data = self._open_data
self._publish_msg('comm_open', data, target_name=self.target_name)
def close(self, data=None):
"""Close the frontend-side version of this comm"""
if self._closed:
# only close once
return
if data is None:
data = self._close_data
self._publish_msg('comm_close', data)
self._closed = True
def send(self, data=None):
"""Send a message to the frontend-side version of this comm"""
self._publish_msg('comm_msg', data)
# registering callbacks
def on_close(self, callback):
"""Register a callback for comm_close
Will be called with the `data` of the close message.
Call `on_close(None)` to disable an existing callback.
"""
self._close_callback = callback
def on_msg(self, callback):
"""Register a callback for comm_msg
Will be called with the `data` of any comm_msg messages.
Call `on_msg(None)` to disable an existing callback.
"""
self._msg_callback = callback
# handling of incoming messages
def handle_close(self, msg):
"""Handle a comm_close message"""
self.log.debug("handle_close[%s](%s)", self.comm_id, msg)
if self._close_callback:
self._close_callback(msg)
def handle_msg(self, msg):
"""Handle a comm_msg message"""
self.log.debug("handle_msg[%s](%s)", self.comm_id, msg)
if self._msg_callback:
self._msg_callback(msg)
__all__ = ['Comm']
|
Python
| 0 |
@@ -2218,24 +2218,39 @@
, data=None,
+ metadata=None,
**keys):%0A
@@ -2344,24 +2344,80 @@
e else data%0A
+ metadata = %7B%7D if metadata is None else metadata%0A
self
@@ -2518,16 +2518,47 @@
*keys),%0A
+ metadata=metadata,%0A
@@ -2772,32 +2772,47 @@
(self, data=None
+, metadata=None
):%0A %22%22%22Op
@@ -2960,16 +2960,26 @@
', data,
+ metadata,
target_
@@ -3027,32 +3027,47 @@
(self, data=None
+, metadata=None
):%0A %22%22%22Cl
@@ -3287,24 +3287,34 @@
close', data
+, metadata
)%0A se
@@ -3356,32 +3356,47 @@
(self, data=None
+, metadata=None
):%0A %22%22%22Se
@@ -3491,24 +3491,34 @@
m_msg', data
+, metadata
)%0A %0A #
|
1d9a04db9347ff105f79fd0c6ff38a0b47d87ecb
|
Fix compare user.id
|
russianroulette/russianroulette.py
|
russianroulette/russianroulette.py
|
import discord
from discord.ext import commands
import os
from .utils.dataIO import dataIO
import time
import asyncio
client = discord.Client()
class Russianroulette:
"""Russian Roulette"""
def __init__(self, bot):
self.bot = bot
self.file_path = "data/russianroulette/russianroulette.json"
self.json_data = dataIO.load_json(self.file_path)
@commands.command(pass_context=True, aliases=["rr", "russian"])
async def russianroulette(self, ctx, type):
"""Type = start or join"""
#Your code will go here
user = ctx.message.author
bank = self.bot.get_cog("Economy").bank
if type.lower() == "start":
if self.json_data["System"]["Status"] == "Stopped":
await self.betAmount(user, bank)
elif self.json_data["System"]["Status"] == "Waiting":
await self.bot.say("Game has been made, to join it type `*rr join`")
else:
await self.bot.say("Game is in progress, please wait until it's finished")
elif type.lower() == "join":
if self.json_data["System"]["Status"] == "Waiting":
await self.joinGame(user, bank)
elif self.json_data["System"]["Status"] == "Stopped":
await self.bot.say("No game to join, type `*rr start` to create a game")
else:
await self.bot.say("Game is in progress, please wait until it's finished")
elif type.lower() == "stop":
if self.json_data["System"]["Status"] == "Stopped":
await self.bot.say("There is no game running to stop")
elif self.json_data["System"]["Status"] == "Running":
await self.bot.say("The game is running and can only be stopped when in the lobby waiting")
if user.id == self.json_data["Players"][0]:
self.json_data["Players"]["1"] = ""
self.json_data["Players"]["2"] = ""
self.json_data["Players"]["3"] = ""
self.json_data["Players"]["4"] = ""
self.json_data["Players"]["5"] = ""
self.json_data["Players"]["6"] = ""
self.json_data["System"]["Player Count"] = 0
self.json_data["System"]["Status"] = "Stopped"
self.json_data["System"]["Bet"] = 0
f = "data/russianroulette/russianroulette.json"
dataIO.save_json(f, self.json_data)
else:
await self.bot.say("You must be the person who started the roulette and you must currently be waiting for people to join")
else:
            await self.bot.say(user.mention + " This command only accepts `start` `stop` or `join`")
@client.event
async def betAmount(self, user, bank):
await self.bot.say("How much would you like to put on the line: $")
bet = await self.bot.wait_for_message(timeout=30, author=user)
if bet is None:
await self.bot.say("You didn't enter anything")
return
try:
bet = int(str(bet.content))
except ValueError:
pass
if isinstance(bet , int):
if bank.account_exists(user):
if bank.get_balance(user) > bet:
self.json_data["System"]["Bet"] = bet
self.json_data["Players"]["1"] = user.id
self.json_data["System"]["Player Count"] += 1
self.json_data["System"]["Status"] = "Waiting"
f = "data/russianroulette/russianroulette.json"
dataIO.save_json(f, self.json_data)
await self.bot.say("Bet placed at $" + str(bet) + "\nTo start the game you need atleast one another person to join with `*rr join`")
else:
await self.bot.say("You don't have enough to place a bet of $" + str(bet) + " You only have $" + str(bank.get_balance(user)))
else:
await self.bot.say("You don't have a bank account, create one first with *bank register")
return
else:
await self.bot.say("You must enter a number")
await self.betAmount(user, bank)
async def joinGame(self, user, bank):
await self.bot.say("asd")
def check_folders():
if not os.path.exists("data/russianroulette"):
print("Creating data/russianroulette floder...")
os.makedirs("data/russianroulette")
def check_files():
system = {"System": {"Bet": 0,
"Status": "Stopped",
"Player Count": 0},
"Players": {"1": "",
"2": "",
"3": "",
"4": "",
"5": "",
"6": ""}}
f = "data/russianroulette/russianroulette.json"
if not dataIO.is_valid_json(f):
print("Creating defualt russianroulette.json...")
dataIO.save_json(f, system)
def setup(bot):
check_folders()
check_files()
n = Russianroulette(bot)
bot.add_cog(n)
|
Python
| 0.000001 |
@@ -1883,17 +1883,19 @@
ayers%22%5D%5B
-0
+%221%22
%5D:%0A
|
fd8c3bdcd1fe3dea8186c4870285731e3081b396
|
modify parse request
|
src/server.py
|
src/server.py
|
# -*- coding: utf-8 -*-
"""Server module."""
from __future__ import unicode_literals
import socket
import email.utils
def resolve_uri(uri):
def parse_request(request):
"""Parse and validate request to confirm parts are correct."""
lines = request.split('\r\n')
try:
lines[1][:3] == 'GET'
except:
response_error(u'405 Method Not Allowed')
raise RuntimeError('Only accepts GET requests.')
try:
lines[1][-8:] == 'HTTP/1.1'
except:
response_error(u'505 HTTP Version Not Supported')
raise RuntimeError('Only accepts HTTP/1.1 protocol requests.')
try:
len(lines[1].split()) == 3 and lines[1][0] == '/'
uri = lines[1].split()
lines = resolve_uri(uri)
except:
response_error(u'404 Page Not Found')
raise RuntimeError('URI not properly formatted.')
return lines
def response_ok(uri):
"""Return 200 ok."""
first = u'HTTP/1.1 200 OK'
second_line = u'Content-Type: text/plain; charset=utf-8'
date = u'Date: ' + email.utils.formatdate(usegmt=True)
header_break = u''
body = uri
bytes_ = body.encode('utf-8')
fourth_line = u'Content-Length: {}'.format(len(bytes_))
string_list = [first, second_line, date, fourth_line, header_break, body]
string_list = '\r\n'.join(string_list)
return string_list
def response_error(error='500 Internal Server Error'):
"""Return 500 internal server error."""
first = u'HTTP/1.1 {}'.format(error)
second_line = u'Content-Type: text/plain; charset=utf-8'
date = email.utils.formatdate(usegmt=True)
header_break = u''
body = u'The system is down'
bytes_ = body.encode('utf-8')
fourth_line = u'Content-Length: {}'.format(len(bytes_))
string_list = [first, second_line, date, fourth_line, header_break, body]
string_list = '\r\n'.join(string_list)
return string_list
def server():
"""Return message to client."""
server = socket.socket(socket.AF_INET, socket.SOCK_STREAM, socket.IPPROTO_TCP)
server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
address = ('127.0.0.1', 5001)
server.bind(address)
server.listen(1)
conn, addr = server.accept()
try:
while True:
try:
buffer_length = 8
reply_complete = False
full_string = u""
while not reply_complete:
part = conn.recv(buffer_length)
full_string = full_string + part.decode('utf-8')
if len(part) < buffer_length:
reply_complete = True
print(full_string)
try:
conn.sendall(response_ok(parse_request(full_string)).encode('utf-8'))
except:
pass
# server.listen(1)
conn, addr = server.accept()
except:
response_error()
raise
except KeyboardInterrupt:
conn.close()
finally:
server.close()
if __name__ == '__main__':
server()
|
Python
| 0.000001 |
@@ -116,31 +116,465 @@
ls%0A%0A
-%0Adef resolve_uri(uri):%0A
+RESOURCES = %7B%0A 'images':%0A%0A%7D%0A%0A%0A%0Adef resolve_uri(uri):%0A %22%22%22Return request body and file type.%22%22%22%0A file_path = uri.split('/')%0A print(file_path)%0A if file_path%5B0%5D != 'webroot':%0A response_error(u'400 Bad Request')%0A raise LookupError('File path not found.')%0A else:%0A file = file_path%5B-1%5D.split('.')%0A file_type = file%5B1%5D%0A file_name = file%5B0%5D%0A if file_type == 'png' or file_type == 'jpg':%0A return
%0A%0A%0A%0A
@@ -702,27 +702,26 @@
'%5Cr%5Cn')%0A
-try
+if
:%0A li
@@ -724,17 +724,17 @@
lines%5B
-1
+0
%5D%5B:3%5D ==
@@ -749,64 +749,12 @@
e
-xcept:%0A response_error(u'405 Method Not Allowed')
+lse:
%0A
@@ -803,35 +803,34 @@
requests.')%0A
-try
+if
:%0A lines%5B
@@ -860,151 +860,43 @@
e
-xcept:%0A response_error(u'505 HTTP Version Not Supported')%0A raise RuntimeError('Only accepts HTTP/1.1 protocol requests.')
+lse:%0A raise TypeError
%0A
-try
+if
:%0A
@@ -1024,118 +1024,38 @@
e
-xcept:%0A response_error(u'404 Page Not Found')%0A raise RuntimeError('URI not properly formatted.')
+lse:%0A raise SyntaxError
%0A
@@ -3293,11 +3293,98 @@
ver()%0A%0A%0A
+%22%22%22u'405 Method Not Allowed'%0Au'505 HTTP Version Not Supported'%0Au'404 Page Not Found'%22%22%22
%0A%0A%0A
|
c25d97ccc5109064ef80141e62c44134702b8125
|
Add endpoints for the admin page
|
src/server.py
|
src/server.py
|
#!/usr/bin/env python3
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright 2017 - Edoardo Morassutto <[email protected]>
import sys
import traceback
import gevent.wsgi
from gevent import monkey
from werkzeug.exceptions import (HTTPException, InternalServerError, NotFound)
from werkzeug.routing import Map, Rule, RequestRedirect
from werkzeug.wrappers import Request
from werkzeug.wsgi import responder
from .config import Config
from .logger import Logger
from .handlers.contest_handler import ContestHandler
from .handlers.info_handler import InfoHandler
from .handlers.upload_handler import UploadHandler
from .handlers.admin_handler import AdminHandler
monkey.patch_all()
class Server:
""" Main server """
def __init__(self):
self.handlers = {
"contest": ContestHandler(),
"info": InfoHandler(),
"upload": UploadHandler(),
"admin": AdminHandler()
}
# The router tries to match the rules, the endpoint MUST be a string with this format
# CONTROLLER#ACTION
# Where CONTROLLER is an handler registered in self.handlers and ACTION is a valid
# method of that handler
self.router = Map([
Rule("/contest", methods=["GET"], endpoint="info#get_contest"),
Rule("/input/<id>", methods=["GET"], endpoint="info#get_input"),
Rule("/output/<id>", methods=["GET"], endpoint="info#get_output"),
Rule("/source/<id>", methods=["GET"], endpoint="info#get_source"),
Rule("/submission/<id>", methods=["GET"], endpoint="info#get_submission"),
Rule("/user/<token>", methods=["GET"], endpoint="info#get_user"),
Rule("/user/<token>/submissions/<task>", methods=["GET"], endpoint="info#get_submissions"),
Rule("/generate_input", methods=["POST"], endpoint="contest#generate_input"),
Rule("/submit", methods=["POST"], endpoint="contest#submit"),
Rule("/upload_source", methods=["POST"], endpoint="upload#upload_source"),
Rule("/upload_output", methods=["POST"], endpoint="upload#upload_output")
])
@responder
def __call__(self, environ, start_response):
try:
return self.wsgi_app(environ, start_response)
except:
Logger.error("UNCAUGHT_EXCEPTION", traceback.format_exc())
return InternalServerError()
def wsgi_app(self, environ, start_response):
route = self.router.bind_to_environ(environ)
request = Request(environ)
try:
endpoint, args = route.match()
except RequestRedirect as e:
return e
except HTTPException:
# TODO find a way to get the real ip address
Logger.warning("HTTP_ERROR", "%s %s %s 404" % (request.remote_addr, request.method, request.url))
return NotFound()
controller, action = endpoint.split("#")
res = self.handlers[controller].handle(action, args, request)
return res
def run(self):
"""
Start a greenlet with the main HTTP server loop
"""
server = gevent.wsgi.WSGIServer((Config.address, Config.port), self)
try:
server.init_socket()
except OSError:
Logger.error("PORT_ALREADY_IN_USE", "Address: '%s' Port: %d" % (Config.address, Config.port))
sys.exit(1)
greenlet = gevent.spawn(server.serve_forever)
Logger.info("SERVER_STATUS", "Server started")
greenlet.join()
|
Python
| 0.000001 |
@@ -2293,16 +2293,501 @@
output%22)
+,%0A%0A Rule(%22/admin/extract%22, methods=%5B%22POST%22%5D, endpoint=%22admin#extract%22),%0A Rule(%22/admin/log%22, methods=%5B%22POST%22%5D, endpoint=%22admin#log%22),%0A Rule(%22/admin/start%22, methods=%5B%22POST%22%5D, endpoint=%22admin#start%22),%0A Rule(%22/admin/set_extra_time%22, methods=%5B%22POST%22%5D, endpoint=%22admin#set_extra_time%22),%0A Rule(%22/admin/status%22, methods=%5B%22POST%22%5D, endpoint=%22admin#status%22),%0A Rule(%22/admin/user_list%22, methods=%5B%22POST%22%5D, endpoint=%22admin#user_list%22)
%0A
|
bde14d07446b3a0d36031b7e0cb0403233693370
|
Version 3.3.1
|
seleniumbase/__version__.py
|
seleniumbase/__version__.py
|
# seleniumbase package
__version__ = "3.3.0"
|
Python
| 0.000001 |
@@ -39,7 +39,7 @@
3.3.
-0
+1
%22%0A
|
a7acff51e61d3cdef0e61f960cf1f38e10b5a2a1
|
Version 3.1.5
|
seleniumbase/__version__.py
|
seleniumbase/__version__.py
|
# seleniumbase package
__version__ = "3.1.4"
|
Python
| 0.000001 |
@@ -39,7 +39,7 @@
3.1.
-4
+5
%22%0A
|
91f62f7b237a151ca54b20ec1d0f6d2924a47494
|
Version 2.3.9
|
seleniumbase/__version__.py
|
seleniumbase/__version__.py
|
# seleniumbase package
__version__ = "2.3.8"
|
Python
| 0 |
@@ -39,7 +39,7 @@
2.3.
-8
+9
%22%0A
|
7a3158d60315b6be148cefe6f8257c9377a0dcfd
|
Version 2.4.3
|
seleniumbase/__version__.py
|
seleniumbase/__version__.py
|
# seleniumbase package
__version__ = "2.4.2"
|
Python
| 0.000001 |
@@ -35,11 +35,11 @@
= %222.4.
-2
+3
%22%0A
|
b135dc0acb7a7c1bffce8506e97629ed9eedbf7c
|
Reset scaling to match SANDRA calculation - NB: this gives different intensity to SasView
|
sasmodels/models/adsorbed_layer.py
|
sasmodels/models/adsorbed_layer.py
|
#adsorbed_layer model
#conversion of Core2ndMomentModel.py
#converted by Steve King, Mar 2016
r"""
This model describes the scattering from a layer of surfactant or polymer adsorbed on spherical particles under the conditions that (i) the particles (cores) are contrast-matched to the dispersion medium, (ii) *S(Q)* ~ 1 (ie, the particle volume fraction is dilute), (iii) the particle radius is >> layer thickness (ie, the interface is locally flat), and (iv) scattering from excess unadsorbed adsorbate in the bulk medium is absent or has been corrected for.
Unlike many other core-shell models, this model does not assume any form for the density distribution of the adsorbed species normal to the interface (cf, a core-shell model normally assumes the density distribution to be a homogeneous step-function). For comparison, if the thickness of a (traditional core-shell like) step function distribution is *t*, the second moment about the mean of the density distribution (ie, the distance of the centre-of-mass of the distribution from the interface), |sigma| = sqrt((*t* :sup:`2` )/12).
Definition
----------
.. math::
I(q) = \text{scale} \cdot(\rho_\text{poly}-\rho_\text{solvent})^2 \left[\frac{6\pi\phi_\text{core}}{Q^2}\frac{\Gamma^2}{\delta_\text{poly}^2R_\text{core}} \exp(-Q^2\sigma^2)\right] + \text{background}
where *scale* is a scale factor, |rho|\ :sub:`poly` is the sld of the polymer (or surfactant) layer, |rho|\ :sub:`solv` is the sld of the solvent/medium and cores, |phi|\ :sub:`core` is the volume fraction of the core particles, |delta|\ :sub:`poly` is the bulk density of the polymer, |biggamma| is the adsorbed amount, and |sigma| is the second moment of the thickness distribution.
Note that all parameters except the |sigma| are correlated so fitting more than one of these parameters will generally fail. Also note that unlike other shape models, no volume normalization is applied to this model (the calculation is exact).
.. figure:: img/adsorbed_layer_1d.jpg
1D plot using the default values.
References
----------
S King, P Griffiths, J. Hone, and T Cosgrove, *SANS from Adsorbed Polymer Layers*,
*Macromol. Symp.*, 190 (2002) 33-42.
"""
from numpy import inf, sqrt, pi, exp
name = "adsorbed_layer"
title = "Scattering from an adsorbed layer on particles"
description = """
Evaluates the scattering from particles
with an adsorbed layer of surfactant or
polymer, independent of the form of the
density distribution.
"""
category = "shape:sphere"
# ["name", "units", default, [lower, upper], "type", "description"],
parameters = [["second_moment", "Ang", 23.0, [0.0, inf], "", "Second moment"],
["adsorbed_amount", "mg/m2", 1.9, [0.0, inf], "", "Adsorbed amount"],
["density_poly", "g/cm3", 0.7, [0.0, inf], "", "Polymer density"],
["radius", "Ang", 500.0, [0.0, inf], "", "Particle radius"],
["vol_frac", "none", 0.14, [0.0, inf], "", "Particle vol fraction"],
["polymer_sld", "1/Ang^2", 1.5e-06, [-inf, inf], "", "Polymer SLD"],
["solvent_sld", "1/Ang^2", 6.3e-06, [-inf, inf], "", "Solvent SLD"]]
# NB: Scale and Background are implicit parameters on every model
def Iq(q, second_moment, adsorbed_amount, density_poly, radius,
vol_frac, polymer_sld, solvent_sld):
# pylint: disable = missing-docstring
deltarhosqrd = (polymer_sld - solvent_sld) * (polymer_sld - solvent_sld)
numerator = 6.0 * pi * vol_frac * (adsorbed_amount * adsorbed_amount)
denominator = (q * q) * (density_poly * density_poly) * radius
eterm = exp(-1.0 * (q * q) * (second_moment * second_moment))
#scale by 10^10 for units conversion to cm^-1
inten = 1.0e+10 * deltarhosqrd * ((numerator / denominator) * eterm)
return inten * 9.4e-13
Iq.vectorized = True # Iq accepts an array of q values
def Iqxy(qx, qy, *args):
# pylint: disable = missing-docstring
return Iq(sqrt(qx ** 2 + qy ** 2), *args)
Iqxy.vectorized = True # Iqxy accepts an array of qx, qy values
demo = dict(scale = 1.0,
second_moment = 23.0,
adsorbed_amount = 1.9,
density_poly = 0.7,
radius = 500.0,
vol_frac = 0.14,
polymer_sld = 1.5e-06,
solvent_sld = 6.3e-06,
background = 0.0)
oldname = "Core2ndMomentModel"
oldpars = dict(scale = 'scale',
second_moment = 'second_moment',
adsorbed_amount = 'ads_amount',
density_poly = 'density_poly',
radius = 'radius_core',
vol_frac = 'volf_cores',
polymer_sld = 'sld_poly',
solvent_sld = 'sld_solv',
background = 'background')
tests = [
[{'scale': 1.0, 'second_moment': 23.0, 'adsorbed_amount': 1.9,
'density_poly': 0.7, 'radius': 500.0, 'vol_frac': 0.14,
'polymer_sld': 1.5e-06, 'solvent_sld': 6.3e-06, 'background': 0.0},
[0.0106939, 0.469418], [73.741, 9.65391e-53]],
]
|
Python
| 0 |
@@ -2958,17 +2958,17 @@
frac%22, %22
-n
+N
one%22, 0.
@@ -3830,18 +3830,8 @@
nten
- * 9.4e-13
%0AIq.
|
4f2fa4e43b314c9d05e0b9b9e73641463c16a9cb
|
Set up the proposal tasks on app startup
|
server/proposal/__init__.py
|
server/proposal/__init__.py
|
from django.apps import AppConfig
class ProposalConfig(AppConfig):
name = "proposal"
def ready(self):
# Register tasks with Celery:
from . import tasks
|
Python
| 0.000064 |
@@ -28,16 +28,17 @@
Config%0A%0A
+%0A
class Pr
@@ -172,8 +172,37 @@
t tasks%0A
+ tasks.set_up_hooks()%0A
|
c8db58d0e8d05a78659205053c4a722734ee39ce
|
Reorder loops for creation status agg sub tasks
|
app/celery/reporting_tasks.py
|
app/celery/reporting_tasks.py
|
from datetime import datetime, timedelta
from flask import current_app
from notifications_utils.timezones import convert_utc_to_bst
from app import db, notify_celery
from app.config import QueueNames
from app.cronitor import cronitor
from app.dao.fact_billing_dao import (
fetch_billing_data_for_day,
update_fact_billing,
)
from app.dao.fact_notification_status_dao import (
fetch_status_data_for_service_and_day,
update_fact_notification_status,
)
from app.models import EMAIL_TYPE, LETTER_TYPE, SMS_TYPE, Service
@notify_celery.task(name="create-nightly-billing")
@cronitor("create-nightly-billing")
def create_nightly_billing(day_start=None):
# day_start is a datetime.date() object. e.g.
# up to 4 days of data counting back from day_start is consolidated
if day_start is None:
day_start = convert_utc_to_bst(datetime.utcnow()).date() - timedelta(days=1)
else:
# When calling the task its a string in the format of "YYYY-MM-DD"
day_start = datetime.strptime(day_start, "%Y-%m-%d").date()
for i in range(0, 4):
process_day = (day_start - timedelta(days=i)).isoformat()
create_nightly_billing_for_day.apply_async(
kwargs={'process_day': process_day},
queue=QueueNames.REPORTING
)
current_app.logger.info(
f"create-nightly-billing task: create-nightly-billing-for-day task created for {process_day}"
)
@notify_celery.task(name="create-nightly-billing-for-day")
def create_nightly_billing_for_day(process_day):
process_day = datetime.strptime(process_day, "%Y-%m-%d").date()
current_app.logger.info(
f'create-nightly-billing-for-day task for {process_day}: started'
)
start = datetime.utcnow()
transit_data = fetch_billing_data_for_day(process_day=process_day)
end = datetime.utcnow()
current_app.logger.info(
f'create-nightly-billing-for-day task for {process_day}: data fetched in {(end - start).seconds} seconds'
)
for data in transit_data:
update_fact_billing(data, process_day)
current_app.logger.info(
f"create-nightly-billing-for-day task for {process_day}: "
f"task complete. {len(transit_data)} rows updated"
)
@notify_celery.task(name="create-nightly-notification-status")
@cronitor("create-nightly-notification-status")
def create_nightly_notification_status():
"""
Aggregate notification statuses into rows in ft_notification_status.
In order to minimise effort, this task assumes that:
- Email + SMS statuses don't change after 3 days. This is currently true
because all outstanding email / SMS are "timed out" after 3 days, and
we reject delivery receipts after this point.
- Letter statuses don't change after 9 days. There's no "timeout" for
letters but this is the longest we've had to cope with in the past - due
to major issues with our print provider.
Because the time range of the task exceeds the minimum possible retention
period (3 days), we need to choose which table to query for each service.
The aggregation happens for 1 extra day in case:
- This task or the "timeout" task fails to run.
- Data is (somehow) still in transit to the history table, which would
mean the aggregated results are temporarily incorrect.
"""
yesterday = convert_utc_to_bst(datetime.utcnow()).date() - timedelta(days=1)
for (service_id,) in db.session.query(Service.id):
for notification_type in [SMS_TYPE, EMAIL_TYPE, LETTER_TYPE]:
days = 10 if notification_type == LETTER_TYPE else 4
for i in range(days):
process_day = yesterday - timedelta(days=i)
create_nightly_notification_status_for_service_and_day.apply_async(
kwargs={
'process_day': process_day.isoformat(),
'notification_type': notification_type,
'service_id': service_id,
},
queue=QueueNames.REPORTING
)
@notify_celery.task(name="create-nightly-notification-status-for-service-and-day")
def create_nightly_notification_status_for_service_and_day(process_day, service_id, notification_type):
process_day = datetime.strptime(process_day, "%Y-%m-%d").date()
start = datetime.utcnow()
new_status_rows = fetch_status_data_for_service_and_day(
process_day=process_day,
notification_type=notification_type,
service_id=service_id,
)
end = datetime.utcnow()
current_app.logger.info(
f'create-nightly-notification-status-for-day task fetch '
f'for {service_id}, {notification_type} for {process_day}: '
f'data fetched in {(end - start).seconds} seconds'
)
start = datetime.utcnow()
update_fact_notification_status(
new_status_rows=new_status_rows,
process_day=process_day,
notification_type=notification_type,
service_id=service_id
)
end = datetime.utcnow()
current_app.logger.info(
f'create-nightly-notification-status-for-day task update '
f'for {service_id}, {notification_type} for {process_day}: '
        f'data updated in {(end - start).seconds} seconds'
)
|
Python
| 0 |
@@ -3458,67 +3458,8 @@
1)%0A%0A
- for (service_id,) in db.session.query(Service.id):%0A
@@ -3528,20 +3528,16 @@
-
-
days = 1
@@ -3590,20 +3590,16 @@
-
for i in
@@ -3620,28 +3620,24 @@
-
-
process_day
@@ -3669,16 +3669,80 @@
ays=i)%0A%0A
+ for (service_id,) in db.session.query(Service.id):%0A%0A
|
69a16e61f0b0d5eb6d1f0819ff379c0d86b67dc3
|
fix in lcb
|
robo/acquisition/lcb.py
|
robo/acquisition/lcb.py
|
import logging
import numpy as np
from robo.acquisition.base_acquisition import BaseAcquisitionFunction
logger = logging.getLogger(__name__)
class LCB(BaseAcquisitionFunction):
def __init__(self, model, X_lower, X_upper, par=0.0, **kwargs):
r"""
        The lower confidence bound acquisition function that computes for a
test point the acquisition value by:
.. math::
LCB(X) := \mu(X) - \kappa\sigma(X)
Parameters
----------
model: Model object
A model that implements at least
- predict(X)
- getCurrentBestX().
            If you want to calculate derivatives then it should also support
- predictive_gradients(X)
X_lower: np.ndarray (D)
Lower bounds of the input space
X_upper: np.ndarray (D)
Upper bounds of the input space
par: float
Controls the balance between exploration
and exploitation of the acquisition function. Default is 0.01
"""
self.par = par
super(LCB, self).__init__(model, X_lower, X_upper)
def compute(self, X, derivative=False, **kwargs):
"""
Computes the LCB acquisition value and its derivatives.
Parameters
----------
X: np.ndarray(1, D), The input point where the acquisition function
            should be evaluated. The dimensionality of X is (N, D), with N as
the number of points to evaluate at and D is the number of
dimensions of one X.
derivative: Boolean
If is set to true also the derivative of the acquisition
function at X is returned.
Returns
-------
np.ndarray(1,1)
LCB value of X
np.ndarray(1,D)
Derivative of LCB at X (only if derivative=True)
"""
mean, var = self.model.predict(X)
# Minimize in f so we maximize the negative lower bound
acq = - mean + self.par * np.sqrt(var)
if derivative:
dm, dv = self.model.predictive_gradients(X)
grad = -dm + self.par * dv / (2 * np.sqrt(var))
return acq, grad
else:
return acq
def update(self, model):
self.model = model
|
Python
| 0.000004 |
@@ -2007,14 +2007,15 @@
= -
+(
mean
-+
+-
sel
@@ -2034,16 +2034,17 @@
qrt(var)
+)
%0A
@@ -2139,12 +2139,13 @@
= -
+(
dm
-+
+-
sel
@@ -2167,32 +2167,33 @@
* np.sqrt(var))
+)
%0A ret
|
7dab7ffa870186cab8dd83ed497ec4e40fc9a7ba
|
Fix minor bug with the licencer script.
|
SCons/licencer.py
|
SCons/licencer.py
|
# -*- coding: UTF8 -*-
# ***** BEGIN LICENSE BLOCK *****
# Sconspiracy - Copyright (C) IRCAD, 2004-2009.
# Distributed under the terms of the BSD Licence as
# published by the Open Source Initiative.
# ****** END LICENSE BLOCK ******
import os
import textwrap
licence = {}
comments = {}
comments['.cpp'] = {
'beginblock' : '/* ' ,
'endblock' : ' */' ,
'indent' : ' * ' ,
}
comments['.py'] = {
'beginblock' : '# ' ,
'endblock' : '' ,
'indent' : '# ' ,
}
comments['.hpp'] = comments['.cpp']
comments['.c'] = comments['.cpp']
comments['.h'] = comments['.cpp']
comments['.cxx'] = comments['.cpp']
comments['.hxx'] = comments['.cpp']
comments['.py'] = comments['.py']
ignore_lines = ['vim:', 'utf8', 'UTF8']
licence['newbegin'] = '***** BEGIN LICENSE BLOCK *****'
licence['newend'] = '****** END LICENSE BLOCK ******'
licence['LGPL'] = """FW4SPL - Copyright (C) IRCAD, 2009.
Distributed under the terms of the GNU Lesser General Public License (LGPL) as
published by the Free Software Foundation. """
licence['BSD'] = """Sconspiracy - Copyright (C) IRCAD, 2004-2009.
Distributed under the terms of the BSD Licence as
published by the Open Source Initiative. """
sep = '\n'
def get_licence_block(filetype, licence_content):
if filetype not in comments:
raise KeyError, \
"Don't know how to write in '{0}' files".format(filetype)
com = comments[filetype]
#content = textwrap.wrap(
#licence_content,
#initial_indent = com['indent'],
#subsequent_indent = com['indent'],
#width = 80,
#)
content = [com['indent'] + el for el in licence_content.split(sep)]
header = ''.join([ com['beginblock'], licence['newbegin'] ])
content = sep.join(content)
footer = ''.join([ com['indent'], licence['newend'], com['endblock'] ])
licenceblock = [ header, sep, content, sep, footer, sep]
return licenceblock
def find_first_matching( needles, haystack, offset = 0):
for i, v in enumerate(haystack[offset:]):
if any(needle in v for needle in needles):
return i + offset
def find_first_not_matching( needles, haystack, offset = 0):
for i, v in enumerate(haystack[offset:]):
if not any(needle in v for needle in needles):
return i + offset
def licencify_list(lines, block):
fcontent = ''.join(lines)
replace_old_licence = licence['begin'] in fcontent
extra = []
if replace_old_licence:
id_begin = find_first_matching( [licence['begin']], lines )
id_end = find_first_matching( [licence['end'] ], lines , id_begin )
id_end += 1
print "updating block"
else:
id_begin = find_first_not_matching ( ignore_lines, lines )
id_end = id_begin
extra = [sep]
print "adding block"
lines[id_begin:id_end] = block + extra
return lines
def licencify_file(file, content):
basename, ext = os.path.splitext(file)
print 'reading', file, '...'
f = open(file)
lines = f.readlines()
f.close()
block = get_licence_block(ext, content)
licencify_list(lines, block)
print 'writing', file, '...'
f = open(file, "w")
f.writelines(lines)
f.close()
def licencify_dirs(dirs, content):
for dir in dirs:
if not os.path.isdir(dir):
print "warning : ", dir, "is not a dir"
continue
for root, dirs, files in os.walk(dir):
for file in files:
basename, ext = os.path.splitext(file)
if ext in comments:
licencify_file(os.path.join(root,file), content)
def main():
import sys
licencify_dirs(sys.argv[1:], licence['BSD'])
if __name__ == "__main__":
main()
|
Python
| 0 |
@@ -781,16 +781,121 @@
'UTF8'%5D%0A
+licence%5B'begin'%5D = '***** BEGIN LICENSE BLOCK *****'%0Alicence%5B'end'%5D = '****** END LICENSE BLOCK ******'
%0Alicence
@@ -1135,17 +1135,16 @@
LGPL) as
-
%0Apublish
@@ -3890,19 +3890,20 @@
icence%5B'
-BSD
+LGPL
'%5D)%0A%0Aif
|
3468b32964560f4092593e03ba552d7e6b56943d
|
Support renaming of _ to % routines
|
Scripts/PackRO.py
|
Scripts/PackRO.py
|
#!/usr/bin/env python
# Pack .m files into M[UMPS] routine transfer format (^%RO)
#
# python PackRO.py *.m > routines.ro
#
# or
#
# ls *.m | python PackRO.py > routines.ro
#
#---------------------------------------------------------------------------
# Copyright 2011 The Open Source Electronic Health Record Agent
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
import sys
import os
def pack(files, output):
output.write('Routines\n\n')
for f in files:
if not f.endswith('.m'):
sys.stderr.write('Skipping non-.m file: %s\n' % f)
continue
n = os.path.basename(f)[:-2]
m = open(f,"r")
output.write('%s\n'%n)
for line in m:
output.write(line)
output.write('\n')
output.write('\n')
output.write('\n')
def main():
files = sys.argv[1:]
if not files:
files = [a.rstrip() for a in sys.stdin]
pack(files, sys.stdout)
if __name__ == '__main__':
main()
|
Python
| 0.000003 |
@@ -1189,16 +1189,47 @@
f)%5B:-2%5D%0A
+ n = n.replace(%22_%22,%22%25%22)%0A
|
1b398b07b1ba1fa8a304f24d9e75d6f76fbc56f7
|
remove caps and spaces walmart
|
Server/walmart.py
|
Server/walmart.py
|
from crawl_lib import *
from pymongo import MongoClient
import unicodedata
import json
# Parses starting from the base_url and sends the data to the db
def parse():
deps_exclusions = {'91083', '5426', '4096', '4104'}
full_json = json.loads(urlopen('http://api.walmartlabs.com/v1/taxonomy?format=json&apiKey=' + api_key).read().decode('utf8'))
departments = full_json['categories']
for department in departments:
if department['id'] in deps_exclusions:
continue
print(department['id'])
categories = department['children']
for category in categories:
if 'name' in category:
cat_name = category['name']
else:
print('there is no name for this category! skipping it for now!')
print(category)
continue
if 'children' in category:
subcats = category['children']
else:
subcats = [category]
for subcat in subcats:
cat_id = subcat['id']
cat_json = json.loads(urlopen('http://api.walmartlabs.com/v1/paginated/items?format=json&category=' + cat_id + '&apiKey=' + api_key).read().decode('utf8'))
items = cat_json['items']
for item in items:
data = {}
name = item['name']
name.encode('ascii', 'ignore')
data['name'] = name.replace('.', '-').lower()
if 'salePrice' in item:
data['price'] = item['salePrice']
else:
continue # no price for this item
data['url'] = item['productUrl']
if 'thumbnailImage' in item:
data['image'] = item['thumbnailImage']
else:
data['image'] = ''
data['store'] = 'Walmart'
send_to_db(cat_name, data, cat_db, item_db, None, None)
return
if __name__ == '__main__':
api_key = 'dw25ngn8v6wa97qt757m2a97'
client = MongoClient()
cat_db = client.cat_db
item_db = client.items_db
parse()
# todo:
# add category exclusion list
# send timestamp along with json doc to server
|
Python
| 0.000002 |
@@ -676,32 +676,76 @@
category%5B'name'%5D
+.replace('.', '-').replace(' ', '-').lower()
%0A els
|
a1d02e3a88e5c46a700385e9cef50c9ebcad68b1
|
Simplify test_roi_pooling_2d with no_grads option in check_backward
|
tests/chainer_tests/functions_tests/pooling_tests/test_roi_pooling_2d.py
|
tests/chainer_tests/functions_tests/pooling_tests/test_roi_pooling_2d.py
|
import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
class TestROIPooling2D(unittest.TestCase):
def setUp(self):
N = 4
n_channels = 3
self.x = numpy.arange(
N * n_channels * 12 * 8,
dtype=numpy.float32).reshape((N, n_channels, 12, 8))
numpy.random.shuffle(self.x)
self.x = 2 * self.x / self.x.size - 1
self.rois = numpy.array([
[0, 1, 1, 6, 6],
[2, 6, 2, 7, 11],
[1, 3, 1, 5, 10],
[0, 3, 3, 3, 3]
], dtype=numpy.float32)
self.outh, self.outw = 5, 7
self.spatial_scale = 0.6
self.gy = numpy.random.uniform(
-1, 1, (N, n_channels, self.outh, self.outw)).astype(numpy.float32)
def check_forward(self, x_data, roi_data):
x = chainer.Variable(x_data)
rois = chainer.Variable(roi_data)
y = functions.roi_pooling_2d(
x, rois, outh=self.outh, outw=self.outw,
spatial_scale=self.spatial_scale)
self.assertEqual(y.data.dtype, numpy.float32)
y_data = cuda.to_cpu(y.data)
self.assertEqual(self.gy.shape, y_data.shape)
@condition.retry(3)
def test_forward_cpu(self):
self.check_forward(self.x, self.rois)
@attr.gpu
@condition.retry(3)
def test_forward_gpu(self):
self.check_forward(cuda.to_gpu(self.x), cuda.to_gpu(self.rois))
@attr.gpu
@condition.retry(3)
def test_forward_cpu_gpu_equal(self):
# cpu
x_cpu = chainer.Variable(self.x)
rois_cpu = chainer.Variable(self.rois)
y_cpu = functions.roi_pooling_2d(
x_cpu, rois_cpu, outh=self.outh, outw=self.outw,
spatial_scale=self.spatial_scale)
# gpu
x_gpu = chainer.Variable(cuda.to_gpu(self.x))
rois_gpu = chainer.Variable(cuda.to_gpu(self.rois))
y_gpu = functions.roi_pooling_2d(
x_gpu, rois_gpu, outh=self.outh, outw=self.outw,
spatial_scale=self.spatial_scale)
gradient_check.assert_allclose(y_cpu.data, cuda.to_cpu(y_gpu.data))
def check_backward(self, x_data, roi_data, y_grad):
x = chainer.Variable(x_data)
rois = chainer.Variable(roi_data)
y = functions.roi_pooling_2d(x, rois, outh=self.outh, outw=self.outw,
spatial_scale=self.spatial_scale)
y.grad = y_grad
y.backward()
xs = (x.data, rois.data)
def f():
func = y.creator
return func.forward(xs)
gx, _ = gradient_check.numerical_grad(f, xs, (y.grad,))
gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x.grad))
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.x, self.rois, self.gy)
@attr.gpu
@condition.retry(3)
def test_backward_gpu(self):
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.rois),
cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
|
Python
| 0.000003 |
@@ -2331,78 +2331,38 @@
-x = chainer.Variable(x_data)%0A rois = chainer.Variable(roi_data)
+gradient_check.check_backward(
%0A
@@ -2358,35 +2358,35 @@
ckward(%0A
-y =
+
functions.roi_p
@@ -2384,32 +2384,21 @@
ons.
-roi_p
+ROIP
ooling
-_2d(x, rois,
+2D(
outh
@@ -2400,32 +2400,67 @@
(outh=self.outh,
+%0A
outw=self.outw,
@@ -2452,34 +2452,32 @@
outw=self.outw,%0A
-
@@ -2520,32 +2520,33 @@
f.spatial_scale)
+,
%0A y.grad
@@ -2542,62 +2542,15 @@
-y.grad = y_grad%0A y.backward()%0A%0A xs = (x.
+ (x_
data
@@ -2558,239 +2558,46 @@
roi
-s.
+_
data)
-%0A%0A def f():%0A func = y.creator%0A return func.forward(xs)%0A%0A gx, _ = gradient_check.numerical_grad(f, xs, (y.grad,))%0A gradient_check.assert_allclose(cuda.to_cpu(gx), cuda.to_cpu(x.grad)
+, y_grad, no_grads=%5BFalse, True%5D
)%0A%0A
|
c34244ae414533491290d2e3502119c4d11cafe9
|
Fix in 27
|
scripts/27-pubyear-stats.py
|
scripts/27-pubyear-stats.py
|
#!/usr/bin/env python3
import json
import re
from collections import defaultdict, Counter
from xdfile.utils import error, debug, info
from xdfile import utils, metasql, metadatabase as metadb
from xdfile import year_from_date, dow_from_date
import xdfile
def diff_authors(a1, a2):
if not a1 or not a2:
return False # inconclusive
a1 = a1.lower()
a2 = a2.lower()
if a1 in a2 or a2 in a1:
return False
def main():
args = utils.get_args('generate pub-years data')
outf = utils.open_output()
weekdays = [ 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun' ]
pubyears = {} # set()
for xd in xdfile.corpus():
puby = (xd.publication_id(), xd.year())
if puby not in pubyears:
pubyears[puby] = []
pubyears[puby].append(xd)
if pubyears:
metasql.execute("DELETE FROM stats;")
for puby, xdlist in sorted(pubyears.items()):
pubid, year = puby
npublic = 0
# TODO: SELECT FROM publications
nexisting = 0
# organize by day-of-week
byweekday = {}
byweekday_similar = {}
for w in weekdays:
byweekday[w] = []
byweekday_similar[w] = []
for xd in xdlist:
dow = dow_from_date(xd.get_header('Date'))
if dow: # Might be empty date or only a year
byweekday[dow].append(xd)
for r in metasql.select("SELECT * FROM similar_grids WHERE xdid LIKE '{}%' AND GridMatchPct >= 25".format(pubid + str(year))):
xd = xdfile.get_xd(r['xdid'])
if xd:
dt = xd.get_header('Date')
if dt:
assert dt
dow = dow_from_date(dt)
if dow: # Might be empty date or only a year
byweekday_similar[dow].append(r)
else:
debug("Date not set for: %s" % xd)
# tally stats
for weekday in weekdays:
copyrights = Counter() # [copyright_text] -> number of xd
editors = Counter() # [editor_name] -> number of xd
formats = Counter() # ["15x15 RS"] -> number of xd
# todo
nexisting = 0
nxd = len(byweekday[weekday])
public_xdids = [] # Empty for now
for xd in byweekday[weekday]:
xdid = xd.xdid()
if (year.isdigit() and int(year) <= 1965) or xdid in public_xdids:
npublic += 1
editor = xd.get_header('Editor').strip()
if editor:
editors[editor] += 1
sizestr = xd.sizestr()
if sizestr:
formats[sizestr] += 1
copyright = xd.get_header('Copyright').strip()
if copyright:
copyrights[copyright] += 1
# debug("ME: %s MCPR: %s MF: %s" % (list(editors), list(copyrights), list(formats)))
def process_counter(count, comp_value):
# Process counter comparing with comp_value
if count:
item, num = count.most_common(1)[0]
if num != comp_value:
item += " (%s)" % num
else:
item = ''
return item
#
maineditor = process_counter(editors, nxd)
maincopyright = process_counter(copyrights, nxd)
mainformat = process_counter(formats, nxd)
reprints = 0
touchups = 0
redones = 0
copies = 0
themecopies = 0
for r in byweekday_similar[weekday]:
# debug("Xdid %s Xdidmatch %s" % (r['xdid'], r['xdidMatch']))
xd1 = xdfile.get_xd(r['xdid'])
xd2 = xdfile.get_xd(r['xdidMatch'])
if xd1 is None or xd2 is None:
continue
# debug("XD1: %s XD2: %s" % (xd1, xd2))
dt1 = xd1.get_header('Date')
dt2 = xd2.get_header('Date')
aut1 = xd1.get_header('Author')
aut2 = xd2.get_header('Author')
pct = int(r['GridMatchPct'])
if dt2 < dt1: # only capture the later one
##deduce_similarity_type
if diff_authors(aut1, aut2): # suspicious
if aut
if pct >= 50:
copies += 1
elif pct >= 30:
themecopies += 1
else:
if pct == 100:
reprints += 1
elif pct >= 50:
touchups += 1
elif pct >= 30:
themecopies += 1
metasql.execute("INSERT INTO stats VALUES (?,?,?, ?,?,?, ?, ?,?,?, ?,?, ?,?)",
(pubid, year, weekday,
mainformat, maineditor, maincopyright,
nexisting, nxd, npublic,
reprints, touchups, redones,
copies, themecopies))
if __name__ == "__main__":
main()
|
Python
| 0.999934 |
@@ -3933,16 +3933,72 @@
s None:%0A
+ info(%22skipping %25s %25s%22 %25 (xd1, xd2))%0A
@@ -4476,39 +4476,8 @@
ous%0A
- if aut%0A
|
d6c1774b75839192b0235e5737cdba0d17759fde
|
Update mqtt_easydriver_stepper.py
|
linkit/easydriver/mqtt_easydriver_stepper.py
|
linkit/easydriver/mqtt_easydriver_stepper.py
|
import paho.mqtt.client as mqtt
import json, time
import mraa
pin19 = mraa.Pwm(19)
pin0 = mraa.Gpio(0)
pin0.dir(mraa.DIR_OUT)
# ----- CHANGE THESE FOR YOUR SETUP -----
MQTT_HOST = "190.97.168.236"
MQTT_PORT = 1883
def on_connect(client, userdata, rc):
print("\nConnected with result code " + str(rc) + "\n")
#Subscribing in on_connect() means that if we lose the connection and
# reconnect then subscriptions will be renewed.
#client.subscribe("/pyxo/xyusers/{USERNAME}/{APIKEY}/iot/control/".format(**vars()), 2) # Connect to everything in /mcu topic
client.subscribe("/pryxo/yxusers/motor/control/")
print("Subscribed to homecontrol")
def on_message_iotrl(client, userdata, msg):
print("\n\t* Linkit UPDATED ("+msg.topic+"): " + str(msg.payload))
if msg.payload == "m1":
pin0.write(0)
pin1 = mraa.Gpio(1)
pin1.dir(mraa.DIR_OUT)
pin1.write(0)
pin19.period_us(300)
pin19.enable(True)
pin19.write(0.1)
time.sleep(2)
client.publish("/pryxo/yxusers/iot/status/", "derecha", 2)
if msg.payload == "m0":
pin1 = mraa.Gpio(1)
pin1.dir(mraa.DIR_OUT)
pin1.write(1)
pin19.period_us(300)
pin19.enable(True)
pin19.write(0.1)
time.sleep(2)
client.publish("/pryxo/yxusers/iot/status/", "izquierda", 2)
if msg.payload == "m2":
pin0.write(1)
client.publish("/pryxo/yxusers/iot/status/", "STOP", 2)
def command_error():
print("Error: Unknown command")
client = mqtt.Client(client_id="linkit7688-stepper-motor")
# Callback declarations (functions run based on certain messages)
client.on_connect = on_connect
client.message_callback_add("/pryxo/yxusers/motor/control/", on_message_iotrl)
# This is where the MQTT service connects and starts listening for messages
client.connect(MQTT_HOST, MQTT_PORT, 60)
client.loop_start() # Background thread to call loop() automatically
# Main program loop
while True:
time.sleep(10)
|
Python
| 0 |
@@ -211,16 +211,57 @@
T = 1883
+%0A#---------------------------------------
%0A%0Adef on
@@ -355,266 +355,8 @@
%5Cn%22)
-%0A%0A #Subscribing in on_connect() means that if we lose the connection and%0A # reconnect then subscriptions will be renewed.%0A #client.subscribe(%22/pyxo/xyusers/%7BUSERNAME%7D/%7BAPIKEY%7D/iot/control/%22.format(**vars()), 2) # Connect to everything in /mcu topic
%0A
@@ -448,17 +448,16 @@
trol%22)%0A%0A
-%0A
def on_m
@@ -846,16 +846,35 @@
= %22m0%22:%0A
+ %09pin0.write(0)%0A
%09pin1 =
@@ -1403,74 +1403,8 @@
%22)%0A%0A
-# Callback declarations (functions run based on certain messages)%0A
clie
@@ -1514,84 +1514,8 @@
l)%0A%0A
-# This is where the MQTT service connects and starts listening for messages%0A
clie
@@ -1575,57 +1575,8 @@
t()
- # Background thread to call loop() automatically
%0A%0A#
|
2577c9de30eef0b872bb5a8d0d34b498589c95b7
|
fix property called [skip ci]
|
custom/enikshay/management/commands/data_dumps_person_case.py
|
custom/enikshay/management/commands/data_dumps_person_case.py
|
from __future__ import (
absolute_import,
print_function,
)
from corehq.apps.users.models import CommCareUser
from corehq.form_processor.interfaces.dbaccessors import CaseAccessors
from corehq.apps.es.case_search import CaseSearchES
from corehq.apps.es import queries
from custom.enikshay.case_utils import (
CASE_TYPE_EPISODE,
CASE_TYPE_PERSON,
get_all_occurrence_cases_from_person,
)
from custom.enikshay.const import ENROLLED_IN_PRIVATE
from custom.enikshay.management.commands.base_data_dump import BaseDataDump
DOMAIN = "enikshay"
class Command(BaseDataDump):
""" data dumps for person cases
https://docs.google.com/spreadsheets/d/1OPp0oFlizDnIyrn7Eiv11vUp8IBmc73hES7qqT-mKKA/edit#gid=1039030624
"""
TASK_NAME = "data_dumps_person_case"
INPUT_FILE_NAME = "data_dumps_person_case.csv"
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.case_type = CASE_TYPE_PERSON
def get_last_episode(self, case):
self.context['last_episode'] = (
self.context.get('last_episode') or
get_last_episode(case)
)
if not self.context['last_episode']:
return Exception("could not find last episode for person %s" % case.case_id)
return self.context['last_episode']
def get_custom_value(self, column_name, case):
if column_name == "Commcare UUID":
return case.case_id
elif column_name == "Created by Username":
user_id = None
try:
user_id = case.opened_by
user = CommCareUser.get_by_user_id(user_id, DOMAIN)
return user.username
except Exception as e:
return Exception("Could not get username. case opened by %s, %s" % (user_id, e))
elif column_name == "Created by User ID":
return case.opened_by
elif column_name == "Date of creation":
return case.opened_on
elif column_name == "Current Owner - PHI":
return case.owner_id
elif column_name == 'Person Status':
if case.closed:
return "closed"
elif case.owner_id == "_invalid_":
return "removed"
elif case.owner_id == '_archive_':
return "archived"
else:
return "active"
elif column_name == "Latest Episode creation Date":
return get_last_episode(case).opened_on
elif column_name == "Latest Episode Closed?":
return get_last_episode(case).closed
elif column_name == "Latest Episode - Date Closed (If any)":
return get_last_episode(case).closed_date
raise Exception("unknown custom column %s" % column_name)
def get_case_reference_value(self, case_reference, case, calculation):
if case_reference == 'last_episode':
try:
return self.get_last_episode(case).get_case_property(calculation)
except Exception as e:
return str(e)
return Exception("unknown case reference %s" % case_reference)
def get_case_ids_query(self, case_type):
"""
All open and closed person cases with person.dataset = 'real' and person.enrolled_in_private != 'true'
"""
return (CaseSearchES()
.domain(DOMAIN)
.case_type(case_type)
.case_property_query(ENROLLED_IN_PRIVATE, 'true', clause=queries.MUST_NOT)
.case_property_query("dataset", 'real')
)
def get_recently_closed_case(person_case, all_cases):
recently_closed_case = None
recently_closed_time = None
for case in all_cases:
case_closed_time = case.closed_on
if case_closed_time:
if recently_closed_time is None:
recently_closed_time = case_closed_time
recently_closed_case = case
elif recently_closed_time and recently_closed_time < case_closed_time:
recently_closed_time = case_closed_time
recently_closed_case = case
elif recently_closed_time and recently_closed_time == case_closed_time:
raise Exception("This looks like a super edge case that can be looked at. "
"Two episodes closed at the same time. Case id: {case_id}"
.format(case_id=case.case_id))
return recently_closed_case
def get_all_episode_cases_from_person(domain, person_case_id):
occurrence_cases = get_all_occurrence_cases_from_person(domain, person_case_id)
return [
case for case in CaseAccessors(domain).get_reverse_indexed_cases(
[c.case_id for c in occurrence_cases], case_types=[CASE_TYPE_EPISODE])
]
def get_last_episode(person_case):
"""
For all episode cases under the person (the host of the host of the episode is the primary person case):
If count(open episode cases with episode.is_active = 'yes') > 1, report error
If count(open episode cases with episode.is_active = 'yes') = 1, pick this case
If count(open episode cases with episode.is_active = 'yes') = 0:
pick the episode with the latest episode.closed_date if there is one
Else report 'No episodes'
"""
episode_cases = get_all_episode_cases_from_person(person_case.domain, person_case.case_id)
open_episode_cases = [
episode_case for episode_case in episode_cases
if not episode_case.closed
]
active_open_episode_cases = [
episode_case for episode_case in open_episode_cases
if episode_case.get_case_property('is_active') == 'yes'
]
if len(active_open_episode_cases) > 1:
raise Exception("Multiple active open episode cases found for %s" % person_case.case_id)
elif len(active_open_episode_cases) == 1:
return active_open_episode_cases[0]
else:
recently_closed_case = get_recently_closed_case(person_case, episode_cases)
if recently_closed_case:
return recently_closed_case
else:
raise Exception("No episodes for %s" % person_case.case_id)
|
Python
| 0 |
@@ -2717,20 +2717,18 @@
.closed_
-date
+on
%0A
|
6b7bd1c412b21a748b39a07a792f8b2c8461f9e2
|
Fix issue #17
|
marmoset/installimage/installimage_config.py
|
marmoset/installimage/installimage_config.py
|
import os
class InstallimageConfig:
CFG_DIR = '/srv/tftp/installimage/'
def __init__(self, mac):
self.variables = {}
self.mac = mac
if self.exists():
self.__read_config_file()
def add_or_set(self, key, value):
self.variables[key] = value
def create(self):
self.__write_config_file()
def exists(self):
return os.path.isfile(self.file_path())
def file_name(self):
'''Return the file name in the Installimage file name style.'''
return self.mac.replace(":", "_")
def file_path(self, name=None):
'''Return the path to the config file of th instance.'''
if name is None:
name = self.file_name()
cfgdir = InstallimageConfig.CFG_DIR.rstrip('/')
return os.path.join(cfgdir, name)
def __read_config_file(self, path=None):
if path is None:
path = self.file_path()
lines = []
with open(path, 'r') as f:
lines = f.readlines()
f.close()
for line in lines:
key = line.split(" ")[0]
value = line.split(" ", 1)[1]
self.variables[key] = value
def __write_config_file(self, path=None):
if path is None:
path = self.file_path()
variable_lines = []
for key in self.variables:
variable_lines.append("%s %s" % (key.upper(), self.variables[key]))
content = "\n".join(variable_lines)
os.makedirs(InstallimageConfig.CFG_DIR, exist_ok=True)
with open(path, 'w') as f:
f.write(content)
f.close()
|
Python
| 0 |
@@ -274,32 +274,40 @@
lf.variables%5Bkey
+.upper()
%5D = value%0A%0A d
@@ -1419,24 +1419,16 @@
%22 %25 (key
-.upper()
, self.v
|
74cfb1bd8e60e1d348115677e92c5e64858ec785
|
Add clearer instructions on no component support. (#2685)
|
packaged_releases/patches/patch_component_custom.py
|
packaged_releases/patches/patch_component_custom.py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core._util import CLIError
MSG_TMPL = """
az component and subcommands are not available with the current Azure CLI installation.
If installed with apt-get, please use apt-get to update this installation.
{}
"""
def _raise_error(msg):
raise CLIError(MSG_TMPL.format(msg))
def list_components():
""" List the installed components """
_raise_error("Use 'az --version' to list component versions.")
def list_available_components():
""" List publicly available components that can be installed """
_raise_error("No additional components available.")
def remove(component_name):
""" Remove a component """
_raise_error("Components cannot be removed.")
def update(private=False, pre=False, link=None, additional_components=None, allow_third_party=False):
""" Update the CLI and all installed components """
_raise_error("Components cannot be updated.")
|
Python
| 0.000552 |
@@ -527,16 +527,17 @@
use
+'
apt-get
to u
@@ -532,16 +532,185 @@
apt-get
+update' to update this installation.%0AIf installed with Docker, please use 'docker pull' to update this installation.%0AIf installed with Windows MSI, download the new MSI
to updat
|
c5a31be1bd452224c2b35c4f3e3132b2df1431e7
|
reorder imports
|
meinberlin/apps/documents/exports.py
|
meinberlin/apps/documents/exports.py
|
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from adhocracy4.comments.models import Comment
from adhocracy4.exports import mixins as export_mixins
from adhocracy4.exports import views as export_views
from adhocracy4.projects.mixins import ProjectMixin
from meinberlin.apps.exports import mixins as mb_export_mixins
from meinberlin.apps.exports import register_export
@register_export(_('Documents with comments'))
class DocumentExportView(
export_views.BaseItemExportView,
export_mixins.ExportModelFieldsMixin,
mb_export_mixins.UserGeneratedContentExportMixin,
export_mixins.ItemExportWithLinkMixin,
export_mixins.ItemExportWithRatesMixin,
mb_export_mixins.ItemExportWithRepliesToMixin,
ProjectMixin):
model = Comment
fields = ['id', 'comment', 'created']
def get_queryset(self):
comments = (
Comment.objects.filter(paragraph__chapter__module=self.module) |
Comment.objects.filter(chapter__module=self.module) |
Comment.objects.filter(
parent_comment__paragraph__chapter__module=self.module) |
Comment.objects.filter(parent_comment__chapter__module=self.module)
)
return comments
def get_base_filename(self):
return '%s_%s' % (self.project.slug,
timezone.now().strftime('%Y%m%dT%H%M%S'))
|
Python
| 0.000009 |
@@ -243,60 +243,8 @@
ews%0A
-from adhocracy4.projects.mixins import ProjectMixin%0A
from
@@ -298,24 +298,24 @@
port_mixins%0A
+
from meinber
@@ -433,49 +433,8 @@
ew(%0A
- export_views.BaseItemExportView,%0A
@@ -695,20 +695,40 @@
-ProjectMixin
+export_views.BaseItemExportView%0A
):%0A%0A
|
48fa1b07892cf4431b0f24b2c4ad89258ed6b5e3
|
Change "Additional Parameters" to "Other Parameters".
|
scikits/image/filter/lpi_filter.py
|
scikits/image/filter/lpi_filter.py
|
"""
:author: Stefan van der Walt, 2008
:license: modified BSD
"""
__all__ = ['inverse', 'wiener', 'LPIFilter2D']
__docformat__ = 'restructuredtext en'
import numpy as np
from scipy.fftpack import fftshift, ifftshift
eps = np.finfo(float).eps
def _min_limit(x, val=eps):
mask = np.abs(x) < eps
x[mask] = np.sign(x[mask]) * eps
def _centre(x, oshape):
"""Return an array of oshape from the centre of x.
"""
start = (np.array(x.shape) - np.array(oshape)) / 2. + 1
out = x[[slice(s, s + n) for s, n in zip(start, oshape)]]
return out
def _pad(data, shape):
"""Pad the data to the given shape with zeros.
Parameters
----------
data : 2-d ndarray
Input data
shape : (2,) tuple
"""
out = np.zeros(shape)
out[[slice(0, n) for n in data.shape]] = data
return out
class LPIFilter2D(object):
"""Linear Position-Invariant Filter (2-dimensional)
"""
def __init__(self, impulse_response, **filter_params):
"""
Parameters
----------
impulse_response : callable `f(r, c, **filter_params)`
Function that yields the impulse response. `r` and
`c` are 1-dimensional vectors that represent row and
column positions, in other words coordinates are
(r[0],c[0]),(r[0],c[1]) etc. `**filter_params` are
passed through.
            In other words, `impulse_response` would be called like this:
>>> r = [0,0,0,1,1,1,2,2,2]
>>> c = [0,1,2,0,1,2,0,1,2]
>>> impulse_response(r, c, **filter_params)
Examples
--------
Gaussian filter:
>>> def filt_func(r, c):
return np.exp(-np.hypot(r, c)/1)
>>> filter = LPIFilter2D(filt_func)
"""
if impulse_response is None:
raise ValueError("Impulse response must be a callable.")
self.impulse_response = impulse_response
self.filter_params = filter_params
self._cache = None
def _prepare(self, data):
"""Calculate filter and data FFT in preparation for filtering.
"""
dshape = np.array(data.shape)
dshape += (dshape % 2 == 0) # all filter dimensions must be uneven
oshape = np.array(data.shape) * 2 - 1
if self._cache is None or np.any(self._cache.shape != oshape):
coords = np.mgrid[[slice(0, float(n)) for n in dshape]]
# this steps over two sets of coordinates,
# not over the coordinates individually
for k,coord in enumerate(coords):
coord -= (dshape[k] - 1)/2.
coords = coords.reshape(2, -1).T # coordinate pairs (r,c)
f = self.impulse_response(coords[:,0],coords[:,1],
**self.filter_params).reshape(dshape)
f = _pad(f,oshape)
F = np.dual.fftn(f)
self._cache = F
else:
F = self._cache
data = _pad(data, oshape)
G = np.dual.fftn(data)
return F, G
    def __call__(self, data):
        """Apply the filter to the given data.
        Parameters
        ----------
        data : (M,N) ndarray
"""
F, G = self._prepare(data)
out = np.dual.ifftn(F * G)
out = np.abs(_centre(out, data.shape))
return out
def forward(data, impulse_response=None, filter_params={},
predefined_filter=None):
"""Apply the given filter to data.
Parameters
----------
data : (M,N) ndarray
Input data.
impulse_response : callable `f(r, c, **filter_params)`
Impulse response of the filter. See LPIFilter2D.__init__.
filter_params : dict
Additional keyword parameters to the impulse_response function.
Other Parameters
----------------
predefined_filter : LPIFilter2D
If you need to apply the same filter multiple times over
different images, construct the LPIFilter2D and specify
it here.
Examples
--------
Gaussian filter:
>>> def filt_func(r, c):
return np.exp(-np.hypot(r, c)/1)
>>> forward(data, filt_func)
"""
if predefined_filter is None:
predefined_filter = LPIFilter2D(impulse_response, **filter_params)
return predefined_filter(data)
def inverse(data, impulse_response=None, filter_params={}, max_gain=2,
predefined_filter=None):
"""Apply the filter in reverse to the given data.
Parameters
----------
data : (M,N) ndarray
Input data.
impulse_response : callable `f(r, c, **filter_params)`
Impulse response of the filter. See LPIFilter2D.__init__.
filter_params : dict
Additional keyword parameters to the impulse_response function.
max_gain : float
Limit the filter gain. Often, the filter contains
zeros, which would cause the inverse filter to have
infinite gain. High gain causes amplification of
artefacts, so a conservative limit is recommended.
Additional Parameters
---------------------
predefined_filter : LPIFilter2D
If you need to apply the same filter multiple times over
different images, construct the LPIFilter2D and specify
it here.
"""
if predefined_filter is None:
filt = LPIFilter2D(impulse_response, **filter_params)
else:
filt = predefined_filter
F, G = filt._prepare(data)
_min_limit(F)
F = 1/F
mask = np.abs(F) > max_gain
F[mask] = np.sign(F[mask]) * max_gain
return _centre(np.abs(ifftshift(np.dual.ifftn(G * F))), data.shape)
def wiener(data, impulse_response=None, filter_params={}, K=0.25,
predefined_filter=None):
"""Minimum Mean Square Error (Wiener) inverse filter.
Parameters
----------
data : (M,N) ndarray
Input data.
K : float or (M,N) ndarray
Ratio between power spectrum of noise and undegraded
image.
impulse_response : callable `f(r, c, **filter_params)`
Impulse response of the filter. See LPIFilter2D.__init__.
filter_params : dict
Additional keyword parameters to the impulse_response function.
Additional Parameters
---------------------
predefined_filter : LPIFilter2D
If you need to apply the same filter multiple times over
different images, construct the LPIFilter2D and specify
it here.
"""
if predefined_filter is None:
filt = LPIFilter2D(impulse_response, **filter_params)
else:
filt = predefined_filter
F, G = filt._prepare(data)
_min_limit(F)
H_mag_sqr = np.abs(F)**2
F = 1/F * H_mag_sqr / (H_mag_sqr + K)
return _centre(np.abs(ifftshift(np.dual.ifftn(G * F))), data.shape)
def constrained_least_squares(data, lam, impulse_response=None,
filter_params={}):
raise NotImplementedError
|
Python
| 0 |
@@ -5021,34 +5021,29 @@
ended.%0A%0A
-Additional
+Other
Parameters%0A
@@ -5054,37 +5054,32 @@
----------------
------
%0A predefined_
@@ -6175,26 +6175,21 @@
n.%0A%0A
-Additional
+Other
Paramet
@@ -6204,37 +6204,32 @@
----------------
------
%0A predefined_
|
1c6c31653889c8acb60a54dc1dc9ea0f8795f122
|
bump to next dev version: 0.6.7-dev
|
ulmo/version.py
|
ulmo/version.py
|
# set version number
__version__ = '0.6.6'
|
Python
| 0 |
@@ -33,11 +33,15 @@
= '0.6.
-6
+7-dev
'%0A
|
8546e14e152c79f137e0db15e3cd7de71cd0e8b4
|
bump to next dev version: 0.7.3-dev
|
ulmo/version.py
|
ulmo/version.py
|
# set version number
__version__ = '0.7.2'
|
Python
| 0 |
@@ -37,7 +37,11 @@
0.7.
-2
+3-dev
'%0A
|
a8b43950610adb41a3de4c342c51d5b22fd5454b
|
Fix indents
|
saleor/product/forms.py
|
saleor/product/forms.py
|
import json
from django import forms
from django.utils.encoding import smart_text
from django.utils.translation import pgettext_lazy
from django_prices.templatetags.prices_i18n import gross
from ..cart.forms import AddToCartForm
class VariantChoiceField(forms.ModelChoiceField):
discounts = None
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def label_from_instance(self, obj):
variant_label = smart_text(obj)
label = pgettext_lazy(
'Variant choice field label',
'%(variant_label)s - %(price)s') % {
'variant_label': variant_label,
'price': gross(obj.get_price(discounts=self.discounts))}
return label
def update_field_data(self, product, cart):
""" Function initializing fields custom data """
self.queryset = product.variants
self.discounts = cart.discounts
self.empty_label = None
images_map = {variant.pk: [vi.image.image.url
for vi in variant.variant_images.all()]
for variant in product.variants.all()}
self.widget.attrs['data-images'] = json.dumps(images_map)
# Don't display select input if there are less than two variants
if self.queryset.count() < 2:
self.widget = forms.HiddenInput(
{'value': product.variants.all()[0].pk})
class ProductForm(AddToCartForm):
variant = VariantChoiceField(queryset=None)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
variant_field = self.fields['variant']
variant_field.update_field_data(self.product, self.cart)
def get_variant(self, cleaned_data):
return cleaned_data.get('variant')
|
Python
| 0.000034 |
@@ -600,20 +600,16 @@
-
'variant
@@ -632,20 +632,16 @@
_label,%0A
-
|
833f8ce0673701eb64fb20ee067ccd8c58e473c6
|
Correct wrong inheritance on sponsorship_typo3 child_depart wizard.
|
child_sync_typo3/wizard/child_depart_wizard.py
|
child_sync_typo3/wizard/child_depart_wizard.py
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Compassion CH (http://www.compassion.ch)
# Releasing children from poverty in Jesus' name
# @author: Emanuel Cino <[email protected]>
#
# The licence is in the file __openerp__.py
#
##############################################################################
from openerp.osv import orm
from ..model.sync_typo3 import Sync_typo3
class end_sponsorship_wizard(orm.TransientModel):
_inherit = 'end.sponsorship.wizard'
def child_depart(self, cr, uid, ids, context=None):
wizard = self.browse(cr, uid, ids[0], context)
child = wizard.child_id
res = True
if child.state == 'I':
res = child.child_remove_from_typo3()
res = super(end_sponsorship_wizard, self).child_depart(
cr, uid, ids, context) and res
return res or Sync_typo3.typo3_index_error(cr, uid, self, context)
|
Python
| 0 |
@@ -488,31 +488,28 @@
%0D%0Aclass
-end_sponsorship
+child_depart
_wizard(
@@ -550,23 +550,20 @@
= '
-end.sponsorship
+child.depart
.wiz
@@ -847,23 +847,20 @@
per(
-end_sponsorship
+child_depart
_wiz
|
bda38b1e168e227cba2baedb1c5e2e079ccaede8
|
Drop Py2 and six on salt/beacons/service.py
|
salt/beacons/service.py
|
salt/beacons/service.py
|
# -*- coding: utf-8 -*-
"""
Send events covering service status
"""
# Import Python Libs
from __future__ import absolute_import, unicode_literals
import logging
import os
import time
from salt.ext.six.moves import map
log = logging.getLogger(__name__) # pylint: disable=invalid-name
LAST_STATUS = {}
__virtualname__ = "service"
def validate(config):
"""
Validate the beacon configuration
"""
# Configuration for service beacon should be a list of dicts
if not isinstance(config, list):
return False, ("Configuration for service beacon must be a list.")
else:
_config = {}
list(map(_config.update, config))
if "services" not in _config:
return False, ("Configuration for service beacon requires services.")
else:
for config_item in _config["services"]:
if not isinstance(_config["services"][config_item], dict):
return (
False,
(
"Configuration for service beacon must "
"be a list of dictionaries."
),
)
return True, "Valid beacon configuration"
def beacon(config):
"""
Scan for the configured services and fire events
Example Config
.. code-block:: yaml
beacons:
service:
- services:
salt-master: {}
mysql: {}
The config above sets up beacons to check for
the salt-master and mysql services.
The config also supports two other parameters for each service:
`onchangeonly`: when `onchangeonly` is True the beacon will fire
events only when the service status changes. Otherwise, it will fire an
event at each beacon interval. The default is False.
`delay`: when `delay` is greater than 0 the beacon will fire events only
after the service status changes, and the delay (in seconds) has passed.
Applicable only when `onchangeonly` is True. The default is 0.
`emitatstartup`: when `emitatstartup` is False the beacon will not fire
    an event when the minion is reloaded. Applicable only when `onchangeonly` is True.
The default is True.
`uncleanshutdown`: If `uncleanshutdown` is present it should point to the
location of a pid file for the service. Most services will not clean up
this pid file if they are shutdown uncleanly (e.g. via `kill -9`) or if they
are terminated through a crash such as a segmentation fault. If the file is
present, then the beacon will add `uncleanshutdown: True` to the event. If
not present, the field will be False. The field is only added when the
service is NOT running. Omitting the configuration variable altogether will
turn this feature off.
Please note that some init systems can remove the pid file if the service
registers as crashed. One such example is nginx on CentOS 7, where the
service unit removes the pid file when the service shuts down (IE: the pid
file is observed as removed when kill -9 is sent to the nginx master
process). The 'uncleanshutdown' option might not be of much use there,
unless the unit file is modified.
Here is an example that will fire an event 30 seconds after the state of nginx
changes and report an uncleanshutdown. This example is for Arch, which
places nginx's pid file in `/run`.
.. code-block:: yaml
beacons:
service:
- services:
nginx:
onchangeonly: True
delay: 30
uncleanshutdown: /run/nginx.pid
"""
ret = []
_config = {}
list(map(_config.update, config))
for service in _config.get("services", {}):
ret_dict = {}
service_config = _config["services"][service]
ret_dict[service] = {"running": __salt__["service.status"](service)}
ret_dict["service_name"] = service
ret_dict["tag"] = service
currtime = time.time()
        # If no options are given to the service, we fall back to the defaults
# assign a False value to oncleanshutdown and onchangeonly. Those
# key:values are then added to the service dictionary.
if not service_config:
service_config = {}
if "oncleanshutdown" not in service_config:
service_config["oncleanshutdown"] = False
if "emitatstartup" not in service_config:
service_config["emitatstartup"] = True
if "onchangeonly" not in service_config:
service_config["onchangeonly"] = False
if "delay" not in service_config:
service_config["delay"] = 0
# We only want to report the nature of the shutdown
# if the current running status is False
# as well as if the config for the beacon asks for it
if "uncleanshutdown" in service_config and not ret_dict[service]["running"]:
filename = service_config["uncleanshutdown"]
ret_dict[service]["uncleanshutdown"] = (
True if os.path.exists(filename) else False
)
if "onchangeonly" in service_config and service_config["onchangeonly"] is True:
if service not in LAST_STATUS:
LAST_STATUS[service] = ret_dict[service]
if service_config["delay"] > 0:
LAST_STATUS[service]["time"] = currtime
elif not service_config["emitatstartup"]:
continue
else:
ret.append(ret_dict)
if LAST_STATUS[service]["running"] != ret_dict[service]["running"]:
LAST_STATUS[service] = ret_dict[service]
if service_config["delay"] > 0:
LAST_STATUS[service]["time"] = currtime
else:
ret.append(ret_dict)
if "time" in LAST_STATUS[service]:
elapsedtime = int(round(currtime - LAST_STATUS[service]["time"]))
if elapsedtime > service_config["delay"]:
del LAST_STATUS[service]["time"]
ret.append(ret_dict)
else:
ret.append(ret_dict)
return ret
|
Python
| 0 |
@@ -1,28 +1,4 @@
-# -*- coding: utf-8 -*-%0A
%22%22%22%0A
@@ -41,88 +41,8 @@
%22%22%22%0A
-%0A# Import Python Libs%0Afrom __future__ import absolute_import, unicode_literals%0A%0A
impo
@@ -79,44 +79,8 @@
me%0A%0A
-from salt.ext.six.moves import map%0A%0A
log
@@ -112,40 +112,8 @@
e__)
- # pylint: disable=invalid-name
%0A%0ALA
|
b80608605827d138eca6daab2633660716660721
|
Fix plotting test.
|
src/tests/unit_tests/plotting_test.py
|
src/tests/unit_tests/plotting_test.py
|
import nose.tools
import unittest
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import mia
from mia.plotting import *
from ..test_utils import get_file_path
class IOTests(unittest.TestCase):
@classmethod
def setupClass(cls):
blobs_path = get_file_path("blob_detection.csv")
cls._df = pd.DataFrame.from_csv(blobs_path)
        # run plots on a separate thread
plt.ion()
def tearDown(cls):
# close plots after test is run
plt.close()
def test_plot_multiple_images(self):
img1 = np.random.rand(10, 10)
img2 = np.random.rand(10, 10)
plot_multiple_images([img1, img2])
def test_plot_region_props(self):
img = np.random.rand(10, 10)
region = [2, 2, 5, 5]
plot_region_props(img, [region])
def test_plot_linear_structure(self):
img = np.random.rand(10, 10)
line_image = np.zeros(img.shape)
line_image[5, 5] = 1
plot_linear_structure(img, line_image)
def test_plot_blobs(self):
img = np.random.rand(10, 10)
blob = [5, 5, 3]
plot_blobs(img, [blob])
def test_plot_image_orthotope(self):
img1 = np.random.rand(10, 10)
img2 = np.random.rand(10, 10)
img3 = np.random.rand(10, 10)
img4 = np.random.rand(10, 10)
cube = np.array([[img1, img2], [img3, img4]])
plot_image_orthotope(cube)
def test_plot_image_orthotope_with_titles(self):
img1 = np.random.rand(10, 10)
img2 = np.random.rand(10, 10)
img3 = np.random.rand(10, 10)
img4 = np.random.rand(10, 10)
titles = ['x', 'y', 'w', 'u']
cube = np.array([[img1, img2], [img3, img4]])
plot_image_orthotope(cube, titles)
def test_plot_risk_classes(self):
self._df['class'] = np.random.randint(4, self._df.shape[0])
plot_risk_classes(self._df, ['class'])
def test_plot_risk_classes_single(self):
self._df['class'] = np.random.randint(4, self._df.shape[0])
plot_risk_classes_single(self._df, ['class'])
def test_plot_scatter_2d(self):
data = np.random.rand(10, 2)
df = pd.DataFrame(data, columns=['x', 'y'])
df['class'] = np.random.randint(4, self._df.shape[0])
plot_scatter_2d(df, ['x', 'y'], df['class'])
def test_plot_scatter_2d_with_annotate(self):
data = np.random.rand(10, 2)
df = pd.DataFrame(data, columns=['x', 'y'])
df['class'] = np.random.randint(4, self._df.shape[0])
plot_scatter_2d(df, ['x', 'y'], df['class'], annotate=True)
def test_plot_scatter_2d_incorrect_dimensions(self):
data = np.random.rand(10, 3)
df = pd.DataFrame(data, columns=['x', 'y', 'z'])
df['class'] = np.random.randint(4, self._df.shape[0])
try:
plot_scatter_2d(df, ['x', 'y', 'z'], df['class'])
except ValueError, e:
nose.tools.assert_equal(e.message,
"Number of columns must be exactly 2")
def test_plot_scatter_3d(self):
data = np.random.rand(10, 3)
df = pd.DataFrame(data, columns=['x', 'y', 'z'])
df['class'] = np.random.randint(4, self._df.shape[0])
plot_scatter_3d(df, ['x', 'y', 'z'], df['class'])
def test_plot_scatter_3d_incorrect_dimensions(self):
data = np.random.rand(10, 4)
df = pd.DataFrame(data, columns=['x', 'y', 'z', 'w'])
df['class'] = np.random.randint(4, self._df.shape[0])
try:
plot_scatter_3d(df, ['x', 'y', 'z', 'w'], df['class'])
except ValueError, e:
nose.tools.assert_equal(e.message,
"Number of columns must be exactly 3")
def test_plot_mapping_2d(self):
data = np.random.rand(10, 2)
df = pd.DataFrame(data, columns=['x', 'y'])
labels = np.random.randint(4, self._df.shape[0])
index_a = np.arange(5)
index_b = np.arange(5, 10)
plot_mapping_2d(df, index_a, index_b, labels)
def test_plot_mapping_3d(self):
data = np.random.rand(10, 3)
df = pd.DataFrame(data, columns=['x', 'y', 'z'])
labels = np.random.randint(4, self._df.shape[0])
index_a = np.arange(5)
index_b = np.arange(5, 10)
plot_mapping_3d(df, index_a, index_b, labels)
|
Python
| 0.000002 |
@@ -3880,32 +3880,42 @@
labels =
+pd.Series(
np.random.randin
@@ -3929,32 +3929,33 @@
lf._df.shape%5B0%5D)
+)
%0A%0A index_
@@ -3950,32 +3950,42 @@
index_a =
+pd.Series(
np.arange(5)%0A
@@ -3972,32 +3972,33 @@
ies(np.arange(5)
+)
%0A index_b
@@ -3992,32 +3992,42 @@
index_b =
+pd.Series(
np.arange(5, 10)
@@ -4018,32 +4018,33 @@
np.arange(5, 10)
+)
%0A%0A plot_m
@@ -4231,16 +4231,26 @@
abels =
+pd.Series(
np.rando
@@ -4280,16 +4280,17 @@
hape%5B0%5D)
+)
%0A%0A
@@ -4301,16 +4301,26 @@
dex_a =
+pd.Series(
np.arang
@@ -4323,16 +4323,17 @@
range(5)
+)
%0A
@@ -4343,16 +4343,26 @@
dex_b =
+pd.Series(
np.arang
@@ -4365,24 +4365,25 @@
range(5, 10)
+)
%0A%0A pl
|
e8ead2bfed01d2b9b71b066b525ce4356615b5b7
|
Allow for tcp transport in publish
|
salt/modules/publish.py
|
salt/modules/publish.py
|
# -*- coding: utf-8 -*-
'''
Publish a command from a minion to a target
'''
from __future__ import absolute_import
# Import python libs
import time
import logging
# Import salt libs
import salt.crypt
import salt.payload
import salt.transport
import salt.utils.args
from salt.exceptions import SaltReqTimeoutError
log = logging.getLogger(__name__)
__virtualname__ = 'publish'
def __virtual__():
return __virtualname__ if __opts__.get('transport', '') == 'zeromq' else False
def _parse_args(arg):
'''
yamlify `arg` and ensure it's outermost datatype is a list
'''
yaml_args = salt.utils.args.yamlify_arg(arg)
if yaml_args is None:
return []
elif not isinstance(yaml_args, list):
return [yaml_args]
else:
return yaml_args
def _publish(
tgt,
fun,
arg=None,
expr_form='glob',
returner='',
timeout=5,
form='clean',
wait=False):
'''
    Publish a command from the minion out to other minions. Publications need
    to be enabled on the Salt master and the minion needs to have permission
    to publish the command. The Salt master will also prevent a recursive
    publication loop; this means that a minion cannot command another minion
to command another minion as that would create an infinite command loop.
The arguments sent to the minion publish function are separated with
commas. This means that for a minion executing a command with multiple
args it will look like this::
salt system.example.com publish.publish '*' user.add 'foo,1020,1020'
CLI Example:
.. code-block:: bash
salt system.example.com publish.publish '*' cmd.run 'ls -la /tmp'
'''
if 'master_uri' not in __opts__:
log.error('Cannot run publish commands without a connection to a salt master. No command sent.')
return {}
if fun.startswith('publish.'):
log.info('Cannot publish publish calls. Returning {}')
return {}
arg = _parse_args(arg)
log.info('Publishing {0!r} to {master_uri}'.format(fun, **__opts__))
auth = salt.crypt.SAuth(__opts__)
tok = auth.gen_token('salt')
load = {'cmd': 'minion_pub',
'fun': fun,
'arg': arg,
'tgt': tgt,
'tgt_type': expr_form,
'ret': returner,
'tok': tok,
'tmo': timeout,
'form': form,
'id': __opts__['id']}
channel = salt.transport.Channel.factory(__opts__)
try:
peer_data = channel.send(load)
except SaltReqTimeoutError:
return '{0!r} publish timed out'.format(fun)
if not peer_data:
return {}
# CLI args are passed as strings, re-cast to keep time.sleep happy
if wait:
loop_interval = 0.3
matched_minions = set(peer_data['minions'])
returned_minions = set()
loop_counter = 0
while len(returned_minions ^ matched_minions) > 0:
load = {'cmd': 'pub_ret',
'id': __opts__['id'],
'tok': tok,
'jid': peer_data['jid']}
ret = channel.send(load)
returned_minions = set(ret.keys())
end_loop = False
if returned_minions >= matched_minions:
end_loop = True
elif (loop_interval * loop_counter) > timeout:
# This may be unnecessary, but I am paranoid
if len(returned_minions) < 1:
return {}
end_loop = True
if end_loop:
if form == 'clean':
cret = {}
for host in ret:
cret[host] = ret[host]['ret']
return cret
else:
return ret
loop_counter = loop_counter + 1
time.sleep(loop_interval)
else:
time.sleep(float(timeout))
load = {'cmd': 'pub_ret',
'id': __opts__['id'],
'tok': tok,
'jid': peer_data['jid']}
ret = channel.send(load)
if form == 'clean':
cret = {}
for host in ret:
cret[host] = ret[host]['ret']
return cret
else:
return ret
def publish(tgt, fun, arg=None, expr_form='glob', returner='', timeout=5):
'''
Publish a command from the minion out to other minions.
Publications need to be enabled on the Salt master and the minion
needs to have permission to publish the command. The Salt master
    will also prevent a recursive publication loop; this means that a
minion cannot command another minion to command another minion as
that would create an infinite command loop.
The expr_form argument is used to pass a target other than a glob into
    the execution. The available options are:
- glob
- pcre
- grain
- grain_pcre
- pillar
- pillar_pcre
- ipcidr
- range
- compound
    Note that pillar matches must be exact, both in the pillar matcher
and the compound matcher. No globbing is supported.
The arguments sent to the minion publish function are separated with
commas. This means that for a minion executing a command with multiple
args it will look like this:
.. code-block:: bash
salt system.example.com publish.publish '*' user.add 'foo,1020,1020'
salt system.example.com publish.publish 'os:Fedora' network.interfaces '' grain
CLI Example:
.. code-block:: bash
salt system.example.com publish.publish '*' cmd.run 'ls -la /tmp'
.. admonition:: Attention
If you need to pass a value to a function argument and that value
contains an equal sign, you **must** include the argument name.
For example:
.. code-block:: bash
salt '*' publish.publish test.kwarg arg='cheese=spam'
Multiple keyword arguments should be passed as a list.
.. code-block:: bash
salt '*' publish.publish test.kwarg arg="['cheese=spam','spam=cheese']"
'''
return _publish(tgt,
fun,
arg=arg,
expr_form=expr_form,
returner=returner,
timeout=timeout,
form='clean',
wait=True)
def full_data(tgt, fun, arg=None, expr_form='glob', returner='', timeout=5):
'''
Return the full data about the publication, this is invoked in the same
way as the publish function
CLI Example:
.. code-block:: bash
salt system.example.com publish.full_data '*' cmd.run 'ls -la /tmp'
.. admonition:: Attention
If you need to pass a value to a function argument and that value
contains an equal sign, you **must** include the argument name.
For example:
.. code-block:: bash
salt '*' publish.full_data test.kwarg arg='cheese=spam'
'''
return _publish(tgt,
fun,
arg=arg,
expr_form=expr_form,
returner=returner,
timeout=timeout,
form='full',
wait=True)
def runner(fun, arg=None, timeout=5):
'''
Execute a runner on the master and return the data from the runner
function
CLI Example:
.. code-block:: bash
salt publish.runner manage.down
'''
arg = _parse_args(arg)
if 'master_uri' not in __opts__:
return 'No access to master. If using salt-call with --local, please remove.'
log.info('Publishing runner {0!r} to {master_uri}'.format(fun, **__opts__))
auth = salt.crypt.SAuth(__opts__)
tok = auth.gen_token('salt')
load = {'cmd': 'minion_runner',
'fun': fun,
'arg': arg,
'tok': tok,
'tmo': timeout,
'id': __opts__['id']}
channel = salt.transport.Channel.factory(__opts__)
try:
return channel.send(load)
except SaltReqTimeoutError:
return '{0!r} runner publish timed out'.format(fun)
|
Python
| 0 |
@@ -457,19 +457,20 @@
'')
-==
+in (
'zeromq'
els
@@ -465,16 +465,24 @@
'zeromq'
+, 'tcp')
else Fa
|
97c5cb0312d7b093752376a373cc3773fcf44f34
|
Add SunOS to the basic service module
|
salt/modules/service.py
|
salt/modules/service.py
|
'''
The default service module. If not otherwise specified, salt will fall back
to this basic module.
'''
import os
grainmap = {
'Arch': '/etc/rc.d',
'Debian': '/etc/init.d',
'Fedora': '/etc/init.d',
'RedHat': '/etc/init.d',
'Ubuntu': '/etc/init.d',
'Gentoo': '/etc/init.d',
'CentOS': '/etc/init.d',
}
def __virtual__():
'''
    Only work on systems which do not have a more specific service module
'''
# Disable on these platforms, specific service modules exist:
disable = [
'RedHat',
'CentOS',
'Scientific',
'Fedora',
'Gentoo',
'Ubuntu',
'FreeBSD',
'Windows',
]
if __grains__['os'] in disable:
return False
return 'service'
def start(name):
'''
Start the specified service
CLI Example::
salt '*' service.start <service name>
'''
cmd = os.path.join(grainmap[__grains__['os']],
name + ' start')
return not __salt__['cmd.retcode'](cmd)
def stop(name):
'''
Stop the specified service
CLI Example::
salt '*' service.stop <service name>
'''
cmd = os.path.join(grainmap[__grains__['os']],
name + ' stop')
return not __salt__['cmd.retcode'](cmd)
def restart(name):
'''
Restart the named service
CLI Example::
salt '*' service.restart <service name>
'''
cmd = os.path.join(grainmap[__grains__['os']],
name + ' restart')
return not __salt__['cmd.retcode'](cmd)
def status(name, sig=None):
'''
    Return the status for a service: the PID if the service is running, or an
    empty string if it is not. Pass a signature to use to find the service via
    ps.
CLI Example::
salt '*' service.status <service name> [service signature]
'''
sig = name if not sig else sig
cmd = "{0[ps]} | grep {1} | grep -v grep | awk '{{print $2}}'".format(
__grains__, sig)
return __salt__['cmd.run'](cmd).strip()
|
Python
| 0 |
@@ -362,32 +362,67 @@
'/etc/init.d',%0A
+ 'SunOS': '/etc/init.d',%0A
%7D%0A%0Adef
|
d90be6a8765e3c22b092221db44c3d40917ef45e
|
Fix bug 1659: set correct language as source
|
scripts/harvest_template.py
|
scripts/harvest_template.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Usage:
python harvest_template.py -lang:nl -template:"Taxobox straalvinnige" orde P70 familie P71 geslacht P74
This will work on all pages that transclude the template in the article
namespace
You can use any typical pagegenerator to provide with a list of pages:
python harvest_template.py -lang:nl -cat:Sisoridae -template:"Taxobox straalvinnige" -namespace:0 orde P70 familie P71 geslacht P74
"""
#
# (C) 2013 Multichill, Amir
# (C) 2013 Pywikipediabot team
#
# Distributed under the terms of MIT License.
#
__version__ = '$Id$'
#
import re
import json
import pywikibot
from pywikibot import pagegenerators as pg
class HarvestRobot:
"""
A bot to add Wikidata claims
"""
def __init__(self, generator, templateTitle, fields):
"""
Arguments:
* generator - A generator that yields Page objects.
* templateTitle - The template to work on
* fields - A dictionary of fields that are of use to us
"""
self.generator = generator
self.templateTitle = templateTitle.replace(u'_', u' ')
# TODO: Make it a list which also includes the redirects to the template
self.fields = fields
self.repo = pywikibot.Site().data_repository()
self.source = None
self.setSource(pywikibot.Site().language())
def setSource(self, lang):
'''
Get the source
'''
page = pywikibot.Page(self.repo, 'Wikidata:List of wikis/python')
source_values = json.loads(page.get())
source_values = source_values['wikipedia']
for lang in source_values:
source_values[lang] = pywikibot.ItemPage(self.repo, source_values[lang])
if lang in source_values:
self.source = pywikibot.Claim(self.repo, 'p143')
self.source.setTarget(source_values.get(lang))
def run(self):
"""
Starts the robot.
"""
for page in self.generator:
self.procesPage(page)
def procesPage(self, page):
"""
        Process a single page
"""
item = pywikibot.ItemPage.fromPage(page)
pywikibot.output('Processing %s' % page)
if not item.exists():
pywikibot.output('%s doesn\'t have a wikidata item :(' % page)
#TODO FIXME: We should provide an option to create the page
else:
pagetext = page.get()
templates = pywikibot.extract_templates_and_params(pagetext)
for (template, fielddict) in templates:
# We found the template we were looking for
if template.replace(u'_', u' ') == self.templateTitle:
for field, value in fielddict.items():
# This field contains something useful for us
if field in self.fields:
# Check if the property isn't already set
claim = pywikibot.Claim(self.repo, self.fields[field])
if claim.getID() in item.get().get('claims'):
pywikibot.output(
u'A claim for %s already exists. Skipping'
% claim.getID())
# TODO FIXME: This is a very crude way of dupe
# checking
else:
if claim.getType() == 'wikibase-item':
# Try to extract a valid page
match = re.search(pywikibot.link_regex, value)
if match:
try:
link = pywikibot.Link(match.group(1))
linkedPage = pywikibot.Page(link)
if linkedPage.isRedirectPage():
linkedPage = linkedPage.getRedirectTarget()
linkedItem = pywikibot.ItemPage.fromPage(linkedPage)
claim.setTarget(linkedItem)
except pywikibot.exceptions.NoPage:
pywikibot.output('[[%s]] doesn\'t exist so I can\'t link to it' % (linkedItem.title(),))
continue
elif claim.getType() == 'string':
claim.setTarget(value.strip())
else:
print "%s is not a supported datatype." % claim.getType()
continue
pywikibot.output('Adding %s --> %s' % (claim.getID(), claim.getTarget()))
item.addClaim(claim)
if self.source:
claim.addSource(self.source, bot=True)
def main():
gen = pg.GeneratorFactory()
commandline_arguments = list()
templateTitle = u''
for arg in pywikibot.handleArgs():
if arg.startswith('-template'):
if len(arg) == 9:
templateTitle = pywikibot.input(
u'Please enter the template to work on:')
else:
templateTitle = arg[10:]
elif gen.handleArg(arg):
continue
else:
commandline_arguments.append(arg)
if len(commandline_arguments) % 2 or not templateTitle:
raise ValueError # or something.
fields = dict()
for i in xrange(0, len(commandline_arguments), 2):
fields[commandline_arguments[i]] = commandline_arguments[i + 1]
generator = gen.getCombinedGenerator()
if not generator:
# TODO: Build a transcluding generator based on templateTitle
return
bot = HarvestRobot(generator, templateTitle, fields)
bot.run()
if __name__ == "__main__":
main()
|
Python
| 0.001339 |
@@ -1636,16 +1636,23 @@
for
+source_
lang in
@@ -1688,24 +1688,31 @@
urce_values%5B
+source_
lang%5D = pywi
@@ -1751,16 +1751,23 @@
_values%5B
+source_
lang%5D)%0A%0A
|
6ddb38eee5624ea8753e52cfde4b8d17c0ac2b14
|
Making salt.output.json_out python3 compatible
|
salt/output/json_out.py
|
salt/output/json_out.py
|
# -*- coding: utf-8 -*-
'''
Display return data in JSON format
==================================
:configuration: The output format can be configured in two ways:
Using the ``--out-indent`` CLI flag and specifying a positive integer or a
negative integer to group JSON from each minion to a single line.
Or setting the ``output_indent`` setting in the Master or Minion
configuration file with one of the following values:
* ``Null``: put each minion return on a single line.
* ``pretty``: use four-space indents and sort the keys.
* An integer: specify the indentation level.
Salt's outputters operate on a per-minion basis. Each minion return will be
output as a single JSON object once it comes in to the master.
Some JSON parsers can guess when an object ends and a new one begins but many
can not. A good way to differentiate between each minion return is to use the
single-line output format and to parse each line individually. Example output
(truncated)::
{"dave": {"en0": {"hwaddr": "02:b0:26:32:4c:69", ...}}}
{"jerry": {"en0": {"hwaddr": "02:26:ab:0d:b9:0d", ...}}}
{"kevin": {"en0": {"hwaddr": "02:6d:7f:ce:9f:ee", ...}}}
{"mike": {"en0": {"hwaddr": "02:48:a2:4b:70:a0", ...}}}
{"phill": {"en0": {"hwaddr": "02:1d:cc:a2:33:55", ...}}}
{"stuart": {"en0": {"hwaddr": "02:9a:e0:ea:9e:3c", ...}}}
'''
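# A hedged sketch of the ``output_indent`` values described above, as they
# might appear in the master or minion configuration file (the integer is an
# arbitrary example):
#
#     output_indent: Null      # one line per minion return
#     output_indent: pretty    # four-space indents with sorted keys
#     output_indent: 8         # a custom indentation level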
# Import python libs
import json
import logging
log = logging.getLogger(__name__)
# Define the module's virtual name
__virtualname__ = 'json'
def __virtual__():
'''
Rename to json
'''
return __virtualname__
def output(data):
'''
Print the output data in JSON
'''
try:
if 'output_indent' not in __opts__:
return json.dumps(data, default=repr, indent=4)
indent = __opts__.get('output_indent')
sort_keys = False
if indent is None:
indent = None
elif indent == 'pretty':
indent = 4
sort_keys = True
elif isinstance(indent, int):
if indent >= 0:
indent = indent
else:
indent = None
return json.dumps(data, default=repr, indent=indent, sort_keys=sort_keys)
except TypeError:
log.debug('An error occurred while outputting JSON', exc_info=True)
# Return valid JSON for unserializable objects
return json.dumps({})
|
Python
| 0.998869 |
@@ -1358,16 +1358,55 @@
.%7D%7D%7D%0A'''
+%0Afrom __future__ import absolute_import
%0A%0A# Impo
|
f422535179d9f55e28d5c1b0e098e0a4931d366b
|
Making salt.pillar.cmd_json python3 compatible
|
salt/pillar/cmd_json.py
|
salt/pillar/cmd_json.py
|
# -*- coding: utf-8 -*-
'''
Execute a command and read the output as JSON. The JSON data is then directly overlaid onto the minion's Pillar data.
'''
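# A hedged master-config sketch showing how this external pillar might be
# enabled; the command itself is an assumption:
#
#     ext_pillar:
#       - cmd_json: 'cat /srv/pillar/data.json'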
# Don't "fix" the above docstring to put it on two lines, as the sphinx
# autosummary pulls only the first line for its description.
# Import python libs
import logging
import json
# Set up logging
log = logging.getLogger(__name__)
def ext_pillar(minion_id, # pylint: disable=W0613
pillar, # pylint: disable=W0613
command):
'''
Execute a command and read the output as JSON
'''
try:
return json.loads(__salt__['cmd.run'](command))
except Exception:
log.critical(
'JSON data from {0} failed to parse'.format(command)
)
return {}
|
Python
| 0.998816 |
@@ -142,16 +142,55 @@
ata.%0A'''
+%0Afrom __future__ import absolute_import
%0A%0A# Don'
|
e40a9b3676101d7d7bd65cff8487f48a285f3139
|
Fix typo
|
scripts/obtain_user_auth.py
|
scripts/obtain_user_auth.py
|
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This program obtains a set of user credentials.
These credentials are needed to run the system test for OAuth2 credentials.
It's expected that a developer will run this program manually once to obtain
a refresh token. It's highly recommended to use a Google account created
specifically created for testing.
"""
import json
import os
from oauth2client import client
from oauth2client import tools
HERE = os.path.dirname(__file__)
CLIENT_SECRETS_PATH = os.path.abspath(os.path.join(
HERE, '..', 'system_tests', 'data', 'client_secret.json'))
AUTHORIZED_USER_PATH = os.path.abspath(os.path.join(
HERE, '..', 'system_tests', 'data', 'authorized_user.json'))
SCOPES = ['email', 'profile']
class NullStorage(client.Storage):
"""Null storage implementation to prevent oauth2client from failing
on storage.put."""
def locked_put(self, credentials):
pass
def main():
flow = client.flow_from_clientsecrets(CLIENT_SECRETS_PATH, SCOPES)
print('Starting credentials flow...')
credentials = tools.run_flow(flow, NullStorage())
# Save the credentials in the same format as the Cloud SDK's authorized
# user file.
data = {
'type': 'authorized_user',
'client_id': flow.client_id,
'client_secret': flow.client_secret,
'refresh_token': credentials.refresh_token
}
with open(AUTHORIZED_USER_PATH, 'w') as fh:
json.dump(data, fh, indent=4)
print('Created {}.'.format(AUTHORIZED_USER_PATH))
if __name__ == '__main__':
main()
|
Python
| 0.999999 |
@@ -865,16 +865,8 @@
lly
-created
for
|
8ab674582e88582f06d729237d9cb1c00562451e
|
fix pbs dependency stuff
|
util/pbs/pbs.py
|
util/pbs/pbs.py
|
# Copyright (c) 2005 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Nathan Binkert
import os, popen2, re, sys
class MyPOpen(object):
def __init__(self, cmd, input = None, output = None, bufsize = -1):
self.status = -1
if input is None:
p2c_read, p2c_write = os.pipe()
self.tochild = os.fdopen(p2c_write, 'w', bufsize)
else:
p2c_write = None
if isinstance(input, file):
p2c_read = input.fileno()
elif isinstance(input, str):
input = file(input, 'r')
p2c_read = input.fileno()
elif isinstance(input, int):
p2c_read = input
else:
raise AttributeError
if output is None:
c2p_read, c2p_write = os.pipe()
self.fromchild = os.fdopen(c2p_read, 'r', bufsize)
else:
c2p_read = None
if isinstance(output, file):
c2p_write = output.fileno()
elif isinstance(output, str):
output = file(output, 'w')
c2p_write = output.fileno()
elif isinstance(output, int):
c2p_write = output
else:
raise AttributeError
self.pid = os.fork()
if self.pid == 0:
os.dup2(p2c_read, sys.stdin.fileno())
os.dup2(c2p_write, sys.stdout.fileno())
os.dup2(c2p_write, sys.stderr.fileno())
try:
os.execvp(cmd[0], cmd)
finally:
os._exit(1)
os.close(p2c_read)
os.close(c2p_write)
def poll(self):
if self.status < 0:
pid, status = os.waitpid(self.pid, os.WNOHANG)
if pid == self.pid:
self.status = status
return self.status
def wait(self):
if self.status < 0:
pid, status = os.waitpid(self.pid, 0)
if pid == self.pid:
self.status = status
return self.status
class qsub:
def __init__(self):
self.afterok = None
self.hold = False
self.join = False
self.keep_stdout = False
self.keep_stderr = False
self.node_type = None
self.mail_abort = False
self.mail_begin = False
self.mail_end = False
self.name = None
self.stdout = None
self.priority = None
self.queue = None
self.pbshost = None
self.qsub = 'qsub'
self.env = {}
def build(self, script, args = []):
self.cmd = [ self.qsub ]
if self.env:
arg = '-v'
arg += ','.join([ '%s=%s' % i for i in self.env.iteritems() ])
self.cmd.append(arg)
if self.hold:
self.cmd.append('-h')
if self.stdout:
self.cmd.append('-olocalhost:' + self.stdout)
if self.keep_stdout and self.keep_stderr:
self.cmd.append('-koe')
elif self.keep_stdout:
self.cmd.append('-ko')
elif self.keep_stderr:
self.cmd.append('-ke')
else:
self.cmd.append('-kn')
if self.join:
self.cmd.append('-joe')
if self.node_type:
self.cmd.append('-lnodes=' + self.node_type)
if self.mail_abort or self.mail_begin or self.mail_end:
flags = ''
if self.mail_abort:
flags.append('a')
if self.mail_begin:
flags.append('b')
if self.mail_end:
flags.append('e')
if len(flags):
self.cmd.append('-m ' + flags)
if self.name:
self.cmd.append("-N%s" % self.name)
if self.priority:
self.cmd.append('-p' + self.priority)
if self.queue:
self.cmd.append('-q' + self.queue)
if self.afterok:
self.cmd.append('-Wdepend=afterok:%s' % self.after)
self.cmd.extend(args)
self.script = script
self.command = ' '.join(self.cmd + [ self.script ])
def do(self):
pbs = MyPOpen(self.cmd + [ self.script ])
self.result = pbs.fromchild.read()
ec = pbs.wait()
if ec != 0 and self.pbshost:
cmd = ' '.join(self.cmd + [ '-' ])
cmd = [ 'ssh', '-x', self.pbshost, cmd ]
self.command = ' '.join(cmd)
ssh = MyPOpen(cmd, input = self.script)
self.result = ssh.fromchild.read()
ec = ssh.wait()
return ec
|
Python
| 0.000009 |
@@ -5456,16 +5456,18 @@
lf.after
+ok
)%0A%0A
|
070605a66393c588ed7cdc9bc061e1bfbf105866
|
Remove space
|
app/lib/twitter_api/search.py
|
app/lib/twitter_api/search.py
|
# -*- coding: utf-8 -*-
"""
Search Tweets application file.
Search for tweets in the Twitter API based on a query string and return the
tweepy tweet objects, which have an author attribute.
See the search docs in this project for details on search syntax and links
to the Twitter developer docs.
"""
from __future__ import absolute_import
from __future__ import print_function
import datetime
import logging
import tweepy
from lib.config import AppConf
conf = AppConf()
logger = logging.getLogger("lib.twitter.search")
def getSearchQueryHelp(argName='--query'):
"""
Return help text, as a guide for search queries which can be safely
entered on the command-line and conform to the Twitter Search API rules.
See search.py docstring for more info.
Multiple words could technically be entered without quotes and joined
and as hashtag or double quotes could be escaped with a backslash.
But it is simplest to always expect it the input already as a single
quoted string, rather than as a list.
:param argName: Name of argument to be inserted into the output template,
based on where the help is shown to the user.
:return: Help text as string, with argName substituted in.
"""
return """\
Note that text with multiple words, double quotes or a hashtag symbol
must be inside a quoted string, as shown below. The search is not case
sensitive.
Examples:
single term
{0} wordA
{0} '#abc'
{0} @handleA
AND terms, found in a tweet in no specific order
{0} 'wordA wordB wordC wordD'
{0} 'to:handleA wordA'
{0} 'from:handleA wordA'
OR terms
{0} 'wordA OR wordB'
{0} '#def OR xyz OR #ghi'
Exclusion
{0} 'wordA -wordB'
AND on groupings
{0} '(wordA OR wordB) (wordC OR house)'
{0} '(wordA OR wordB) -(wordC OR wordD OR wordE)'
Exact match
{0} '"My Quote"'
{0} '"My Quote" OR "Another quote" OR wordC'
Note that for the last case, double-quoted phrases must be *before*
ordinary terms, due to a known Twitter Search API bug.
To and from are provided by the Twitters docs. Using '@' and a handle may
provide the say as 'to:' but I have not tested. Using '@' might include tweets
by the user too.
Note you may wish to leave off the '@' to get more results which are still relevant.
When combining AND and OR functionality in a single rule, AND logic is
evaluated first, such that 'wordA OR wordB wordC' is equivalent to
'wordA OR (wordB wordC)'. Though, braces are preferred for readability.
""".format(argName)
def fetchTweetsPaging(APIConn, searchQuery, pageCount=1, extended=True):
"""
Search for tweets in Twitter API and yield a page of results.
Though the Cursor object is a generator, it is fine to add generator on top
of it, even using a conditional statement if necessary.
See https://pybit.es/generators.html for their Cursor example.
The Cursor object here is wrapped in a generator so that the duration for
each query request can be logged. We set the current time before looping
back to the start of the for loop where the query is done. Note that
any time between the yield statement and setting of queryStartTime is
ignored, meaning the duration logged is for the request alone and excludes
time to process the data.
:param APIConn: authorised API connection.
:param searchQuery: tweet text to search, following Twitter REST API search
format, as string.
:param pageCount: Count pages of tweets to fetch. Each page contains 100
tweets, which is the Search API's limit.
:param extended: If True, get the expanded tweet message instead of the
truncated form.
:return page: tweepy.Cursor object. Iterate over this to do a query for a
page of 100 tweets and return the page as a list of tweets objects
in the current iteration. If there are no more pages to return,
a completion message is printed and None is returned.
"""
assert APIConn, ("Authenticate with Twitter API before doing"
" a search for tweets.")
# Be verbose with printing and logging the start and end of each search.
# But, log without printing when doing a request for a page, since there
    # might be a lot to do.
message = "Starting Search. Expected pages: {pageCount:,d}. "\
" Expected tweets: {tweetCount:,d}.".format(
pageCount=pageCount,
tweetCount=pageCount * 100
)
print(message)
logger.info(message)
params = {'tweet_mode': 'extended'} if extended else {}
# TODO: Move these comments to Github project notes.
# TODO: Move these out to a function handles optional values and validates
# them before sending to the API.
# If running daily, then consider putting a date limit or tweet ID limit
# to get just 1 day of data. Except for the first time when you want
# all 7 days.
params['result_type'] = conf.get('APIRequests', 'searchResultsType')
# TODO: Look at cache functionality in tweepy. And possibly writing out
# last processed twitter ID so that in case of error the search and start
# from there instead of the beginning.
# TODO: Work around edgecase of bad data.
# tweepy.error.TweepError: Failed to parse JSON payload: Unterminated
# string starting at: line 1 column 592381 (char 592380)
# TODO: Handle foreign characters - see how it is printed or opened in
# CSV editor, text editor, etc. In particular Russian characters.
cursor = tweepy.Cursor(
APIConn.search,
q=searchQuery,
count=100,
**params
).pages(pageCount)
startTime = queryStartTime = datetime.datetime.now()
i = -1
for i, page in enumerate(cursor):
queryDuration = datetime.datetime.now() - queryStartTime
logger.info(
"Retrieved tweets from Search API. Page number: {pageNumber}."
" Request duration: {duration:3.2f}s.".format(
pageNumber=i + 1,
duration=queryDuration.total_seconds()
)
)
yield page
queryStartTime = datetime.datetime.now()
duration = datetime.datetime.now() - startTime
message = "Completed Search. Total received pages: {actualPages}."\
" Total duration: {duration}.".format(
actualPages=i + 1,
duration=str(duration)
)
print(message)
logger.info(message)
|
Python
| 0.032829 |
@@ -4399,17 +4399,16 @@
unt:,d%7D.
-
%22%5C%0A
|
f1e1513cf739b8f25b9364226cc8ce987a47fa56
|
Fix check for helpers with staff perms
|
utils/checks.py
|
utils/checks.py
|
import discord
from discord import app_commands
from discord.ext import commands
from utils.configuration import StaffRank
from typing import Union, TYPE_CHECKING
if TYPE_CHECKING:
from kurisu import Kurisu
class InsufficientStaffRank(commands.CheckFailure):
message: str
def is_staff(role: str):
async def predicate(ctx: commands.Context):
if check_staff(ctx.bot, role, ctx.author.id) or (ctx.guild and ctx.author == ctx.guild.owner):
return True
raise InsufficientStaffRank(f"You must be at least {role} to use this command.")
return commands.check(predicate)
def is_staff_app(role: str):
async def predicate(interaction: discord.Interaction) -> bool:
if (interaction.guild and interaction.user == interaction.guild.owner) or check_staff(interaction.client, role, interaction.user.id): # type: ignore
return True
raise InsufficientStaffRank(f"You must be at least {role} to use this command.")
return app_commands.check(predicate)
def check_staff(bot: 'Kurisu', role: str, user_id: int) -> bool:
if bot.configuration.helpers.get(user_id):
position = StaffRank.Helper
else:
position = bot.configuration.staff.get(user_id)
if position is None:
return False
return position <= StaffRank[role]
async def check_bot_or_staff(ctx: Union[commands.Context, discord.Interaction], target: Union[discord.Member, discord.User], action: str):
bot = ctx.bot if isinstance(ctx, commands.Context) else ctx.client
if target.bot:
who = "a bot"
elif check_staff(bot, "Helper", target.id):
who = "another staffer"
else:
return False
if isinstance(ctx, commands.Context):
await ctx.send(f"You can't {action} {who} with this command!")
else:
await ctx.response.send_message(f"You can't {action} {who} with this command!", ephemeral=True)
return True
def check_if_user_can_sr():
async def predicate(ctx):
author = ctx.author
if not check_staff(ctx.bot, 'Helper', author.id) and (ctx.bot.roles['Verified'] not in author.roles) and (
ctx.bot.roles['Trusted'] not in author.roles) and (ctx.bot.roles['Retired Staff'] not in author.roles):
return False
return True
return commands.check(predicate)
def check_if_user_can_ready():
async def predicate(ctx):
channel = ctx.channel
if channel != ctx.bot.channels['newcomers']:
return False
return True
return commands.check(predicate)
|
Python
| 0 |
@@ -1086,18 +1086,87 @@
ol:%0A
-if
+position = bot.configuration.staff.get(user_id)%0A if not position and
bot.con
@@ -1238,75 +1238,8 @@
per%0A
- else:%0A position = bot.configuration.staff.get(user_id)%0A%0A
|
935ee603885b8af15376ce950eba1c413c9aa310
|
Set connection for workers.
|
samples/mnist/worker.py
|
samples/mnist/worker.py
|
# -*- coding: utf-8 -*-
import inspyred
import inspyred.ec.cea_parallel_evaluator
import pyvotune
import pyvotune.sklearn
import random
import sys
import redis
import time
from sklearn.cross_validation import train_test_split
from sklearn.pipeline import Pipeline
import sklearn.datasets
import multiprocessing
from loader import load_mnist
log = pyvotune.log.logger()
def generator(random, args):
gen = args['pyvotune_generator']
genome = gen.generate(max_retries=150)
if not genome:
print "ERROR: Failed to generate a genome after 50 tries"
sys.exit(0)
return genome
@inspyred.ec.evaluators.evaluator
def evaluator(candidate, args):
try:
individual = train_candidate(
candidate, args['train_X'], args['train_y'])
if not individual:
print "Failed to train", candidate
return 0.
return test_individual(
individual, args['test_X'], args['test_y'])
except Exception as e:
try:
print "Exception:", e
print candidate
except Exception as e:
print "Exception in exception handler!!!"
print e
return 0.
def train_candidate(candidate, train_X, train_y):
if not candidate.assemble():
print "Candidate failed to assemble", candidate
return
pipeline = Pipeline([
(str(i), s) for i, s in enumerate(candidate.assembled)])
pipeline.fit(train_X, train_y)
return pipeline
def test_individual(pipeline, test_X, test_y, display=False):
observed_y = pipeline.predict(test_X)
f1 = sklearn.metrics.f1_score(test_y, observed_y)
if display:
print sklearn.metrics.classification_report(test_y, observed_y)
return round(f1 * 100., 2)
if __name__ == '__main__':
pyvotune.set_debug(True)
############################
# Load the initial dataset #
############################
X, y = load_mnist()
print "Dataset loaded"
print X.shape
print y.shape
# Split the dataset into training, testing and then validation parts
train_X, temp_X, train_y, temp_y = train_test_split(X, y, test_size=0.25)
print "Split"
test_X, validate_X, test_y, validate_y = train_test_split(
temp_X, temp_y, test_size=0.5)
print "Training", train_X.shape
print "Testing", test_X.shape
print "Validation", validate_X.shape
n_features = X.shape[1]
#################################
# Initialize PyvoTune Generator #
#################################
gen = pyvotune.Generate(
initial_state={
'sparse': False
},
gene_pool=pyvotune.sklearn.get_classifiers(n_features) +
pyvotune.sklearn.get_decomposers(n_features) +
pyvotune.sklearn.get_image_features(n_features) +
pyvotune.sklearn.get_preprocessors(n_features),
max_length=4,
noop_frequency=0.2)
####################################
# Initialize Inspyred Genetic Algo #
####################################
ea = inspyred.ec.cEA(random.Random())
ea.logger = log
ea.terminator = [
#inspyred.ec.terminators.time_termination,
inspyred.ec.terminators.average_fitness_termination
]
ea.selector = inspyred.ec.selectors.fitness_proportionate_selection
ea.observer = pyvotune.observers.stats_observer
# Use PyvoTun variators
ea.variator = [
pyvotune.variators.random_reset_mutation,
pyvotune.variators.param_reset_mutation,
pyvotune.variators.scramble_mutation,
pyvotune.variators.uniform_crossover,
pyvotune.variators.n_point_crossover
]
nprocs = int(multiprocessing.cpu_count() * 2)
#nprocs = 4
con_str = "redis://ip-10-157-0-244.ec2.internal:6379/3"
# Start redis queue workers
pyvotune.evaluators.cea_rq_worker.start_workers(processes=nprocs, con_str=con_str)
while True:
time.sleep(10000000000)
|
Python
| 0 |
@@ -3754,53 +3754,19 @@
r =
-%22redis://ip-10-157-0-244.ec2.internal:6379/3%22
+sys.argv%5B1%5D
%0A%0A
|
8eae3c9e1994a0254b7c05158153d169fc19f974
|
Add --reforms option to openfisca-run-test
|
openfisca_core/scripts/run_test.py
|
openfisca_core/scripts/run_test.py
|
# -*- coding: utf-8 -*-
import argparse
import logging
import sys
import os
import importlib
from openfisca_core.tools.test_runner import run_tests
from openfisca_core.tools import detect_country_packages
def build_parser():
parser = argparse.ArgumentParser()
parser.add_argument('path', help = "paths (files or directories) of tests to execute", nargs = '+')
parser.add_argument('-c', '--country_package', action = 'store', help = 'country package to use to run the test. If not provided, an automatic detection will be attempted by scanning the python packages installed in your environment which name contains the word "openfisca".')
parser.add_argument('-e', '--extensions', action = 'store', help = 'extensions to load, separated by commas (e.g -e "extension_1, extension_2")')
parser.add_argument('-f', '--force', action = 'store_true', default = False,
help = 'force testing of tests with "ignore" flag and formulas belonging to "ignore_output_variables" list')
parser.add_argument('-n', '--name_filter', default = None, help = "partial name of tests to execute. Only tests with the given name_filter in their name, file name, or keywords will be run.")
parser.add_argument('-v', '--verbose', action = 'store_true', default = False, help = "increase output verbosity")
parser.add_argument('-m', '--default_relative_error_margin', help = u"relative error margin to use for tests that don't define any", action = 'store', type = float)
parser.add_argument('-M', '--default_absolute_error_margin', help = u"absolute error margin to use for tests that don't define any", action = 'store', type = float)
return parser
def main():
parser = build_parser()
args = parser.parse_args()
logging.basicConfig(level = logging.DEBUG if args.verbose else logging.WARNING, stream = sys.stdout)
if args.country_package:
try:
country_package = importlib.import_module(args.country_package)
except:
print('ERROR: `{}` does not seem to be a valid Openfisca country package.'.format(args.country_package))
sys.exit(1)
else:
installed_country_packages = detect_country_packages()
if len(installed_country_packages) == 0:
print('ERROR: No country package has been detected on your environment. If your country package is installed but not detected, please use the --country_package option.')
sys.exit(1)
country_package_name = installed_country_packages[0]
country_package = importlib.import_module(country_package_name)
if len(installed_country_packages) > 1:
print('WARNING: Several country packages detected : `{}`. Using `{}` by default. To use another package, please use the --country_package option.'.format(', '.join(installed_country_packages), country_package_name))
tax_benefit_system = country_package.CountryTaxBenefitSystem()
if args.extensions:
extensions = [name.strip(' ') for name in args.extensions.split(',')]
for extension in extensions:
tax_benefit_system.load_extension(extension)
options = {
'verbose': args.verbose,
'force': args.force,
'name_filter': args.name_filter,
'default_relative_error_margin': args.default_relative_error_margin,
'default_absolute_error_margin': args.default_absolute_error_margin,
}
tests_found = False
for path in args.path:
path = os.path.abspath(path)
nb_tests = run_tests(tax_benefit_system, path, options)
tests_found = tests_found or nb_tests > 0
if not tests_found:
print("No tests found!")
sys.exit(1)
sys.exit(0)
if __name__ == "__main__":
main()
|
Python
| 0.000009 |
@@ -796,16 +796,194 @@
n_2%22)')%0A
+ parser.add_argument('-r', '--reforms', action = 'store', help = 'reforms to apply to the country package, separated by commas (e.g -r openfisca_france.reforms.some_reform)')%0A
pars
@@ -3301,16 +3301,615 @@
nsion)%0A%0A
+ if args.reforms:%0A reforms = %5Bname.strip(' ') for name in args.reforms.split(',')%5D%0A for reform_path in reforms:%0A try:%0A %5Breform_package, reform_name%5D = reform_path.rsplit('.', 1)%0A reform_module = importlib.import_module(reform_package)%0A reform = getattr(reform_module, reform_name)%0A tax_benefit_system = reform(tax_benefit_system)%0A except:%0A print('ERROR: %60%7B%7D%60 does not seem to be a valid Openfisca reform for %60%7B%7D%60.'.format(reform_path, country_package.__name__))%0A raise%0A%0A
opti
|
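Decoded, the second hunk of the patch above loads each reform named on the command line by splitting the dotted path, importing the module, and wrapping the tax-benefit system with the reform class. A rough sketch of that dynamic-import pattern; the helper name apply_reforms is mine, and error handling is reduced to the essentials:

import importlib

def apply_reforms(tax_benefit_system, reform_paths):
    # Each entry looks like "some_package.reforms.SomeReform".
    for reform_path in reform_paths:
        reform_package, reform_name = reform_path.rsplit('.', 1)
        reform_module = importlib.import_module(reform_package)
        reform = getattr(reform_module, reform_name)
        # A reform takes the current system and returns a modified one.
        tax_benefit_system = reform(tax_benefit_system)
    return tax_benefit_system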
2031c415144fe7c616fb1b020c9571ced5726654
|
Handle yanked Projects, Versions, and Files
|
warehouse/synchronize/commands.py
|
warehouse/synchronize/commands.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import eventlet
from progress.bar import ShadyBar
from warehouse import create_app, db, script
from warehouse.packages import store
from warehouse.synchronize.fetchers import PyPIFetcher
eventlet.monkey_patch()
class DummyBar(object):
def iter(self, iterable):
for x in iterable:
yield x
def synchronize_project(app, project, fetcher, force=False):
with app.test_request_context():
project = store.project(project)
versions = fetcher.versions(project.name)
for v in versions:
version = store.version(project, fetcher.release(project.name, v))
distributions = fetcher.distributions(project.name, version.version)
for dist in distributions:
distribution = store.distribution(project, version, dist)
# Check if the stored hash matches what the fetcher says
if (force or
distribution.hashes is None or
dist["md5_digest"] != distribution.hashes.get("md5")):
# The fetcher has a different file
store.distribution_file(project, version, distribution,
fetcher.file(dist["url"]))
# Commit our changes
db.session.commit()
def syncer(projects=None, fetcher=None, pool=None, progress=True, force=False):
if pool is None:
pool = eventlet.GreenPool(10)
if fetcher is None:
fetcher = PyPIFetcher()
# Sync the Classifiers
for classifier in fetcher.classifiers():
store.classifier(classifier)
# Commit the classifiers
db.session.commit()
# Sync the Projects/Versions/Files
if not projects:
# TODO(dstufft): Determine how to make this do the "since last sync"
projects = fetcher.projects()
if progress:
bar = ShadyBar("Synchronizing", max=len(projects))
else:
bar = DummyBar()
app = create_app()
with app.app_context():
for project in bar.iter(projects):
pool.spawn_n(synchronize_project, app, project, fetcher, force)
@script.option("--force-download", action="store_true", dest="force")
@script.option("--concurrency", dest="concurrency", type=int, default=10)
@script.option("--no-progress", action="store_false", dest="progress")
@script.option("projects", nargs="*", metavar="project")
def synchronize(projects=None, concurrency=10, progress=True, force=False):
# This is a hack to normalize the incoming projects to unicode
projects = [x.decode("utf-8") for x in projects]
# Create the Pool that Synchronization will use
pool = eventlet.GreenPool(concurrency)
# Run the actual Synchronization
syncer(projects, pool=pool, progress=progress, force=force)
|
Python
| 0 |
@@ -239,16 +239,77 @@
t store%0A
+from warehouse.packages.models import Project, Version, File%0A
from war
@@ -355,16 +355,16 @@
Fetcher%0A
-
%0A%0Aeventl
@@ -1418,16 +1418,1392 @@
rl%22%5D))%0A%0A
+ # Get a list of filesnames%0A filenames = %5Bx%5B%22filename%22%5D for x in distributions%5D%0A%0A # Find what files no longer exist in PyPI to yank them%0A if filenames:%0A # If there any files we use IN%0A files_to_yank = File.query.filter(%0A File.version == version,%0A ~File.filename.in_(filenames),%0A )%0A else:%0A # If there are no filenames we can do a simpler query%0A files_to_yank = File.query.filter(File.version == version)%0A%0A # Actually preform the yanking%0A files_to_yank.update(%7B%22yanked%22: False%7D, synchronize_session=False)%0A%0A # Find what versions no longer exist in PyPI to yank them%0A if versions:%0A # If there are any versions we use IN%0A versions_to_yank = Version.query.filter(%0A Version.project == project,%0A ~Version.version.in_(versions),%0A )%0A else:%0A # If there are no versions we can do a simpler query%0A versions_to_yank = Version.query.filter(Version.project == project)%0A%0A # Actually preform the yanking%0A versions_to_yank.update(%7B%22yanked%22: True%7D, synchronize_session=False)%0A%0A
@@ -3597,16 +3597,16 @@
jects):%0A
-
@@ -3674,16 +3674,268 @@
force)%0A%0A
+ # Yank no longer existing projects (and versions and files)%0A Project.query.filter(%0A ~Project.name.in_(projects)%0A ).update(%7B%22yanked%22: True%7D, synchronize_session=False)%0A%0A # Commit the deletion%0A db.session.commit()%0A%0A
%0A@script
|
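The hunks above flag rows that PyPI no longer reports as yanked, using a "NOT IN the upstream list" filter plus a bulk update (note the record's file hunk writes yanked: False while versions and projects get True, so the sketch below is the general shape, not a copy of the patch). A small self-contained SQLAlchemy illustration with a throwaway in-memory model instead of the warehouse ones; written against SQLAlchemy 1.4+, which is an assumption:

from sqlalchemy import Boolean, Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class File(Base):
    __tablename__ = "files"
    id = Column(Integer, primary_key=True)
    filename = Column(String)
    yanked = Column(Boolean, default=False)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([File(filename="a-1.0.tar.gz"), File(filename="a-1.1.tar.gz")])
session.commit()

# Upstream now only lists a-1.1.tar.gz, so everything else gets yanked.
still_listed = ["a-1.1.tar.gz"]
session.query(File).filter(~File.filename.in_(still_listed)).update(
    {"yanked": True}, synchronize_session=False)
session.commit()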
6cba22ad2c26185f6b3454116c3e31ea14160db8
|
Make collect-sprite-metadata.py work from any directory
|
scripts/collect-sprite-metadata.py
|
scripts/collect-sprite-metadata.py
|
from collections import OrderedDict
import glob
import json
import os
def main():
c = collect()
c.sort(key = lambda x: x[0])
c = OrderedDict(c)
print(json.dumps(c, separators=(',',':')))
def collect():
root = '../build/website/static/sprites'
hitboxes = []
for (dirpath, dirnames, filenames) in os.walk(root):
for fn in glob.glob(dirpath + '/*.json'):
metadata = json.load(open(fn))
name = os.path.relpath(fn, root).replace('\\', '/')[:-5]
hitboxes.append((name, metadata['hitbox']))
return hitboxes
if __name__ == '__main__':
main()
|
Python
| 0.000007 |
@@ -225,17 +225,63 @@
root =
-'
+os.path.dirname(os.path.abspath(__file__)) + '/
../build
@@ -293,14 +293,14 @@
ite/
-static
+assets
/spr
|
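The two small hunks above anchor the sprite directory to the script's own location, so the collector works from any working directory, and rename static to assets. The path-anchoring idiom, sketched on its own:

import os

# Resolve the data directory relative to this file instead of the CWD, so
# invoking the script from anywhere finds the same sprites.
HERE = os.path.dirname(os.path.abspath(__file__))
root = os.path.join(HERE, '..', 'build', 'website', 'assets', 'sprites')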
2192219d92713c6eb76593d0c6c29413d040db6a
|
Revert "Added script for cron job to load surveys to database."
|
scripts/cronRefreshEdxQualtrics.py
|
scripts/cronRefreshEdxQualtrics.py
|
from surveyextractor import QualtricsExtractor
import getopt
import sys
### Script for scheduling regular EdxQualtrics updates
### Usage for cron should be "cronRefreshEdxQualtrics.py -m -s -r"
# Append directory for dependencies to PYTHONPATH
sys.path.append("/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/")
qe = QualtricsExtractor()
opts, args = getopt.getopt(sys.argv[1:], 'amsr', ['--reset', '--loadmeta', '--loadsurveys', '--loadresponses'])
for opt, arg in opts:
if opt in ('-a', '--reset'):
qe.resetMetadata()
qe.resetSurveys()
qe.resetResponses()
elif opt in ('-m', '--loadmeta'):
qe.loadSurveyMetadata()
elif opt in ('-s', '--loadsurvey'):
qe.resetSurveys()
qe.loadSurveyData()
elif opt in ('-r', '--loadresponses'):
qe.loadResponseData()
|
Python
| 0 |
@@ -57,23 +57,15 @@
topt
-%0Aimport
+,
sys%0A%0A
-##
# Sc
@@ -113,18 +113,16 @@
updates%0A
-##
# Usage
@@ -183,130 +183,8 @@
r%22%0A%0A
-# Append directory for dependencies to PYTHONPATH%0Asys.path.append(%22/home/dataman/Code/qualtrics_etl/src/qualtrics_etl/%22)%0A%0A
qe =
|
442bc4edea792e15871b8f00c3dec833615ec101
|
Add the ability to get the status of a single machine
|
floki/machines/__init__.py
|
floki/machines/__init__.py
|
from vmrun_wrapper.vmrun import machine
import yaml
import sys
import os
import time
class Machines:
def __init__(self, config):
self.load_config(config)
if os.environ.get('VMRUN'):
self.vm = machine.machine(os.environ['VMRUN'])
else:
self.vm = machine.machine()
def load_config(self, path):
try:
self.config = yaml.load(file(path, 'r'))
except IOError, e:
print str(e)
def get_vmx_path(self, env, group, name):
machines = self.config[1]['machines'][env]
if group is not None:
machine = machines[group][name]
else:
for group in machines:
if name in machines[group]:
machine = machines[group][name]
if type(machine) is dict and 'path' in machine:
path = machine['path']
else:
path = "".join(self.config[0]['project']['dir'] + '/' +
self.config[0]['project']['name'] + '/' + env +
'/' + name + '.vmwarevm')
return path
def does_machine_exists(self, machine_path):
if os.path.isdir(machine_path) and machine_path.endswith('vmwarevm'):
for file in os.listdir(machine_path):
if file.endswith('.vmx'):
if os.path.isfile(machine_path + '/' + file):
return True
elif machine_path.endswith('vmx') and os.path.isfile(machine_path):
if os.path.isfile(machine_path):
return True
else:
return False
def get_list(self, env, groups):
list = dict()
if groups[0] is 'all':
groups = self.config[1]['machines'][env].keys()
for group in groups:
try:
for name in self.config[1]['machines'][env][group]:
list[name] = self.get_vmx_path(env, group, name)
except KeyError:
if env not in self.config[1]['machines']:
print "ERROR: Enviroment %s doesn't exist" % env
else:
print "ERROR: Group %s doesn't exist" % group
if any(list):
return list
else:
sys.exit(1)
def get_list_running(self, running, env, groups):
running_list = dict()
machine_list = self.get_list(env, groups)
if running['count'] is not 0:
for machine in machine_list:
for path in running['machines']:
if machine_list[machine] in path:
running_list[machine] = path
return running_list
def generate_inventory(self, env, groups):
"""
        Generate an inventory file to be used with Ansible
"""
if groups[0] is 'all':
groups = self.config[1]['machines'][env].keys()
print "[%s] Crating the inventory file:" % env,
try:
with open(env + '.ini', 'w') as inventory:
for group in groups:
inventory.write('[' + group + ']' + '\n')
machines = self.config[1]['machines'][env][group]
for name in machines:
ip = self.vm.get_ip(self.get_vmx_path(env, group,
name))
if ip == '\n':
for i in range(0, 5):
time.sleep(5)
ip = self.vm.get_ip(self.get_vmx_path(env,
group,
name))
if ip != '\n':
break
if 'Error' not in ip:
ipaddr = ' ansible_ssh_host=' + ip
inventory.write(name + ipaddr)
else:
raise ValueError
print "ok."
except:
print "failed."
def start(self, env, groups):
machines_running = self.get_list_running(self.vm.list(), env, groups)
machine_list = self.get_list(env, groups)
for machine in machine_list:
try:
print "[%s] Starting %s:" % (env, machine),
if self.does_machine_exists(machine_list[machine]):
self.vm.start(machine_list[machine], False)
print "ok"
except IOError, e:
print " %s" % str(e)
self.generate_inventory(env, groups)
def stop(self, env, groups):
machine_list = self.get_list_running(self.vm.list(), env, groups)
for machine in machine_list:
try:
print "[%s] Stopping %s:" % (env, machine),
                self.vm.stop(machine_list[machine], False)
print "ok."
except:
print "failed."
def restart(self, env, groups):
        self.stop(env, groups)
        self.start(env, groups)
def suspend(self, env, groups):
for machine in self.get_list_running(self.vm.list(), env, groups):
try:
print "[%s] Suspending %s" % (env, machine),
self.vm.stop(get_vmx_path(env, group, machine), False)
print "ok."
finally:
print "failed."
def status(self, env, group):
running_list = self.get_list_running(self.vm.list(), env, group)
if len(running_list) is 0:
print 'No machine running'
else:
print "Machines running:"
for machine in running_list:
print machine
print "Total: %s machine(s) running" % len(running_list)
def create(self, env, groups):
machine_list = self.get_list(env, groups)
template = self.config[0]['project']['template']
if not self.vm.vmx_path_is_valid(template):
print "The template %s is invalid" % template
for machine in machine_list:
if self.vm.vmx_path_is_valid(machine_list[machine]) and False:
print "%s" % machine_list[machine],
print "Already exists, not creating."
else:
print "[%s] Creating %s..." % (env, machine),
try:
os.makedirs(machine_list[machine])
except OSError:
if not os.path.isdir(machine_list[machine]):
raise
vmx_dest_path = machine_list[machine] + '/' + machine + '.vmx'
if not os.path.isfile(vmx_dest_path):
self.vm.clone(template, vmx_dest_path)
print "done."
else:
print "skiping, virtual machine already exists."
|
Python
| 0.000121 |
@@ -5525,16 +5525,24 @@
v, group
+, single
):%0A
@@ -5609,16 +5609,47 @@
group)%0A
+ if single is None:%0A
@@ -5671,24 +5671,28 @@
list) is 0:%0A
+
@@ -5718,32 +5718,36 @@
unning'%0A
+
else:%0A
@@ -5744,24 +5744,28 @@
+
+
print %22Machi
@@ -5778,16 +5778,20 @@
nning:%22%0A
+
@@ -5835,24 +5835,28 @@
+
+
print machin
@@ -5857,16 +5857,20 @@
machine%0A
+
@@ -5929,16 +5929,183 @@
ng_list)
+%0A elif single in running_list:%0A print %22The machine %25s is running.%22 %25 single%0A else:%0A print %22The machine %25s is not running.%22 %25 single
%0A%0A de
|
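Decoded, the patch above gives status() an extra single argument: when it is None the existing summary is printed, otherwise the method says whether that one machine appears in the running list. A standalone sketch of the resulting control flow; report_status is a hypothetical helper rather than the method itself, and print() calls stand in for the record's Python 2 print statements:

def report_status(running_list, single=None):
    if single is None:
        if len(running_list) == 0:
            print('No machine running')
        else:
            print('Machines running:')
            for machine in running_list:
                print(machine)
            print('Total: %s machine(s) running' % len(running_list))
    elif single in running_list:
        print('The machine %s is running.' % single)
    else:
        print('The machine %s is not running.' % single)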
3469c8ab2bb296f5a5eddc9f320c8b7e121fb38c
|
Fix affluent communication
|
confluent_server/confluent/plugins/hardwaremanagement/affluent.py
|
confluent_server/confluent/plugins/hardwaremanagement/affluent.py
|
# Copyright 2019-2020 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import eventlet
import eventlet.queue as queue
import confluent.exceptions as exc
webclient = eventlet.import_patched('pyghmi.util.webclient')
import confluent.messages as msg
import confluent.util as util
class SwitchSensor(object):
def __init__(self, name, states, value=None, health=None):
self.name = name
self.value = value
self.states = states
self.health = health
class WebClient(object):
def __init__(self, node, configmanager, creds):
self.node = node
self.wc = webclient.SecureHTTPConnection(node, port=443, verifycallback=util.TLSCertVerifier(
configmanager, node, 'pubkeys.tls_hardwaremanager').verify_cert)
self.wc.set_basic_credentials(creds[node]['secret.hardwaremanagementuser']['value'], creds[node]['secret.hardwaremanagementpassword']['value'])
def fetch(self, url, results):
rsp, status = self.wc.grab_json_response_with_status(url)
if status == 401:
results.put(msg.ConfluentTargetInvalidCredentials(self.node, 'Unable to authenticate'))
return {}
elif status != 200:
results.put(msg.ConfluentNodeError(self.node, 'Unknown error: ' + rsp + ' while retrieving ' + url))
return {}
return rsp
def update(nodes, element, configmanager, inputdata):
for node in nodes:
yield msg.ConfluentNodeError(node, 'Not Implemented')
def delete(nodes, element, configmanager, inputdata):
for node in nodes:
yield msg.ConfluentNodeError(node, 'Not Implemented')
def create(nodes, element, configmanager, inputdata):
for node in nodes:
yield msg.ConfluentNodeError(node, 'Not Implemented')
def _run_method(method, workers, results, configmanager, nodes, element):
creds = configmanager.get_node_attributes(
nodes, ['secret.hardwaremanagementuser', 'secret.hardwaremanagementpassword'], decrypt=True)
for node in nodes:
workers.add(eventlet.spawn(method, configmanager, creds,
node, results, element))
def retrieve(nodes, element, configmanager, inputdata):
results = queue.LightQueue()
workers = set([])
if element == ['power', 'state']:
for node in nodes:
yield msg.PowerState(node=node, state='on')
return
elif element == ['health', 'hardware']:
_run_method(retrieve_health, workers, results, configmanager, nodes, element)
elif element[:3] == ['inventory', 'hardware', 'all']:
_run_method(retrieve_inventory, workers, results, configmanager, nodes, element)
elif element[:3] == ['inventory', 'firmware', 'all']:
_run_method(retrieve_firmware, workers, results, configmanager, nodes, element)
elif element == ['sensors', 'hardware', 'all']:
_run_method(list_sensors, workers, results, configmanager, nodes, element)
elif element[:3] == ['sensors', 'hardware', 'all']:
_run_method(retrieve_sensors, workers, results, configmanager, nodes, element)
else:
for node in nodes:
yield msg.ConfluentNodeError(node, 'Not Implemented')
return
while workers:
try:
datum = results.get(10)
while datum:
if datum:
yield datum
datum = results.get_nowait()
except queue.Empty:
pass
eventlet.sleep(0.001)
for t in list(workers):
if t.dead:
workers.discard(t)
try:
while True:
datum = results.get_nowait()
if datum:
yield datum
except queue.Empty:
pass
def retrieve_inventory(configmanager, creds, node, results, element):
if len(element) == 3:
results.put(msg.ChildCollection('all'))
results.put(msg.ChildCollection('system'))
return
wc = WebClient(node, configmanager, creds)
invinfo = wc.fetch('/affluent/inventory/hardware/all', results)
if invinfo:
results.put(msg.KeyValueData(invinfo, node))
def retrieve_firmware(configmanager, creds, node, results, element):
if len(element) == 3:
results.put(msg.ChildCollection('all'))
return
wc = WebClient(node, configmanager, creds)
fwinfo = wc.fetch('/affluent/inventory/firmware/all', results)
if fwinfo:
results.put(msg.Firmware(fwinfo, node))
def list_sensors(configmanager, creds, node, results, element):
wc = WebClient(node, configmanager, creds)
sensors = wc.fetch('/affluent/sensors/hardware/all', results)
for sensor in sensors['item']:
results.put(msg.ChildCollection(sensor))
def retrieve_sensors(configmanager, creds, node, results, element):
wc = WebClient(node, configmanager, creds)
sensors = wc.fetch('/affluent/sensors/hardware/all/{0}'.format(element[-1]), results)
if sensors:
results.put(msg.SensorReadings(sensors['sensors'], node))
def retrieve_health(configmanager, creds, node, results, element):
wc = WebClient(node, configmanager, creds)
hinfo = wc.fetch('/affluent/health', results)
if hinfo:
results.put(msg.HealthSummary(hinfo.get('health', 'unknown'), name=node))
results.put(msg.SensorReadings(hinfo.get('sensors', []), name=node))
|
Python
| 0 |
@@ -1449,24 +1449,41 @@
, results):%0A
+ try:%0A
rsp,
@@ -1536,16 +1536,321 @@
us(url)%0A
+ except exc.PubkeyInvalid:%0A results.put(msg.ConfluentNodeError(self.node,%0A 'Extended information unavailable, mismatch detected between '%0A 'target certificate fingerprint and '%0A 'pubkeys.tls_hardwaremanager attribute'))%0A return %7B%7D%0A
@@ -4125,16 +4125,36 @@
lts.get(
+block=True, timeout=
10)%0A
|
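Two independent fixes are encoded in the patch above: WebClient.fetch() now catches exc.PubkeyInvalid and reports a certificate-fingerprint mismatch instead of crashing, and the result-queue read becomes results.get(block=True, timeout=10) -- the old get(10) most likely handed 10 to the block parameter, so no timeout was actually applied. A minimal sketch of the corrected drain loop, using the stdlib queue module for illustration rather than eventlet (an assumption):

import queue

def drain(results, timeout=10):
    # Wait up to `timeout` seconds for the first item, then empty whatever
    # else is already queued without blocking further.
    items = []
    try:
        item = results.get(block=True, timeout=timeout)
        while item:
            items.append(item)
            item = results.get_nowait()
    except queue.Empty:
        pass
    return items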
2c4b555b95429156185c7a1ac499f8d69ffeefe2
|
use renamed xmlid in newly added test
|
addons/stock/tests/test_robustness.py
|
addons/stock/tests/test_robustness.py
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.exceptions import UserError
from odoo.tests.common import TransactionCase
class TestRobustness(TransactionCase):
def setUp(self):
super(TestRobustness, self).setUp()
self.stock_location = self.env.ref('stock.stock_location_stock')
self.customer_location = self.env.ref('stock.stock_location_customers')
self.uom_unit = self.env.ref('product.product_uom_unit')
self.uom_dozen = self.env.ref('product.product_uom_dozen')
self.product1 = self.env['product.product'].create({
'name': 'Product A',
'type': 'product',
'categ_id': self.env.ref('product.product_category_all').id,
})
def test_uom_factor(self):
""" Changing the factor of a unit of measure shouldn't be allowed while
quantities are reserved, else the existing move lines won't be consistent
with the `reserved_quantity` on quants.
"""
# make some stock
self.env['stock.quant']._update_available_quantity(
self.product1,
self.stock_location,
12,
)
# reserve a dozen
move1 = self.env['stock.move'].create({
'name': 'test_uom_rounding',
'location_id': self.stock_location.id,
'location_dest_id': self.customer_location.id,
'product_id': self.product1.id,
'product_uom': self.uom_dozen.id,
'product_uom_qty': 1,
})
move1._action_confirm()
move1._action_assign()
self.assertEqual(move1.state, 'assigned')
quant = self.env['stock.quant']._gather(
self.product1,
self.stock_location,
)
# assert the reservation
self.assertEqual(quant.reserved_quantity, 12)
self.assertEqual(move1.product_qty, 12)
# change the factor
with self.assertRaises(UserError):
with self.cr.savepoint():
move1.product_uom.factor = 0.05
# assert the reservation
self.assertEqual(quant.reserved_quantity, 12)
self.assertEqual(move1.state, 'assigned')
self.assertEqual(move1.product_qty, 12)
# unreserve
move1._do_unreserve()
def test_location_usage(self):
""" Changing the usage of a location shouldn't be allowed while
quantities are reserved, else the existing move lines won't be
consistent with the `reserved_quantity` on the quants.
"""
# change stock usage
self.stock_location.scrap_location = True
# make some stock
self.env['stock.quant']._update_available_quantity(
self.product1,
self.stock_location,
1,
)
# reserve a unit
move1 = self.env['stock.move'].create({
'name': 'test_location_archive',
'location_id': self.stock_location.id,
'location_dest_id': self.customer_location.id,
'product_id': self.product1.id,
'product_uom': self.uom_unit.id,
'product_uom_qty': 1,
})
move1._action_confirm()
move1._action_assign()
self.assertEqual(move1.state, 'assigned')
quant = self.env['stock.quant']._gather(
self.product1,
self.stock_location,
)
# assert the reservation
self.assertEqual(quant.reserved_quantity, 0) # reservation is bypassed in scrap location
self.assertEqual(move1.product_qty, 1)
# change the stock usage
with self.assertRaises(UserError):
with self.cr.savepoint():
self.stock_location.scrap_location = False
# unreserve
move1._do_unreserve()
def test_package_unpack(self):
""" Unpack a package that contains quants with a reservation
should also remove the package on the reserved move lines.
"""
package = self.env['stock.quant.package'].create({
'name': 'Shell Helix HX7 10W30',
})
self.env['stock.quant']._update_available_quantity(
self.product1,
self.stock_location,
10,
package_id=package
)
# reserve a dozen
move1 = self.env['stock.move'].create({
'name': 'test_uom_rounding',
'location_id': self.stock_location.id,
'location_dest_id': self.customer_location.id,
'product_id': self.product1.id,
'product_uom': self.uom_unit.id,
'product_uom_qty': 10,
})
move1._action_confirm()
move1._action_assign()
move1.result_package_id = False
package.unpack()
# unreserve
move1._do_unreserve()
self.assertEqual(len(self.env['stock.quant']._gather(self.product1, self.stock_location)), 1)
self.assertEqual(len(self.env['stock.quant']._gather(self.product1, self.stock_location, package_id=package)), 0)
self.assertEqual(self.env['stock.quant']._gather(self.product1, self.stock_location).reserved_quantity, 0)
|
Python
| 0 |
@@ -466,39 +466,35 @@
= self.env.ref('
-product
+uom
.product_uom_uni
@@ -528,39 +528,35 @@
= self.env.ref('
-product
+uom
.product_uom_doz
|
98311b8b80d28ac6e6d92dbae3bcf987d5027e7a
|
Fix for housekeeping script error
|
photonix/photos/management/commands/housekeeping.py
|
photonix/photos/management/commands/housekeeping.py
|
import os
from pathlib import Path
from shutil import rmtree
from time import sleep
from django.conf import settings
from django.core.management.base import BaseCommand
from photonix.photos.models import Photo, Task
from photonix.photos.utils.thumbnails import THUMBNAILER_VERSION
class Command(BaseCommand):
help = 'Makes sure that if there have been upgrades to thumbnailing or image analysis code then jobs get rescheduled.'
def housekeeping(self):
# Remove old cache directories
for directory in os.listdir(settings.THUMBNAIL_ROOT):
if directory not in ['photofile']:
path = Path(settings.THUMBNAIL_ROOT) / directory
print(f'Removing old cache directory {path}')
rmtree(path)
# Regenerate any outdated thumbnails
photos = Photo.objects.filter(thumbnailed_version__lt=THUMBNAILER_VERSION)
if photos.count():
print(f'Rescheduling {photos.count()} photos to have their thumbnails regenerated')
for photo in photos:
Task(
type='generate_thumbnails', subject_id=photo.id,
library=photo.library).save()
def handle(self, *args, **options):
self.housekeeping()
|
Python
| 0 |
@@ -497,16 +497,33 @@
ctories%0A
+ try:%0A
@@ -588,16 +588,20 @@
+
if direc
@@ -627,16 +627,20 @@
file'%5D:%0A
+
@@ -704,32 +704,36 @@
+
+
print(f'Removing
@@ -778,16 +778,20 @@
+
rmtree(p
@@ -794,16 +794,116 @@
ee(path)
+%0A except FileNotFoundError: # In case thumbnail dir hasn't been created yet%0A pass
%0A%0A
|
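The patch above wraps the cache-directory sweep in a try/except so that a THUMBNAIL_ROOT that has not been created yet no longer crashes the housekeeping command. The defensive pattern in isolation (directory names here are placeholders):

import os
from pathlib import Path
from shutil import rmtree

def remove_old_cache_dirs(thumbnail_root, keep=('photofile',)):
    try:
        for directory in os.listdir(thumbnail_root):
            if directory not in keep:
                path = Path(thumbnail_root) / directory
                print(f'Removing old cache directory {path}')
                rmtree(path)
    except FileNotFoundError:
        # Thumbnail root does not exist yet; nothing to clean up.
        pass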
1eccbbfc073e37febf495afbbad2ccc255eb4932
|
Fix configuration file for boolean values
|
django_migration_linter/management/commands/lintmigrations.py
|
django_migration_linter/management/commands/lintmigrations.py
|
import configparser
import logging
import os
import sys
from importlib import import_module
from django.core.management.base import BaseCommand
from ...constants import __version__
from ...migration_linter import MessageType, MigrationLinter
from ..utils import register_linting_configuration_options
CONFIG_NAME = "django_migration_linter"
DEFAULT_CONFIG_FILES = (
".{}.cfg".format(CONFIG_NAME),
"pyproject.toml",
"setup.cfg",
"tox.ini",
".editorconfig",
)
class Command(BaseCommand):
help = "Lint your migrations"
def add_arguments(self, parser):
parser.add_argument(
"commit_id",
metavar="GIT_COMMIT_ID",
type=str,
nargs="?",
help=(
"if specified, only migrations since this commit "
"will be taken into account. If not specified, "
"the initial repo commit will be used"
),
)
parser.add_argument(
"--ignore-name-contains",
type=str,
nargs="?",
help="ignore migrations containing this name",
)
parser.add_argument(
"--ignore-name",
type=str,
nargs="*",
help="ignore migrations with exactly one of these names",
)
parser.add_argument(
"--project-root-path", type=str, nargs="?", help="django project root path"
)
parser.add_argument(
"--include-migrations-from",
metavar="FILE_PATH",
type=str,
nargs="?",
help="if specified, only migrations listed in the file will be considered",
)
cache_group = parser.add_mutually_exclusive_group(required=False)
cache_group.add_argument(
"--cache-path",
type=str,
help="specify a directory that should be used to store cache-files in.",
)
cache_group.add_argument(
"--no-cache", action="store_true", help="don't use a cache"
)
incl_excl_group = parser.add_mutually_exclusive_group(required=False)
incl_excl_group.add_argument(
"--include-apps",
type=str,
nargs="*",
help="check only migrations that are in the specified django apps",
)
incl_excl_group.add_argument(
"--exclude-apps",
type=str,
nargs="*",
help="ignore migrations that are in the specified django apps",
)
applied_unapplied_migrations_group = parser.add_mutually_exclusive_group(
required=False
)
applied_unapplied_migrations_group.add_argument(
"--unapplied-migrations",
action="store_true",
help="check only migrations have not been applied to the database yet",
)
applied_unapplied_migrations_group.add_argument(
"--applied-migrations",
action="store_true",
help="check only migrations that have already been applied to the database",
)
parser.add_argument(
"-q",
"--quiet",
nargs="+",
choices=MessageType.values(),
help="don't print linting messages to stdout",
)
register_linting_configuration_options(parser)
def handle(self, *args, **options):
if options["project_root_path"]:
settings_path = options["project_root_path"]
else:
settings_path = os.path.dirname(
import_module(os.getenv("DJANGO_SETTINGS_MODULE")).__file__
)
if options["verbosity"] > 1:
logging.basicConfig(format="%(message)s", level=logging.DEBUG)
else:
logging.basicConfig(format="%(message)s")
config_parser = configparser.ConfigParser()
config_parser.read(DEFAULT_CONFIG_FILES)
ignore_name_contains = options["ignore_name_contains"] or config_parser.get(
CONFIG_NAME, "ignore_name_contains", fallback=None
)
ignore_name = options["ignore_name"] or config_parser.get(
CONFIG_NAME, "ignore_name", fallback=None
)
include_apps = options["include_apps"] or config_parser.get(
CONFIG_NAME, "include_apps", fallback=None
)
exclude_apps = options["exclude_apps"] or config_parser.get(
CONFIG_NAME, "exclude_apps", fallback=None
)
database = options["database"] or config_parser.get(
CONFIG_NAME, "database", fallback=None
)
cache_path = options["cache_path"] or config_parser.get(
CONFIG_NAME, "cache_path", fallback=None
)
no_cache = options["no_cache"] or config_parser.get(
CONFIG_NAME, "no_cache", fallback=None
)
applied_migrations = options["applied_migrations"] or config_parser.get(
CONFIG_NAME, "applied_migrations", fallback=None
)
unapplied_migrations = options["unapplied_migrations"] or config_parser.get(
CONFIG_NAME, "unapplied_migrations", fallback=None
)
exclude_migration_tests = options[
"exclude_migration_tests"
] or config_parser.get(CONFIG_NAME, "exclude_migration_tests", fallback=None)
quiet = options["quiet"] or config_parser.get(
CONFIG_NAME, "quiet", fallback=None
)
warnings_as_errors = options["warnings_as_errors"] or config_parser.get(
CONFIG_NAME, "warnings_as_errors", fallback=None
)
linter = MigrationLinter(
settings_path,
ignore_name_contains=ignore_name_contains,
ignore_name=ignore_name,
include_apps=include_apps,
exclude_apps=exclude_apps,
database=database,
cache_path=cache_path,
no_cache=no_cache,
only_applied_migrations=applied_migrations,
only_unapplied_migrations=unapplied_migrations,
exclude_migration_tests=exclude_migration_tests,
quiet=quiet,
warnings_as_errors=warnings_as_errors,
)
linter.lint_all_migrations(
git_commit_id=options["commit_id"],
migrations_file_path=options["include_migrations_from"],
)
linter.print_summary()
if linter.has_errors:
sys.exit(1)
def get_version(self):
return __version__
|
Python
| 0.000032 |
@@ -4787,32 +4787,39 @@
onfig_parser.get
+boolean
(%0A CO
@@ -4936,32 +4936,39 @@
onfig_parser.get
+boolean
(%0A CO
@@ -5063,16 +5063,29 @@
options%5B
+%0A
%22unappli
@@ -5090,32 +5090,41 @@
lied_migrations%22
+%0A
%5D or config_pars
@@ -5121,32 +5121,39 @@
onfig_parser.get
+boolean
(%0A CO
@@ -5562,32 +5562,39 @@
onfig_parser.get
+boolean
(%0A CO
|
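The reason the change above matters: ConfigParser.get() returns the raw string, so a setting such as no_cache = false is still truthy when used in a boolean context, while getboolean() parses the usual true/false, yes/no, on/off, 1/0 spellings into a real bool. A small illustration with a made-up config snippet (not taken from the record):

import configparser

parser = configparser.ConfigParser()
parser.read_string('[django_migration_linter]\nno_cache = false\n')

# get() returns the raw string "false", which is truthy.
print(bool(parser.get('django_migration_linter', 'no_cache')))
# getboolean() parses it into the intended boolean.
print(parser.getboolean('django_migration_linter', 'no_cache'))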
7a87568db2a2012a25dc0a09ce41b742107bb111
|
Mark fiware-ut-9 tests
|
src/wirecloud/oauth2provider/tests.py
|
src/wirecloud/oauth2provider/tests.py
|
# -*- coding: utf-8 -*-
# Copyright 2013 Universidad Politécnica de Madrid
# This file is part of Wirecloud.
# Wirecloud is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Wirecloud is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Wirecloud. If not, see <http://www.gnu.org/licenses/>.
from urlparse import parse_qs, urlparse
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import Client, TestCase
from django.utils import simplejson, unittest
from django.utils.http import urlencode
@unittest.skipIf(not 'wirecloud.oauth2provider' in settings.INSTALLED_APPS, 'OAuth2 provider not enabled')
class Oauth2TestCase(TestCase):
fixtures = ('selenium_test_data', 'oauth2_test_data')
tags = ('oauth2',)
@classmethod
def setUpClass(cls):
cls.client = Client()
cls.user_client = Client()
def setUp(self):
self.user_client.login(username='normuser', password='admin')
def test_authorization_code_grant_flow(self):
# Authorization request
query = {
'response_type': 'code',
'client_id': '3faf0fb4c2fe76c1c3bb7d09c21b97c2',
'redirect_uri': 'https://customapp.com/oauth/redirect',
}
auth_req_url = reverse('oauth2provider.auth') + '?' + urlencode(query)
response = self.user_client.get(auth_req_url)
# Parse returned code
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].startswith('https://customapp.com/oauth/redirect'))
response_data = parse_qs(urlparse(response['Location']).query)
code = response_data['code'][0]
# Access token request
url = reverse('oauth2provider.token')
data = {
'code': code,
'grant_type': 'authorization_code',
'client_id': '3faf0fb4c2fe76c1c3bb7d09c21b97c2',
'client_secret': '9643b7c3f59ef531931d39a3e19bcdd7',
'redirect_uri': 'https://customapp.com/oauth/redirect',
}
response = self.client.post(url, data)
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
token = response_data['access_token']
token_type = response_data['token_type']
self.assertEqual(token_type, 'Bearer')
# Make an authenticated request
url = reverse('wirecloud.workspace_collection')
response = self.client.get(url, HTTP_ACCEPT='application/json', HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, list))
self.assertTrue(isinstance(response_data[0], dict))
@unittest.skip('wip test')
def test_implicit_grant_flow(self):
# Authorization request
query = {
'response_type': 'token',
'client_id': '3faf0fb4c2fe76c1c3bb7d09c21b97c2',
'redirect_uri': 'https://customapp.com/oauth/redirect',
}
auth_req_url = reverse('oauth2provider.auth') + '?' + urlencode(query)
response = self.user_client.get(auth_req_url)
# Parse returned code
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].startswith('https://customapp.com/oauth/redirect'))
response_data = parse_qs(urlparse(response['Location']).query)
token = response_data['access_token'][0]
token_type = response_data['token_type'][0]
self.assertEqual(token_type, 'Bearer')
# Make an authenticated request
url = reverse('wirecloud.workspace_collection')
response = self.client.get(url, HTTP_ACCEPT='application/json', HTTP_AUTHORIZATION='Bearer ' + token)
self.assertEqual(response.status_code, 200)
response_data = simplejson.loads(response.content)
self.assertTrue(isinstance(response_data, list))
self.assertTrue(isinstance(response_data[0], dict))
|
Python
| 0 |
@@ -3211,16 +3211,79 @@
, dict))
+%0A test_authorization_code_grant_flow.tags = ('fiware-ut-9',)
%0A%0A @u
|
74bfb58d2ac736960392bd7358a96317b63e6aef
|
add delay for test_diag (#4046)
|
tests/scripts/thread-cert/test_diag.py
|
tests/scripts/thread-cert/test_diag.py
|
#!/usr/bin/env python
#
# Copyright (c) 2018, The OpenThread Authors.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
import unittest
import node
import config
class TestDiag(unittest.TestCase):
def setUp(self):
self.simulator = config.create_default_simulator()
self.node = node.Node(1, False, simulator=self.simulator)
def tearDown(self):
self.node.destroy()
self.simulator.stop()
def test(self):
cases = [
('diag\n',
'diagnostics mode is disabled\r\n'),
('diag send 10 100\n',
'failed\r\nstatus 0xd\r\n'),
('diag start\n',
'start diagnostics mode\r\nstatus 0x00\r\n'),
('diag',
'diagnostics mode is enabled\r\n'),
('diag channel 10\n',
'failed\r\nstatus 0x7\r\n'),
('diag channel 11\n',
'set channel to 11\r\nstatus 0x00\r\n'),
('diag channel\n',
'channel: 11\r\n'),
('diag power -10\n',
'set tx power to -10 dBm\r\nstatus 0x00\r\n'),
('diag power\n',
'tx power: -10 dBm\r\n'),
('diag stats\n',
'received packets: 0\r\nsent packets: 0\r\n'
'first received packet: rssi=0, lqi=0\r\n'
'last received packet: rssi=0, lqi=0\r\n',
),
('diag send 20 100\n',
r'sending 0x14 packet\(s\), length 0x64\r\nstatus 0x00\r\n',
),
(' diag \t send \t 20\t100',
r'sending 0x14 packet\(s\), length 0x64\r\nstatus 0x00\r\n',
),
('diag repeat 100 100\n',
'sending packets of length 0x64 at the delay of 0x64 ms\r\nstatus 0x00\r\n',
),
('diag repeat stop\n',
'repeated packet transmission is stopped\r\nstatus 0x00\r\n',
),
('diag stop\n',
r'received packets: 0\r\nsent packets: ([1-9]\d*)\r\n'
'first received packet: rssi=0, lqi=0\r\n'
'last received packet: rssi=0, lqi=0\r\n\n'
r'stop diagnostics mode\r\nstatus 0x00\r\n',
),
('diag',
'diagnostics mode is disabled\r\n'),
('diag 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32',
r'Error: too many args \(max 32\)\r\n',
),
]
for case in cases:
self.node.send_command(case[0])
self.simulator.go(1)
self.node._expect(case[1])
if __name__ == '__main__':
unittest.main()
|
Python
| 0 |
@@ -1612,16 +1612,28 @@
unittest
+%0Aimport time
%0A%0Aimport
@@ -4081,16 +4081,111 @@
r.go(1)%0A
+ if type(self.simulator).__name__ == 'VirtualTime':%0A time.sleep(0.1)%0A
|
6bdc92345a58dc40749eedb9630d0d28d6d23e87
|
Add release notes for moving_mnist
|
tensorflow_datasets/video/moving_mnist.py
|
tensorflow_datasets/video/moving_mnist.py
|
# coding=utf-8
# Copyright 2020 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""MovingMNIST."""
import numpy as np
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
from tensorflow_datasets.video.moving_sequence import image_as_moving_sequence # pylint: disable=unused-import
_OUT_RESOLUTION = (64, 64)
_SEQUENCE_LENGTH = 20
_URL = "http://www.cs.toronto.edu/~nitish/unsupervised_video/"
_CITATION = """\
@article{DBLP:journals/corr/SrivastavaMS15,
author = {Nitish Srivastava and
Elman Mansimov and
Ruslan Salakhutdinov},
title = {Unsupervised Learning of Video Representations using LSTMs},
journal = {CoRR},
volume = {abs/1502.04681},
year = {2015},
url = {http://arxiv.org/abs/1502.04681},
archivePrefix = {arXiv},
eprint = {1502.04681},
timestamp = {Mon, 13 Aug 2018 16:47:05 +0200},
biburl = {https://dblp.org/rec/bib/journals/corr/SrivastavaMS15},
bibsource = {dblp computer science bibliography, https://dblp.org}
}
"""
_DESCRIPTION = """\
Moving variant of MNIST database of handwritten digits. This is the
data used by the authors for reporting model performance. See
`tfds.video.moving_mnist.image_as_moving_sequence`
for generating training/validation data from the MNIST dataset.
"""
class MovingMnist(tfds.core.GeneratorBasedBuilder):
"""MovingMnist."""
VERSION = tfds.core.Version(
"1.0.0", "New split API (https://tensorflow.org/datasets/splits)")
def _info(self):
return tfds.core.DatasetInfo(
builder=self,
description=_DESCRIPTION,
features=tfds.features.FeaturesDict({
"image_sequence": tfds.features.Video(
shape=(_SEQUENCE_LENGTH,) + _OUT_RESOLUTION + (1,))
}),
homepage=_URL,
citation=_CITATION,
)
def _split_generators(self, dl_manager):
data_path = dl_manager.download(_URL + "mnist_test_seq.npy")
# authors only provide test data.
# See `tfds.video.moving_mnist.image_as_moving_sequence` for mapping
# function to create training/validation dataset from MNIST.
return [
tfds.core.SplitGenerator(
name=tfds.Split.TEST,
gen_kwargs=dict(data_path=data_path)),
]
def _generate_examples(self, data_path):
"""Generate MovingMnist sequences.
Args:
data_path (str): Path to the data file
Yields:
20 x 64 x 64 x 1 uint8 numpy arrays
"""
with tf.io.gfile.GFile(data_path, "rb") as fp:
images = np.load(fp)
images = np.transpose(images, (1, 0, 2, 3))
images = np.expand_dims(images, axis=-1)
for i, sequence in enumerate(images):
yield i, dict(image_sequence=sequence)
|
Python
| 0 |
@@ -1945,16 +1945,42 @@
Version(
+%221.0.0%22)%0A RELEASE_NOTES=%7B
%0A %22
@@ -1985,17 +1985,17 @@
%221.0.0%22
-,
+:
%22New sp
@@ -2043,17 +2043,21 @@
splits)%22
-)
+,%0A %7D
%0A%0A def
|
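Decoded, the patch above splits the human-readable note out of the Version constructor into a separate RELEASE_NOTES mapping keyed by version string. A sketch of the resulting class-attribute layout on a hypothetical builder; it assumes tensorflow-datasets is installed, and the abstract methods are omitted, so the class is only a shape illustration and not instantiable as written:

import tensorflow_datasets.public_api as tfds

class MyVideoDataset(tfds.core.GeneratorBasedBuilder):
    """Illustrative builder showing the VERSION / RELEASE_NOTES split."""
    VERSION = tfds.core.Version("1.0.0")
    RELEASE_NOTES = {
        "1.0.0": "New split API (https://tensorflow.org/datasets/splits)",
    }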
3214645740866f4d15df826880169297b460bce4
|
fix file mode
|
egginst/utils.py
|
egginst/utils.py
|
import os
import sys
import random
import shutil
import string
from os.path import basename, isdir, isfile, join
chars = string.letters + string.digits
def mk_tmp_dir():
tmp_dir = join(sys.prefix, '.tmp_ironpkg')
try:
shutil.rmtree(tmp_dir)
except (WindowsError, IOError):
pass
if not isdir(tmp_dir):
os.mkdir(tmp_dir)
return tmp_dir
def pprint_fn_action(fn, action):
"""
Pretty print the distribution name (filename) and an action, the width
    of the output corresponds to the width of the progress bar used by the
function below.
"""
print "%-56s %20s" % (fn, '[%s]' % action)
def rmdir_er(dn):
"""
Remove empty directories recursively.
"""
for name in os.listdir(dn):
path = join(dn, name)
if isdir(path):
rmdir_er(path)
if not os.listdir(dn):
os.rmdir(dn)
def rm_rf(path, verbose=False):
if isfile(path):
if verbose:
print "Removing: %r (file)" % path
try:
os.unlink(path)
except (WindowsError, IOError):
tmp_dir = mk_tmp_dir()
rand = ''.join(random.choice(chars) for x in xrange(10))
os.rename(path, join(tmp_dir, '%s_%s' % (rand, basename(path))))
elif isdir(path):
if verbose:
print "Removing: %r (directory)" % path
shutil.rmtree(path)
def human_bytes(n):
"""
Return the number of bytes n in more human readable form.
"""
if n < 1024:
return '%i B' % n
k = (n - 1) / 1024 + 1
if k < 1024:
return '%i KB' % k
return '%.2f MB' % (float(n) / (2**20))
|
Python
| 0.000001 | |
384358522c39c560538bfaade33d6176689dde56
|
Decrease multiple replies comment acceptance test number
|
src/adhocracy_frontend/adhocracy_frontend/tests/acceptance/test_comment.py
|
src/adhocracy_frontend/adhocracy_frontend/tests/acceptance/test_comment.py
|
from urllib.parse import urlencode
from pytest import fixture
from adhocracy_core.testing import god_login
from adhocracy_frontend.tests.acceptance.shared import wait
from adhocracy_frontend.tests.acceptance.shared import get_list_element
from adhocracy_frontend.tests.acceptance.shared import get_listing_create_form
from adhocracy_frontend.tests.acceptance.shared import get_random_string
from adhocracy_frontend.tests.acceptance.shared import login_god
from adhocracy_frontend.tests.fixtures.users import create_user
from adhocracy_frontend.tests.acceptance.shared import logout
from adhocracy_frontend.tests.acceptance.shared import login
EDIT = 'edit'
REPLY = 'reply'
SAVE = 'save'
@fixture(scope='module')
def user():
name = get_random_string(n=5)
password = 'password'
create_user(name, password)
import time
time.sleep(1)
return name, password
class TestComment:
def test_create(self, browser, rest_url):
login_god(browser)
comment = create_comment(browser, rest_url, 'comment1')
assert comment is not None
def test_empty_comment(self, browser, rest_url):
comment = create_comment(browser, rest_url, '')
assert comment is None
def test_nested_replies(self, browser, n=7):
for i in range(n):
comment = browser.find_by_css('.comment').last
reply = create_reply_comment(browser, comment, 'nested reply %d' % i)
assert reply is not None
def test_multiple_replies(self, browser, n=10):
comment = browser.find_by_css('.comment').first
for i in range(n):
reply = create_reply_comment(browser, comment, 'multiple reply %d' % i)
assert reply is not None
def test_edit(self, browser):
comment = browser.find_by_css('.comment').first
edit_comment(browser, comment, 'edited')
assert comment.find_by_css('.comment-content div').first.text == 'edited'
browser.reload()
assert wait(lambda: browser.find_by_css('.comment-content')
.first.text == 'edited')
def test_edit_twice(self, browser):
comment = browser.find_by_css('.comment').first
edit_comment(browser, comment, 'edited 1')
assert wait(lambda: comment.find_by_css('.comment-content div')
.first.text == 'edited 1')
edit_comment(browser, comment, 'edited 2')
assert wait(lambda: comment.find_by_css('.comment-content div')
.first.text == 'edited 2')
def test_multi_edits(self, browser):
parent = browser.find_by_css('.comment').first
reply = parent.find_by_css('.comment').first
edit_comment(browser, reply, 'somereply edited')
edit_comment(browser, parent, 'edited')
content = parent.find_by_css('.comment-content')
assert wait(lambda: content.first.text == 'edited')
def test_author(self, browser):
comment = browser.find_by_css('.comment').first
actual = lambda element: element.find_by_css('adh-user-meta').first.text
        # the capitalisation might be changed by CSS
assert wait(lambda: actual(comment).lower() == god_login.lower())
def test_edit_no_user(self, browser, rest_url, user):
logout(browser)
comment = browser.find_by_css('.comment').first
assert not _get_button(browser, comment, EDIT)
def test_reply_no_user(self, browser):
comment = browser.find_by_css('.comment').last
assert not _get_button(browser, comment, REPLY)
def test_edit_other_user(self, browser, rest_url, user):
login(browser, user[0], user[1])
_visit_url(browser, rest_url)
assert browser.is_text_present('edited')
comment = browser.find_by_css('.comment').first
assert not _get_button(browser, comment, EDIT)
def test_reply_other_user(self, browser):
comment = browser.find_by_css('.comment').first
reply = create_reply_comment(browser, comment, 'other user reply')
assert reply is not None
def _visit_url(browser, rest_url):
query = urlencode({
'key': 'test',
'pool-path': rest_url + 'adhocracy/',
})
browser.visit(browser.app_url + 'embed/create-or-show-comment-listing?' + query)
def create_comment(browser, rest_url, name):
"""Go to content2 column and create comment with content 'comment1'."""
_visit_url(browser, rest_url)
browser.is_element_present_by_css('.listing', wait_time=10)
listing = browser.find_by_css('.listing').first
comment = create_top_level_comment(browser, listing, name)
return comment
def create_top_level_comment(browser, listing, content):
"""Create a new top level Comment."""
form = get_listing_create_form(listing)
form.find_by_css('textarea').first.fill(content)
form.find_by_css('input[type="submit"]').first.click()
browser.is_text_present(content, wait_time=10)
comment = get_list_element(listing, content, descendant='.comment-content')
return comment
def create_reply_comment(browser, parent, content):
"""Create a new reply to an existing comment."""
form = get_comment_create_form(browser, parent)
form.find_by_css('textarea').first.fill(content)
form.find_by_css('input[type="submit"]').first.click()
if not browser.is_text_present(content, wait_time=15):
return None
reply = get_reply(parent, content)
return reply
def _get_button(browser, comment, text):
actions = comment.find_by_css('.comment-header-links a')\
+ comment.find_by_css('.comment-actions a')
for a in actions:
if a.text == text:
return a
else:
return None
def edit_comment(browser, comment, content):
edit = _get_button(browser, comment, EDIT)
assert edit
edit.click()
comment.find_by_css('textarea').first.fill(content)
save = _get_button(browser, comment, SAVE)
assert save
save.click()
browser.is_text_present(content, wait_time=10)
def get_comment_create_form(browser, comment):
reply = _get_button(browser, comment, REPLY)
assert reply
reply.click()
return comment.find_by_css('.comment-create-form').first
def get_reply(parent, content):
"""Return reply to comment `parent` with content == `content`."""
for element in parent.find_by_css('.comment'):
wait(lambda: element.text, max_steps=100)
if element.find_by_css('.comment-content').first.text == content:
return element
|
Python
| 0.002234 |
@@ -1259,9 +1259,9 @@
, n=
-7
+3
):%0A
@@ -1512,18 +1512,17 @@
wser, n=
-10
+3
):%0A
|
ac90bdad2f09a5b79cb33b7ffed4782b7af6db61
|
Removing old hello world ish
|
webserver.py
|
webserver.py
|
import time
from flask import Flask, render_template
# DEFAULT_EXPIRATION = 10 # 10 sec
# DEFAULT_EXPIRATION = 60 * 10 # 10 min
DEFAULT_EXPIRATION = 60 * 20 # 20 min
app = Flask(__name__)
last_ping = {}
@app.route('/')
def index():
# return 'Hello, World!'
return render_template('index.html')
@app.route('/ping/<thing>')
def ping(thing):
now = time.time()
last_ping[thing] = now
return "%s %s" % (thing, now)
@app.route('/check/<thing>')
def check(thing):
if thing not in last_ping:
response = u"No such thing as %s" % thing
return response, 404
elapsed = time.time() - last_ping[thing]
if elapsed > DEFAULT_EXPIRATION:
del last_ping[thing]
response = u"Thing expired: %s" % thing
return response, 404
return "%s %s" % (thing, elapsed)
|
Python
| 0.998965 |
@@ -242,37 +242,8 @@
():%0A
- # return 'Hello, World!'%0A
|
3fadef637ad17458f629a4baeba7fd38205a1510
|
Bump Katib Python SDK to 0.12.0rc0 version (#1640)
|
sdk/python/v1beta1/setup.py
|
sdk/python/v1beta1/setup.py
|
# Copyright 2021 The Kubeflow Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
with open('requirements.txt') as f:
REQUIRES = f.readlines()
setuptools.setup(
name='kubeflow-katib',
version='0.10.1',
author="Kubeflow Authors",
author_email='[email protected]',
license="Apache License Version 2.0",
url="https://github.com/kubeflow/katib/tree/master/sdk/python/v1beta1",
description="Katib Python SDK for APIVersion v1beta1",
long_description="Katib Python SDK for APIVersion v1beta1",
packages=setuptools.find_packages(
include=("kubeflow*")),
package_data={},
include_package_data=False,
zip_safe=False,
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: Education',
'Intended Audience :: Science/Research',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Artificial Intelligence',
'Topic :: Software Development',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
],
install_requires=REQUIRES
)
|
Python
| 0 |
@@ -724,19 +724,22 @@
ion='0.1
-0.1
+2.0rc0
',%0A a
|
8517803a2cb3f3dd46911ec63acdeae283f23efd
|
Increase fund graph detail
|
srv/config.py
|
srv/config.py
|
""" Global configuration variables """
import os.path
PIE_TOLERANCE = 0.075
PIE_DETAIL = 30
GRAPH_FUND_HISTORY_DETAIL = 100
OVERVIEW_NUM_LAST = 25
OVERVIEW_NUM_FUTURE = 10
START_YEAR = 2014
START_MONTH = 9
LIST_CATEGORIES = ('funds', 'in', 'bills', 'food', 'general', 'holiday', 'social')
# common columns are added programmatically
LIST_DATA_FORM_SCHEMA = {
'funds': {
'units': ('float', True),
},
'in': {
},
'bills': {
},
'food': {
'category': ('string', True),
'shop': ('string', True)
},
'general': {
'category': ('string', True),
'shop': ('string', True)
},
'holiday': {
'holiday': ('string', True),
'shop': ('string', True)
},
'social': {
'society': ('string', True),
'shop': ('string', True)
}
}
IP_BAN_TIME = 60
IP_BAN_TRIES = 10
BASE_DIR = os.path.dirname(os.path.realpath(__file__)) + "/.."
SERIAL_FILE = BASE_DIR + "/resources/serial"
FUND_SALT = 'a963anx2'
# error messages
E_NO_PARAMS = "Not enough parameters given"
E_BAD_PARAMS = "Invalid parameters given"
E_NO_FORM = "Not enough form data given"
E_BAD_FORM = "Invalid form data given"
E_NO_ITEM = "Must supply an item (at least)"
|
Python
| 0 |
@@ -115,17 +115,17 @@
ETAIL =
-1
+2
00%0A%0AOVER
|
3d18d7a60ccf970f22fecb4981a2a8aa48248006
|
use a reliable host
|
Lib/test/test_socket_ssl.py
|
Lib/test/test_socket_ssl.py
|
# Test just the SSL support in the socket module, in a moderately bogus way.
import sys
import unittest
from test import test_support
import socket
import errno
import threading
import subprocess
import time
import os
import urllib
# Optionally test SSL support, if we have it in the tested platform
skip_expected = not hasattr(socket, "ssl")
class ConnectedTests(unittest.TestCase):
def testBasic(self):
socket.RAND_status()
try:
socket.RAND_egd(1)
except TypeError:
pass
else:
print("didn't raise TypeError")
socket.RAND_add("this is a random string", 75.0)
with test_support.transient_internet():
f = urllib.urlopen('https://sf.net')
buf = f.read()
f.close()
def testTimeout(self):
def error_msg(extra_msg):
print("""\
WARNING: an attempt to connect to %r %s, in
test_timeout. That may be legitimate, but is not the outcome we
hoped for. If this message is seen often, test_timeout should be
changed to use a more reliable address.""" % (ADDR, extra_msg), file=sys.stderr)
# A service which issues a welcome banner (without need to write
# anything).
# XXX ("gmail.org", 995) has been unreliable so far, from time to
# XXX time non-responsive for hours on end (& across all buildbot
# XXX slaves, so that's not just a local thing).
ADDR = "gmail.org", 995
s = socket.socket()
s.settimeout(30.0)
try:
s.connect(ADDR)
except socket.timeout:
error_msg('timed out')
return
except socket.error as exc: # In case connection is refused.
if exc.args[0] == errno.ECONNREFUSED:
error_msg('was refused')
return
else:
raise
ss = socket.ssl(s)
# Read part of return welcome banner twice.
ss.read(1)
ss.read(1)
s.close()
class BasicTests(unittest.TestCase):
def testRudeShutdown(self):
# Some random port to connect to.
PORT = [9934]
listener_ready = threading.Event()
listener_gone = threading.Event()
# `listener` runs in a thread. It opens a socket listening on
# PORT, and sits in an accept() until the main thread connects.
# Then it rudely closes the socket, and sets Event `listener_gone`
# to let the main thread know the socket is gone.
def listener():
s = socket.socket()
PORT[0] = test_support.bind_port(s, '', PORT[0])
s.listen(5)
listener_ready.set()
s.accept()
s = None # reclaim the socket object, which also closes it
listener_gone.set()
def connector():
listener_ready.wait()
s = socket.socket()
s.connect(('localhost', PORT[0]))
listener_gone.wait()
try:
ssl_sock = socket.ssl(s)
except socket.sslerror:
pass
else:
raise test_support.TestFailed(
'connecting to closed SSL socket should have failed')
t = threading.Thread(target=listener)
t.start()
connector()
t.join()
def test_978833(self):
if test_support.verbose:
print("test_978833 ...")
import os, httplib, ssl
with test_support.transient_internet():
s = socket.socket(socket.AF_INET)
s.connect(("svn.python.org", 443))
fd = s.fileno()
sock = ssl.wrap_socket(s)
s = None
sock.close()
try:
os.fstat(fd)
except OSError:
pass
else:
raise test_support.TestFailed("Failed to close socket")
class OpenSSLTests(unittest.TestCase):
def testBasic(self):
s = socket.socket()
s.connect(("localhost", 4433))
ss = socket.ssl(s)
ss.write("Foo\n")
i = ss.read(4)
self.assertEqual(i, "Foo\n")
s.close()
def testMethods(self):
# read & write is already tried in the Basic test
# now we'll try to get the server info about certificates
# this came from the certificate I used, one I found in /usr/share/openssl
info = "/C=PT/ST=Queensland/L=Lisboa/O=Neuronio, Lda./OU=Desenvolvimento/CN=brutus.neuronio.pt/[email protected]"
s = socket.socket()
s.connect(("localhost", 4433))
ss = socket.ssl(s)
cert = ss.server()
self.assertEqual(cert, info)
cert = ss.issuer()
self.assertEqual(cert, info)
s.close()
class OpenSSLServer(threading.Thread):
def __init__(self):
self.s = None
self.keepServing = True
self._external()
if self.haveServer:
threading.Thread.__init__(self)
def _external(self):
# let's find the .pem files
curdir = os.path.dirname(__file__) or os.curdir
cert_file = os.path.join(curdir, "ssl_cert.pem")
if not os.access(cert_file, os.F_OK):
raise ValueError("No cert file found! (tried %r)" % cert_file)
key_file = os.path.join(curdir, "ssl_key.pem")
if not os.access(key_file, os.F_OK):
raise ValueError("No key file found! (tried %r)" % key_file)
try:
cmd = "openssl s_server -cert %s -key %s -quiet" % (cert_file, key_file)
self.s = subprocess.Popen(cmd.split(), stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
time.sleep(1)
except:
self.haveServer = False
else:
# let's try if it is actually up
try:
s = socket.socket()
s.connect(("localhost", 4433))
s.close()
if self.s.stdout.readline() != "ERROR\n":
raise ValueError
except:
self.haveServer = False
else:
self.haveServer = True
def run(self):
while self.keepServing:
time.sleep(.5)
l = self.s.stdout.readline()
self.s.stdin.write(l)
def shutdown(self):
self.keepServing = False
if not self.s:
return
if sys.platform == "win32":
subprocess.TerminateProcess(int(self.s._handle), -1)
else:
os.kill(self.s.pid, 15)
def test_main():
if not hasattr(socket, "ssl"):
raise test_support.TestSkipped("socket module has no ssl support")
tests = [BasicTests]
if test_support.is_resource_enabled('network'):
tests.append(ConnectedTests)
# in these platforms we can kill the openssl process
if sys.platform in ("sunos5", "darwin", "linux1",
"linux2", "win32", "hp-ux11"):
server = OpenSSLServer()
if server.haveServer:
tests.append(OpenSSLTests)
server.start()
else:
server = None
thread_info = test_support.threading_setup()
try:
test_support.run_unittest(*tests)
finally:
if server is not None and server.haveServer:
server.shutdown()
test_support.threading_cleanup(*thread_info)
if __name__ == "__main__":
test_main()
|
Python
| 0 |
@@ -1249,213 +1249,8 @@
g).%0A
- # XXX (%22gmail.org%22, 995) has been unreliable so far, from time to%0A # XXX time non-responsive for hours on end (& across all buildbot%0A # XXX slaves, so that's not just a local thing).%0A
@@ -1265,17 +1265,21 @@
= %22
+pop.
gmail.
-org
+com
%22, 9
|
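The change above swaps the unreliable banner host ("gmail.org", 995) for ("pop.gmail.com", 995) and drops the stale XXX comment; the timeout-guarded connect pattern around it stays as it was. A minimal standalone sketch of that pattern, with the host and timeout treated as assumptions rather than anything the test mandates:

import errno
import socket

ADDR = ("pop.gmail.com", 995)   # banner-issuing host the patch switches to
TIMEOUT = 30.0                  # assumed timeout, in seconds

def try_connect(addr=ADDR, timeout=TIMEOUT):
    """Return a connected socket, or None if the host times out or refuses.

    Mirrors how the test treats an unreachable host as inconclusive rather
    than as a failure; the real test then wraps the socket in SSL before
    reading the banner.
    """
    s = socket.socket()
    s.settimeout(timeout)
    try:
        s.connect(addr)
    except socket.timeout:
        return None
    except socket.error as exc:
        if exc.args[0] == errno.ECONNREFUSED:
            return None
        raise
    return s

sock = try_connect()
print("reachable" if sock is not None else "unreachable")
if sock is not None:
    sock.close()
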
899e3c9f81a43dcb94e290ce0a86f128bd94effd
|
Apply filter channel published on menu list (channel context processors)
|
opps/channel/context_processors.py
|
opps/channel/context_processors.py
|
# -*- coding: utf-8 -*-
from .models import Channel
def channel_context(request):
return {'opps_menu': Channel.objects.all()}
|
Python
| 0 |
@@ -17,16 +17,50 @@
f-8 -*-%0A
+from django.utils import timezone%0A
from .mo
@@ -119,17 +119,51 @@
-return %7B'
+%22%22%22 Channel context processors%0A %22%22%22%0A
opps
@@ -171,10 +171,10 @@
menu
-':
+ =
Cha
@@ -190,11 +190,139 @@
cts.
-all()
+filter(date_available__lte=timezone.now(),%0A published=True)%0A%0A return %7B'opps_menu': opps_menu
%7D%0A
|
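Assembled from the hunks above, the patched context processor filters the menu queryset on availability date and published flag instead of returning every channel. A reconstruction of the resulting module (whitespace approximate, taken from the diff rather than the repository):

# -*- coding: utf-8 -*-
from django.utils import timezone

from .models import Channel


def channel_context(request):
    """ Channel context processors
    """
    opps_menu = Channel.objects.filter(date_available__lte=timezone.now(),
                                       published=True)

    return {'opps_menu': opps_menu}
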
54451c4030bfeece4ab2157afe1ee3f8f65c4dcb
|
Fix sentry_useremail "duplicate key" error (#16)
|
sentry_ldap_auth/backend.py
|
sentry_ldap_auth/backend.py
|
from __future__ import absolute_import
from django_auth_ldap.backend import LDAPBackend
from django.conf import settings
from sentry.models import (
Organization,
OrganizationMember,
UserOption,
)
class SentryLdapBackend(LDAPBackend):
def get_or_create_user(self, username, ldap_user):
model = super(SentryLdapBackend, self).get_or_create_user(username, ldap_user)
if len(model) < 1:
return model
user = model[0]
user.is_managed = True
try:
from sentry.models import (UserEmail)
except ImportError:
pass
else:
UserEmail.objects.update(
user=user,
email=ldap_user.attrs.get('mail', ' ')[0] or '',
)
# Check to see if we need to add the user to an organization
if not settings.AUTH_LDAP_DEFAULT_SENTRY_ORGANIZATION:
return model
# If the user is already a member of an organization, leave them be
orgs = OrganizationMember.objects.filter(user=user)
if orgs != None and len(orgs) > 0:
return model
# Find the default organization
organizations = Organization.objects.filter(name=settings.AUTH_LDAP_DEFAULT_SENTRY_ORGANIZATION)
if not organizations or len(organizations) < 1:
return model
member_role = getattr(settings, 'AUTH_LDAP_SENTRY_ORGANIZATION_ROLE_TYPE', 'member')
has_global_access = getattr(settings, 'AUTH_LDAP_SENTRY_ORGANIZATION_GLOBAL_ACCESS', False)
# Add the user to the organization with global access
OrganizationMember.objects.create(
organization=organizations[0],
user=user,
role=member_role,
has_global_access=has_global_access,
flags=getattr(OrganizationMember.flags, 'sso:linked'),
)
if not getattr(settings, 'AUTH_LDAP_SENTRY_SUBSCRIBE_BY_DEFAULT', True):
UserOption.objects.set_value(
user=user,
project=None,
key='subscribe_by_default',
value='0',
)
return model
|
Python
| 0.000008 |
@@ -624,24 +624,36 @@
%0A
+ userEmail =
UserEmail.o
@@ -663,42 +663,117 @@
cts.
-update(%0A user=user,
+get(user=user)%0A if not userEmail:%0A userEmail = UserEmail.objects.create(user=user)%0A
%0A
@@ -781,20 +781,26 @@
-
+userEmail.
email=ld
@@ -838,17 +838,16 @@
0%5D or ''
-,
%0A
@@ -843,32 +843,47 @@
''%0A
+userEmail.save(
)%0A%0A # Che
|
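The hunks above replace the blanket UserEmail.objects.update(...) with a per-user lookup before the address is written, which is what stops the duplicate-key insert on sentry_useremail. In stock Django the same intent is usually spelled with get_or_create; a minimal equivalent sketch (the helper name and the blank-address fallback are illustrative, not part of the plugin):

def sync_ldap_email(user, ldap_user):
    # One UserEmail row per user: fetch it if it exists, create it otherwise,
    # so repeated LDAP logins never attempt a second INSERT for the same key.
    from sentry.models import UserEmail

    user_email, _created = UserEmail.objects.get_or_create(user=user)
    user_email.email = ldap_user.attrs.get('mail', [' '])[0] or ''
    user_email.save()
    return user_email
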
c55bf8d153c47500615b8ded3c95957be8ee70a3
|
Refactor JSONResponse views to include ListView
|
froide/helper/json_view.py
|
froide/helper/json_view.py
|
from django import http
from django.views.generic import DetailView
class JSONResponseDetailView(DetailView):
def render_to_json_response(self, context):
"Returns a JSON response containing 'context' as payload"
return self.get_json_response(self.convert_context_to_json(context))
def get_json_response(self, content, **httpresponse_kwargs):
"Construct an `HttpResponse` object."
return http.HttpResponse(content,
content_type='application/json',
**httpresponse_kwargs)
def convert_context_to_json(self, context):
"Convert the context dictionary into a JSON object"
return context['object'].as_json()
def render_to_response(self, context):
if self.format == "json":
return self.render_to_json_response(context)
else:
return super(DetailView, self).render_to_response(context)
|
Python
| 0 |
@@ -60,16 +60,26 @@
tailView
+, ListView
%0A%0Aclass
@@ -94,29 +94,20 @@
onse
-DetailView(DetailView
+Mixin(object
):%0A
@@ -577,158 +577,983 @@
s)%0A%0A
- def convert_context_to_json(self, context):%0A %22Convert the context dictionary into a JSON object%22%0A return context%5B'object'%5D.as_json()
+class JSONResponseListView(ListView, JSONResponseMixin):%0A def get_context_data(self, **kwargs):%0A self.format = %22html%22%0A if %22format%22 in self.kwargs:%0A self.format = self.kwargs%5B'format'%5D%0A context = super(JSONResponseListView, self).get_context_data(**kwargs)%0A return context%0A%0A def convert_context_to_json(self, context):%0A %22Convert the context dictionary into a JSON object%22%0A return %22%5B%25s%5D%22 %25 %22,%22.join(%5Bo.as_json() for o in context%5B'object_list'%5D%5D)%0A%0Aclass JSONResponseDetailView(DetailView, JSONResponseMixin):%0A def convert_context_to_json(self, context):%0A %22Convert the context dictionary into a JSON object%22%0A return context%5B'object'%5D.as_json()%0A%0A def get_context_data(self, **kwargs):%0A self.format = %22html%22%0A if %22format%22 in self.kwargs:%0A self.format = self.kwargs%5B'format'%5D%0A context = super(JSONResponseDetailView, self).get_context_data(**kwargs)%0A return context
%0A%0A
@@ -1769,10 +1769,8 @@
ontext)%0A
-%0A%0A
|
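Decoded, the refactor pulls the JSON plumbing out of the detail view into a JSONResponseMixin and adds a list counterpart beside it. A reconstruction of the resulting module from the hunks (method order and whitespace approximate):

from django import http
from django.views.generic import DetailView, ListView


class JSONResponseMixin(object):
    def render_to_json_response(self, context):
        "Returns a JSON response containing 'context' as payload"
        return self.get_json_response(self.convert_context_to_json(context))

    def get_json_response(self, content, **httpresponse_kwargs):
        "Construct an `HttpResponse` object."
        return http.HttpResponse(content,
                                 content_type='application/json',
                                 **httpresponse_kwargs)


class JSONResponseListView(ListView, JSONResponseMixin):
    def get_context_data(self, **kwargs):
        self.format = "html"
        if "format" in self.kwargs:
            self.format = self.kwargs['format']
        context = super(JSONResponseListView, self).get_context_data(**kwargs)
        return context

    def convert_context_to_json(self, context):
        "Convert the context dictionary into a JSON object"
        return "[%s]" % ",".join([o.as_json() for o in context['object_list']])


class JSONResponseDetailView(DetailView, JSONResponseMixin):
    def convert_context_to_json(self, context):
        "Convert the context dictionary into a JSON object"
        return context['object'].as_json()

    def get_context_data(self, **kwargs):
        self.format = "html"
        if "format" in self.kwargs:
            self.format = self.kwargs['format']
        context = super(JSONResponseDetailView, self).get_context_data(**kwargs)
        return context

    def render_to_response(self, context):
        if self.format == "json":
            return self.render_to_json_response(context)
        else:
            return super(DetailView, self).render_to_response(context)
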
e03103c74a066184178980f1073505724e094394
|
Fix url order
|
stadt/urls.py
|
stadt/urls.py
|
from django.conf import settings, urls
from django.conf.urls import static
from django.contrib import admin
urlpatterns = [
urls.url(r'^stadt/admin/', admin.site.urls),
urls.url(r'^stadt/api/', urls.include('core.api_urls')),
urls.url(r'^stadt/', urls.include('account.urls')),
urls.url(r'^stadt/', urls.include('content.urls')),
urls.url(r'^stadt/', urls.include('entities.urls')),
urls.url(r'^stadt/', urls.include('features.articles.urls')),
urls.url(r'^stadt/', urls.include('features.associations.urls')),
urls.url(r'^stadt/', urls.include('features.conversations.urls')),
urls.url(r'^stadt/', urls.include('features.memberships.urls')),
urls.url(r'^stadt/', urls.include('features.sharing.urls')),
urls.url(r'^stadt/', urls.include('features.subscriptions.urls')),
urls.url(r'^stadt/', urls.include('features.tags.urls')),
urls.url(r'^', urls.include('features.stadt.urls')),
urls.url(r'^', urls.include('features.events.urls')),
urls.url(r'^', urls.include('features.content.urls')),
urls.url(r'^', urls.include('features.groups.urls')),
urls.url(r'^', urls.include('features.gestalten.urls')),
] + static.static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
Python
| 0.999733 |
@@ -979,32 +979,101 @@
.events.urls')),
+%0A%0A # matches /*/, should be included late, groups before gestalten
%0A urls.url(r'
@@ -1099,23 +1099,22 @@
eatures.
-content
+groups
.urls'))
@@ -1150,37 +1150,40 @@
lude('features.g
-roups
+estalten
.urls')),%0A ur
@@ -1167,32 +1167,81 @@
stalten.urls')),
+%0A%0A # matches /*/*/, should be included at last
%0A urls.url(r'
@@ -1263,33 +1263,31 @@
e('features.
-gestal
+con
ten
+t
.urls')),%0A%5D
|
a72f72c16aaf1689fc364311afe3b42a6fed7eae
|
add examples
|
CourierToDovecot.py
|
CourierToDovecot.py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
# -----------------------
# Author : jgo
# Execute a Perl script in every newly created mailbox
# on the Dovecot server.
# -----------------------
import subprocess
import os
import logging
from logging.handlers import RotatingFileHandler
## [Config VARS] --------------------------------------------
# Don't change this value! :)
init_path = os.path.dirname(os.path.realpath(__file__))
# Change this value with your target dir
dest_path = '/var/spool/mail/'
# Change this value with your script path
script_path = '/courier-dovecot-migrate.pl --to-dovecot --convert --recursive'
## ----------------------------------------------------------
## [Logging] ------------------------------------------------
# Create logger object used to write logfile
logger = logging.getLogger()
# Set your Log level to debug => Write everything
logger.setLevel(logging.DEBUG)
# Choose how you want your log format
formatter = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s')
# Create a file (valhalla.log) in "append mode", max size => 30Mb
# and 1 backup.
logfile = 'valhalla.log'
file_handler = RotatingFileHandler(logfile, 'a', 30000000, 1)
# Assign our formatter and set to debug mode.
file_handler.setLevel(logging.DEBUG)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
# Create a second handler to display the log on the console
steam_handler = logging.StreamHandler()
steam_handler.setLevel(logging.DEBUG)
logger.addHandler(steam_handler)
## ----------------------------------------------------------
print '===================================================='
print '[SCRIPT STATUS]'
print '===================================================='
# Create a list with all directory
output = subprocess.check_output(
'ls -R ' + dest_path + ' | grep "[[:alnum:]]\+@[[:alnum:]]\+" | tr ":" "/" | grep "/"', shell=True
)
# Transform the output to a list
output = output.split()
obj = len(output)
# Execute the script into all dir
try:
for path in output:
os.chdir(path)
logger.info('[Job] - Working on %s' % path)
subprocess.call(init_path + script_path, shell=True)
except SyntaxError:
logger.error('SyntaxError, your target already exists.')
print 'Please check your log file SyntaxError detected'
except OSError:
logger.error('OSError, this script can\'t be used on files')
print 'Please check your log file OSError detected'
finally:
os.chdir(init_path)
print ''
print 'Number of objects handled : %s' % obj
print 'Log file : %s' % logfile
print '===================================================='
|
Python
| 0 |
@@ -473,16 +473,46 @@
rget dir
+ (example : '/var/spool/mail')
%0Adest_pa
@@ -576,16 +576,40 @@
ipt path
+ (example: '/script.sh')
%0Ascript_
|
83ed8a4fd258f351da2ea358613ff57dadbf03f6
|
Remove blank line
|
junction/proposals/permissions.py
|
junction/proposals/permissions.py
|
# -*- coding: utf-8 -*-
# Third Party Stuff
from django.core.exceptions import PermissionDenied
# Junction Stuff
from junction.conferences.models import ConferenceProposalReviewer
from junction.base.constants import ConferenceStatus
from .models import ProposalSectionReviewer
def is_proposal_voting_allowed(proposal):
return proposal.conference.status != ConferenceStatus.SCHEDULE_PUBLISHED
def is_proposal_author(user, proposal):
return user.is_authenticated() and proposal.author == user
def is_proposal_reviewer(user, conference):
authenticated = user.is_authenticated()
is_reviewer = ConferenceProposalReviewer.objects.filter(
reviewer=user.id, conference=conference, active=True).exists()
return authenticated and is_reviewer
def is_proposal_section_reviewer(user, conference, proposal):
return user.is_authenticated() and ProposalSectionReviewer.objects.filter(
conference_reviewer__reviewer=user,
conference_reviewer__conference=conference,
proposal_section=proposal.proposal_section,
active=True).exists()
def is_proposal_author_or_proposal_reviewer(user, conference, proposal):
reviewer = is_proposal_reviewer(user, conference)
author = is_proposal_author(user, proposal)
return reviewer or author
def is_proposal_author_or_proposal_section_reviewer(user,
conference, proposal):
return is_proposal_author(user, proposal) or \
is_proposal_section_reviewer(user, conference, proposal)
def is_proposal_author_or_permisson_denied(user, proposal):
if is_proposal_author(user, proposal):
return True
raise PermissionDenied
def is_conference_moderator(user, conference):
if user.is_superuser:
return True
users = [mod.moderator for mod in conference.moderators.all()]
return user in users
|
Python
| 0.999999 |
@@ -1881,9 +1881,8 @@
n users%0A
-%0A
|
30d6f1e1233d3ff832b6f771f2e381a833c0005d
|
Test customer/non-customer combo in new_domain_subscription
|
corehq/apps/accounting/tests/test_new_domain_subscription.py
|
corehq/apps/accounting/tests/test_new_domain_subscription.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
from corehq.apps.accounting.exceptions import NewSubscriptionError
from corehq.apps.accounting.models import (
Subscription, BillingAccount, DefaultProductPlan, SoftwarePlanEdition,
SubscriptionAdjustmentMethod, SubscriptionType, EntryPoint)
from corehq.apps.accounting.tests import generator
from corehq.apps.accounting.tests.base_tests import BaseAccountingTest
from corehq.apps.domain.models import Domain
class TestNewDomainSubscription(BaseAccountingTest):
def setUp(self):
super(TestNewDomainSubscription, self).setUp()
self.domain = Domain(
name="test-domain-sub",
is_active=True,
)
self.domain.save()
self.domain2 = Domain(
name="test-domain-sub2",
is_active=True,
)
self.domain2.save()
self.admin_user_name = generator.create_arbitrary_web_user_name()
self.account = BillingAccount.get_or_create_account_by_domain(
self.domain.name, created_by=self.admin_user_name)[0]
self.account2 = BillingAccount.get_or_create_account_by_domain(
self.domain2.name, created_by=self.admin_user_name)[0]
self.standard_plan = DefaultProductPlan.get_default_plan_version(edition=SoftwarePlanEdition.STANDARD)
self.advanced_plan = DefaultProductPlan.get_default_plan_version(edition=SoftwarePlanEdition.ADVANCED)
def tearDown(self):
self.domain.delete()
self.domain2.delete()
super(TestNewDomainSubscription, self).tearDown()
def test_new_susbscription_in_future(self):
"""
Test covers issue that came up with commcare-hq/PR#3725.
"""
today = datetime.date.today()
in_30_days = today + datetime.timedelta(days=30)
week_after_30 = in_30_days + datetime.timedelta(days=7)
next_year = week_after_30 + datetime.timedelta(days=400)
# mimic domain signing up for trial
trial_subscription = Subscription.new_domain_subscription(
self.account, self.domain.name, self.advanced_plan,
date_end=in_30_days,
adjustment_method=SubscriptionAdjustmentMethod.TRIAL,
is_trial=True,
)
trial_subscription.is_active = True
trial_subscription.save()
subscription = Subscription.new_domain_subscription(
self.account2, self.domain.name, self.standard_plan,
web_user=self.admin_user_name,
date_start=week_after_30, date_end=next_year,
)
final_sub = Subscription.visible_objects.get(pk=subscription.id)
self.assertEqual(final_sub.date_start, week_after_30)
self.assertEqual(final_sub.date_end, next_year)
def test_conflicting_dates(self):
"""
Tests creating a subscription with conflicting dates with an existing
subscription
"""
today = datetime.date.today()
one_week = today + datetime.timedelta(days=7)
one_month = today + datetime.timedelta(days=30)
Subscription.new_domain_subscription(
self.account, self.domain.name, self.advanced_plan,
date_start=one_week,
date_end=one_month,
)
# conflicting subscription with no date end.
self.assertRaises(NewSubscriptionError, lambda: Subscription.new_domain_subscription(
self.account, self.domain.name, self.standard_plan,
))
# conflicting subscription with overlapping end date
self.assertRaises(NewSubscriptionError, lambda: Subscription.new_domain_subscription(
self.account, self.domain.name, self.standard_plan,
date_end=one_week + datetime.timedelta(days=1)
))
# conflicting subscription with overlapping start date
self.assertRaises(NewSubscriptionError, lambda: Subscription.new_domain_subscription(
self.account, self.domain.name, self.standard_plan,
date_start=one_month - datetime.timedelta(days=1)
))
# subscription without overlapping dates before
# bound future subscription
sub_before = Subscription.new_domain_subscription(
self.account, self.domain.name, self.standard_plan,
date_end=one_week,
)
# subscription without overlapping dates after
# bound future subscription
sub_after = Subscription.new_domain_subscription(
self.account, self.domain.name, self.standard_plan,
date_start=one_month,
)
def test_update_billing_account_entry_point_self_serve(self):
self_serve_subscription = Subscription.new_domain_subscription(
self.account, self.domain.name, self.advanced_plan,
web_user=self.admin_user_name, service_type=SubscriptionType.PRODUCT
)
self.assertEqual(self_serve_subscription.account.entry_point, EntryPoint.SELF_STARTED)
def test_update_billing_account_entry_point_contracted(self):
contracted_subscription = Subscription.new_domain_subscription(
self.account, self.domain.name, self.advanced_plan,
web_user=self.admin_user_name, service_type=SubscriptionType.IMPLEMENTATION
)
self.assertNotEqual(contracted_subscription.account.entry_point, EntryPoint.SELF_STARTED)
def test_dont_update_billing_account_if_set(self):
self.account.entry_point = EntryPoint.CONTRACTED
self.account.save()
subscription = Subscription.new_domain_subscription(
self.account, self.domain.name, self.advanced_plan,
web_user=self.admin_user_name, service_type=SubscriptionType.IMPLEMENTATION
)
self.assertEqual(subscription.account.entry_point, EntryPoint.CONTRACTED)
def test_exceeding_max_domains_prevents_new_domains(self):
self.advanced_plan.plan.max_domains = 1
Subscription.new_domain_subscription(
self.account, self.domain.name, self.advanced_plan
)
self.assertRaises(NewSubscriptionError, lambda: Subscription.new_domain_subscription(
self.account, self.domain2.name, self.advanced_plan
))
|
Python
| 0 |
@@ -6246,20 +6246,604 @@
ced_plan%0A ))%0A
+%0A def test_customer_plan_not_added_to_regular_account(self):%0A self.advanced_plan.plan.is_customer_software_plan = True%0A self.assertRaises(NewSubscriptionError, lambda: Subscription.new_domain_subscription(%0A self.account, self.domain.name, self.advanced_plan%0A ))%0A%0A def test_regular_plan_not_added_to_customer_account(self):%0A self.account.is_customer_billing_account = True%0A self.assertRaises(NewSubscriptionError, lambda: Subscription.new_domain_subscription(%0A self.account, self.domain.name, self.advanced_plan%0A ))
|
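Decoded, the appended hunk adds two guards to TestNewDomainSubscription: a customer-level software plan may not be attached to a regular billing account, and a regular plan may not be attached to a customer billing account. The two methods, reconstructed from the diff as they would sit inside the class above:

    def test_customer_plan_not_added_to_regular_account(self):
        self.advanced_plan.plan.is_customer_software_plan = True
        self.assertRaises(NewSubscriptionError, lambda: Subscription.new_domain_subscription(
            self.account, self.domain.name, self.advanced_plan
        ))

    def test_regular_plan_not_added_to_customer_account(self):
        self.account.is_customer_billing_account = True
        self.assertRaises(NewSubscriptionError, lambda: Subscription.new_domain_subscription(
            self.account, self.domain.name, self.advanced_plan
        ))
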
f8ea5ef37280366b4b3991442e406952bb0575b3
|
Create calculate_cosine_distance.py
|
k-NN/calculate_cosine_distance.py
|
k-NN/calculate_cosine_distance.py
|
'''
Calculates the cosine distance for an input data
'''
import math
import numpy as np
import scipy.io
__author__ = """Mari Wahl"""
def cosineDistance(x, y):
''' This function computes the cosine distance between feature vectors
x and y. This distance is frequently used for text classification.
It varies between 0 and 1. The distance is 0 if x==y.
'''
denom = math.sqrt(sum(x**2)*sum(y**2))
dist = 1.0-(np.dot(x, y.conj().transpose()))/denom
return round(dist, 6)
def print_to_file(distances):
with open('cos_distances.dat', 'w') as f:
for i, col in enumerate(distances):
f.write('# distance for example %d to others\n' %(i+1))
for item in col:
f.write(str(item) + ' ')
f.write('\n')
def main():
f = scipy.io.loadmat('cvdataset.mat')
traindata = f['traindata']
trainlabels = f['trainlabels']
testdata = f['testdata']
evaldata = f['evaldata']
testlabels = f['testlabels']
distances = []
for i in range(len(trainlabels)):
first_train_example_class1 = traindata[i]
aux = []
for j in range (len(trainlabels)):
first_train_example_class2 = traindata[j]
d = cosineDistance(first_train_example_class1, first_train_example_class2)
aux.append(d)
distances.append(aux)
print_to_file(distances)
if __name__ == '__main__':
main()
|
Python
| 0.00003 |
@@ -125,13 +125,25 @@
Mari
- Wahl
+na von Steinkirch
%22%22%22%0A
|
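Only the author string changes in this record, but the cosineDistance helper itself is easy to sanity-check: identical vectors give 0, orthogonal ones give 1, and for the non-negative feature vectors typical of text data the value stays within [0, 1] as the docstring promises. A small assumed usage sketch with a simplified real-vector variant:

import numpy as np

def cosine_distance(x, y):
    # 1 - cos(theta) between real vectors x and y, as in the script above.
    denom = np.sqrt(np.sum(x ** 2) * np.sum(y ** 2))
    return round(1.0 - float(np.dot(x, y)) / denom, 6)

a = np.array([1.0, 0.0])
b = np.array([0.0, 1.0])
print(cosine_distance(a, a))   # 0.0 -> same direction
print(cosine_distance(a, b))   # 1.0 -> orthogonal
print(cosine_distance(a, -a))  # 2.0 -> opposite direction (negative components)
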
cdefb05cfb315c2885e55831bfc31775b5a5d230
|
Handle time drift between databases
|
custom/icds_reports/management/commands/stale_data_in_ucr.py
|
custom/icds_reports/management/commands/stale_data_in_ucr.py
|
import inspect
from datetime import datetime
from django.core.management.base import BaseCommand, CommandError
from django.db import connections
import attr
import dateutil
from dimagi.utils.chunked import chunked
from corehq.apps.hqadmin.management.commands.stale_data_in_es import (
get_sql_case_data_for_db,
)
from corehq.apps.userreports.util import get_table_name
from corehq.form_processor.models import XFormInstanceSQL
from corehq.form_processor.utils import should_use_sql_backend
from corehq.sql_db.connections import get_icds_ucr_citus_db_alias
from corehq.sql_db.util import get_db_aliases_for_partitioned_query
@attr.s
class RunConfig(object):
domain = attr.ib()
table_id = attr.ib()
start_date = attr.ib()
end_date = attr.ib()
case_type = attr.ib()
xmlns = attr.ib()
@property
def run_with_forms(self):
return bool(self.xmlns)
class Command(BaseCommand):
"""
Returns list of (doc_id, doc_type, doc_subtype, ucr_insert_on, modified_on)
tuples for all househould cases that are not found static-household_cases UCR.
Can be used in conjunction with republish_doc_changes
1. Generate tuples not updated in ES with extra debug columns
$ ./manage.py stale_data_in_househould_ucr <DOMAIN> --start 2019-09-19 --end 2019-09-28 > stale_ids.txt
2. Republish case changes
$ ./manage.py republish_doc_changes <DOMAIN> stale_ids.txt
"""
help = inspect.cleandoc(__doc__).split('\n')[0]
def add_arguments(self, parser):
parser.add_argument('domain')
parser.add_argument('table_id')
parser.add_argument(
'--case-type',
action='store',
help='Mutually exclusive with XMLNS',
)
parser.add_argument(
'--xmlns',
action='store',
help='Mutually exclusive with case_type',
)
parser.add_argument(
'--start',
action='store',
help='Only include data modified after this date',
)
parser.add_argument(
'--end',
action='store',
help='Only include data modified before this date',
)
def handle(self, domain, table_id, **options):
if options['case_type'] and options['xmlns']:
raise CommandError('You may only specify one of case_type or XMLNS')
start = dateutil.parser.parse(options['start']) if options['start'] else datetime(2010, 1, 1)
end = dateutil.parser.parse(options['end']) if options['end'] else datetime.utcnow()
run_config = RunConfig(domain, table_id, start, end, options['case_type'], options['xmlns'])
if not should_use_sql_backend(run_config.domain):
raise CommandError('This command only supports SQL domains.')
for doc_id, doc_type, ucr_date, primary_date in _get_stale_data(run_config):
print(f"{doc_id},{doc_type},{ucr_date},{primary_date}")
def _get_stale_data(run_config):
for db in get_db_aliases_for_partitioned_query():
print(f"Starting db {db}")
matching_records_for_db = _get_primary_data_for_db(db, run_config)
chunk_size = 1000
for chunk in chunked(matching_records_for_db, chunk_size):
doc_ids = [val[0] for val in chunk]
ucr_insertion_dates = _get_ucr_insertion_dates(run_config.domain, run_config.table_id, doc_ids)
for doc_id, doc_type, sql_modified_on in chunk:
ucr_insert_date = ucr_insertion_dates.get(doc_id)
if not ucr_insert_date or (ucr_insert_date < sql_modified_on):
ucr_date_string = ucr_insert_date.isoformat() if ucr_insert_date else ''
yield (doc_id, doc_type, ucr_date_string, sql_modified_on.isoformat())
def _get_ucr_insertion_dates(domain, table_id, doc_ids):
table_name = get_table_name(domain, table_id)
with connections[get_icds_ucr_citus_db_alias()].cursor() as cursor:
query = f'''
SELECT
doc_id,
inserted_at
FROM "{table_name}"
WHERE doc_id = ANY(%(doc_ids)s);
'''
cursor.execute(query, {'doc_ids': doc_ids})
return dict(cursor.fetchall())
def _get_primary_data_for_db(db, run_config):
if run_config.run_with_forms:
matching_xforms = XFormInstanceSQL.objects.using(db).filter(
domain=run_config.domain,
received_on__gte=run_config.start_date,
received_on__lte=run_config.end_date,
state=XFormInstanceSQL.NORMAL,
)
if run_config.xmlns:
matching_xforms = matching_xforms.filter(xmlns=run_config.xmlns)
return matching_xforms.values_list('form_id', 'xmlns', 'received_on')
else:
return get_sql_case_data_for_db(db, run_config)
|
Python
| 0.000003 |
@@ -37,16 +37,27 @@
datetime
+, timedelta
%0A%0Afrom d
@@ -3570,24 +3570,25 @@
if
+(
not ucr_inse
@@ -3598,13 +3598,123 @@
date
- or (
+%0A # Handle small time drift between databases%0A or (sql_modified_on -
ucr_
@@ -3728,26 +3728,32 @@
date
+)
%3C
-sql_modified_on
+timedelta(seconds=1)
):%0A
|
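The hunks above rework the staleness check so that a small clock difference between the UCR database and the primary form/case database is tolerated rather than reported as missing data. As a generic illustration of a drift-tolerant timestamp comparison (not a verbatim copy of the patched condition), one might write:

from datetime import datetime, timedelta

DRIFT = timedelta(seconds=1)  # assumed tolerance, matching the one-second window in the patch

def is_stale(sql_modified_on, ucr_insert_date, drift=DRIFT):
    """Treat a row as stale only when the UCR write is missing or clearly
    older than the primary write, ignoring differences within `drift`."""
    if ucr_insert_date is None:
        return True
    return ucr_insert_date < sql_modified_on - drift

now = datetime.utcnow()
print(is_stale(now, now - timedelta(milliseconds=200)))  # False: within the drift window
print(is_stale(now, now - timedelta(seconds=5)))         # True: genuinely behind
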
b73f9e058b09e86e4f78849b6325b679c05d052f
|
Updated the hypothesis to be more accurate
|
solidity/hypothesis/test.py
|
solidity/hypothesis/test.py
|
import json
from hypothesis import given, assume, example
import hypothesis.strategies as st
import math
import unittest
from web3 import Web3, TestRPCProvider, RPCProvider
ACCURACY = 0.000001
def calculatePurchaseReturn(S,R,F,E):
if F== 100:
return S*E/R
return int(S * ( math.pow(1.0 + float(E)/float(R), float(F)/100.0) - 1.0 ))
def calculateSaleReturn(S,R,F,T):
"""
E = R(1 - ((1 - T / S) ^ (1 / F))
"""
if (T > S):
return 0
if F == 100:
return int(R- R*T/S)
return int(R * ( 1.0 - math.pow(float(S-T)/float(S) , (100.0/F))))
def fixedLogn(x, n):
return int(math.log(x >> 32, n)) << 32
def rationalLn(numerator, denominator):
return fixedLogn(numerator << 32, math.e) - fixedLogn(denominator << 32, math.e)
class TestFormula(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.web3 = Web3(RPCProvider())
abi = json.loads(open('../contracts/build/BancorFormula.abi').read())
bin = open('../contracts/build/BancorFormula.bin').read()
formula = cls.web3.eth.contract(abi=abi, bytecode=bin)
tx = formula.deploy()
cls.formula = formula(cls.web3.eth.getTransactionReceipt(tx)['contractAddress'])
@given(st.integers(min_value=0x100000000, max_value=1<<256-1))
@example(0x100000000)
def testFixedLog2(self, x):
expectedReturn = fixedLogn(x, 2)
actualReturn = self.formula.call().fixedLog2(x)
if expectedReturn == 0:
self.assertEqual(expectedReturn, actualReturn)
return
error = abs(expectedReturn - actualReturn) / expectedReturn
self.assertLessEqual(error, 1e-9, "expectedReturn: %d, actualReturn: %d, error: %fppm" % (expectedReturn, actualReturn, error * 1000000))
@given(st.integers(min_value=0x100000000, max_value=1<<256-1))
@example(0x100000000)
def testFixedLoge(self, x):
expectedReturn = fixedLogn(x, math.e)
actualReturn = self.formula.call().fixedLoge(x)
if expectedReturn == 0:
self.assertEqual(expectedReturn, actualReturn)
return
error = abs(expectedReturn - actualReturn) / expectedReturn
self.assertLessEqual(error, 1e-9, "expectedReturn: %d, actualReturn: %d, error: %fppm" % (expectedReturn, actualReturn, error * 1000000))
@given(st.integers(min_value=2, max_value=1<<224-1), st.integers(min_value=2, max_value=1<<224-1))
@example(2, 1)
def testLn(self, numerator, denominator):
assume(denominator <= numerator)
expectedReturn = rationalLn(numerator, denominator)
actualReturn = self.formula.call().ln(numerator, denominator)
if expectedReturn == 0:
self.assertEqual(expectedReturn, actualReturn)
return
error = abs(expectedReturn - actualReturn) / expectedReturn
self.assertLessEqual(error, 1e-9, "expectedReturn: %d, actualReturn: %d, error: %fppm" % (expectedReturn, actualReturn, error * 1000000))
@given(st.integers(min_value=100), st.integers(min_value=100), st.integers(min_value=1, max_value=100), st.integers(min_value=0))
@example(1102573407846, 1102573407846, 30, 86426)
def testPurchaseReturn(self, supply, reserveBalance, reserveRatio, depositAmount):
# Assume the supply is no more than 8 OOM greater than the reserve balance
assume(supply <= reserveBalance * 100000000)
# Assume the deposit amount is no more than 8 OOM greater than the reserve balance
assume(depositAmount <= reserveBalance * 100000000)
actualReturn = self.formula.call().calculatePurchaseReturn(supply, reserveBalance, reserveRatio, depositAmount)
expectedReturn = calculatePurchaseReturn(supply, reserveBalance, reserveRatio, depositAmount)
self.assertLessEqual(actualReturn, expectedReturn)
if expectedReturn > actualReturn:
error = (expectedReturn - actualReturn) / expectedReturn
self.assertLessEqual(error, ACCURACY, "Expected %d but got %d, difference of %f ppm" % (expectedReturn, actualReturn, error * 1000000.0))
@given(st.integers(min_value=100), st.integers(min_value=100), st.integers(min_value=1, max_value=100), st.integers(min_value=0))
def testSaleReturn(self, supply, reserveBalance, reserveRatio, sellAmount):
assume(sellAmount < supply)
actualReturn = self.formula.call().calculateSaleReturn(supply, reserveBalance, reserveRatio, sellAmount)
expectedReturn = calculateSaleReturn(supply, reserveBalance, reserveRatio, sellAmount)
self.assertLessEqual(actualReturn, expectedReturn)
if expectedReturn > actualReturn:
error = (expectedReturn - actualReturn) / expectedReturn
self.assertLessEqual(error, ACCURACY, "Expected %d but got %d, difference of %f ppm" % (expectedReturn, actualReturn, error * 1000000.0))
if __name__ == '__main__':
unittest.main()
|
Python
| 0.999996 |
@@ -587,25 +587,24 @@
00.0/F))))%0A%0A
-%0A
def fixedLog
@@ -610,14 +610,33 @@
gn(x
+
, n):%0A
+ one = 1 %3C%3C 32%0A
@@ -646,16 +646,17 @@
urn int(
+
math.log
@@ -660,27 +660,122 @@
log(
-x %3E%3E 32, n)) %3C%3C 32%0A
+ float(x) / one, n) * one )%0A%0Adef fixedLogE(x):%0A one = 1 %3C%3C 32%0A return int( math.log( float(x) / one) * one )
%0A%0Ade
@@ -831,17 +831,17 @@
fixedLog
-n
+E
(numerat
@@ -848,24 +848,16 @@
or %3C%3C 32
-, math.e
) - fixe
@@ -860,17 +860,17 @@
fixedLog
-n
+E
(denomin
@@ -879,26 +879,17 @@
or %3C%3C 32
-, math.e)%0A
+)
%0A%0Aclass
|
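After the patch the reference helpers treat their argument as a 32.32 fixed-point number: the integer is divided by 2**32 before the logarithm and the result is scaled back up, keeping the fractional bits instead of truncating them on both sides as before. A small sketch of that convention (names are assumed, not taken from the contract):

import math

ONE = 1 << 32  # 32.32 fixed point: the low 32 bits are the fractional part

def to_fixed(value):
    return int(value * ONE)

def fixed_log_e(x_fixed):
    # Natural log of a 32.32 fixed-point value, returned in the same format.
    return int(math.log(float(x_fixed) / ONE) * ONE)

print(fixed_log_e(to_fixed(math.e)) / ONE)  # ~1.0, i.e. ln(e)
print(fixed_log_e(to_fixed(8.0)) / ONE)     # ~2.0794, i.e. ln(8)
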
7128bd231cec6533be893e53c2c874ae46001231
|
correct swagger annotation problem for SupportLevelPage
|
pnc_cli/swagger_client/models/support_level_page.py
|
pnc_cli/swagger_client/models/support_level_page.py
|
# coding: utf-8
"""
Copyright 2015 SmartBear Software
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ref: https://github.com/swagger-api/swagger-codegen
"""
from datetime import datetime
from pprint import pformat
from six import iteritems
class SupportLevelPage(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self):
"""
SupportLevelPage - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'page_index': 'int',
'page_size': 'int',
'total_pages': 'int',
'content': 'list[str]'
}
self.attribute_map = {
'page_index': 'pageIndex',
'page_size': 'pageSize',
'total_pages': 'totalPages',
'content': 'content'
}
self._page_index = None
self._page_size = None
self._total_pages = None
self._content = None
@property
def page_index(self):
"""
Gets the page_index of this SupportLevelPage.
:return: The page_index of this SupportLevelPage.
:rtype: int
"""
return self._page_index
@page_index.setter
def page_index(self, page_index):
"""
Sets the page_index of this SupportLevelPage.
:param page_index: The page_index of this SupportLevelPage.
:type: int
"""
self._page_index = page_index
@property
def page_size(self):
"""
Gets the page_size of this SupportLevelPage.
:return: The page_size of this SupportLevelPage.
:rtype: int
"""
return self._page_size
@page_size.setter
def page_size(self, page_size):
"""
Sets the page_size of this SupportLevelPage.
:param page_size: The page_size of this SupportLevelPage.
:type: int
"""
self._page_size = page_size
@property
def total_pages(self):
"""
Gets the total_pages of this SupportLevelPage.
:return: The total_pages of this SupportLevelPage.
:rtype: int
"""
return self._total_pages
@total_pages.setter
def total_pages(self, total_pages):
"""
Sets the total_pages of this SupportLevelPage.
:param total_pages: The total_pages of this SupportLevelPage.
:type: int
"""
self._total_pages = total_pages
@property
def content(self):
"""
Gets the content of this SupportLevelPage.
:return: The content of this SupportLevelPage.
:rtype: list[str]
"""
return self._content
@content.setter
def content(self, content):
"""
Sets the content of this SupportLevelPage.
:param content: The content of this SupportLevelPage.
:type: list[str]
"""
allowed_values = ["UNRELEASED", "EARLYACCESS", "SUPPORTED", "EXTENDED_SUPPORT", "EOL"]
if content not in allowed_values:
raise ValueError(
"Invalid value for `content`, must be one of {0}"
.format(allowed_values)
)
self._content = content
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, datetime):
result[attr] = str(value.date())
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
|
Python
| 0.000002 |
@@ -3737,16 +3737,17 @@
alues =
+%5B
%5B%22UNRELE
@@ -3807,16 +3807,17 @@
, %22EOL%22%5D
+%5D
%0A
|
2cb57dc3e7f913b9dbaec6ef915f0ea72a6efe6e
|
Update move_item_tax_to_item_tax_template.py
|
erpnext/patches/v12_0/move_item_tax_to_item_tax_template.py
|
erpnext/patches/v12_0/move_item_tax_to_item_tax_template.py
|
import frappe
import json
from six import iteritems
def execute():
if "tax_type" not in frappe.db.get_table_columns("Item Tax"):
return
old_item_taxes = {}
item_tax_templates = {}
rename_template_to_untitled = []
for d in frappe.db.sql("""select parent as item_code, tax_type, tax_rate from `tabItem Tax`""", as_dict=1):
old_item_taxes.setdefault(d.item_code, [])
old_item_taxes[d.item_code].append(d)
frappe.reload_doc("accounts", "doctype", "item_tax_template_detail", force=1)
frappe.reload_doc("accounts", "doctype", "item_tax_template", force=1)
frappe.reload_doc("stock", "doctype", "item", force=1)
frappe.reload_doc("stock", "doctype", "item_tax", force=1)
frappe.reload_doc("selling", "doctype", "quotation_item", force=1)
frappe.reload_doc("selling", "doctype", "sales_order_item", force=1)
frappe.reload_doc("stock", "doctype", "delivery_note_item", force=1)
frappe.reload_doc("accounts", "doctype", "sales_invoice_item", force=1)
frappe.reload_doc("buying", "doctype", "supplier_quotation_item", force=1)
frappe.reload_doc("buying", "doctype", "purchase_order_item", force=1)
frappe.reload_doc("stock", "doctype", "purchase_receipt_item", force=1)
frappe.reload_doc("accounts", "doctype", "purchase_invoice_item", force=1)
frappe.reload_doc("accounts", "doctype", "accounts_settings", force=1)
# for each item that have item tax rates
for item_code in old_item_taxes.keys():
# make current item's tax map
item_tax_map = {}
for d in old_item_taxes[item_code]:
item_tax_map[d.tax_type] = d.tax_rate
item_tax_template_name = get_item_tax_template(item_tax_templates, rename_template_to_untitled,
item_tax_map, item_code)
# update the item tax table
item = frappe.get_doc("Item", item_code)
item.set("taxes", [])
item.append("taxes", {"item_tax_template": item_tax_template_name, "tax_category": ""})
item.save()
doctypes = [
'Quotation', 'Sales Order', 'Delivery Note', 'Sales Invoice',
'Supplier Quotation', 'Purchase Order', 'Purchase Receipt', 'Purchase Invoice'
]
for dt in doctypes:
for d in frappe.db.sql("""select name, parent, item_code, item_tax_rate from `tab{0} Item`
where ifnull(item_tax_rate, '') not in ('', '{{}}')""".format(dt), as_dict=1):
item_tax_map = json.loads(d.item_tax_rate)
item_tax_template = get_item_tax_template(item_tax_templates, rename_template_to_untitled,
item_tax_map, d.item_code, d.parent)
frappe.db.set_value(dt + " Item", d.name, "item_tax_template", item_tax_template)
idx = 1
for oldname in rename_template_to_untitled:
frappe.rename_doc("Item Tax Template", oldname, "Untitled {}".format(idx))
idx += 1
settings = frappe.get_single("Accounts Settings")
settings.add_taxes_from_item_tax_template = 0
settings.determine_address_tax_category_from = "Billing Address"
settings.save()
def get_item_tax_template(item_tax_templates, rename_template_to_untitled, item_tax_map, item_code, parent=None):
# search for previously created item tax template by comparing tax maps
for template, item_tax_template_map in iteritems(item_tax_templates):
if item_tax_map == item_tax_template_map:
if not parent:
rename_template_to_untitled.append(template)
return template
# if no item tax template found, create one
item_tax_template = frappe.new_doc("Item Tax Template")
item_tax_template.title = "{}--{}".format(parent, item_code) if parent else "Item-{}".format(item_code)
for tax_type, tax_rate in iteritems(item_tax_map):
if not frappe.db.exists("Account", tax_type):
parts = tax_type.strip().split(" - ")
account_name = " - ".join(parts[:-1])
company = frappe.db.get_value("Company", filters={"abbr": parts[-1]})
parent_account = frappe.db.get_value("Account",
filters={"account_type": "Tax", "root_type": "Liability"}, "parent_account")
frappe.get_doc({
"doctype": "Account",
"account_name": account_name,
"company": company,
"account_type": "Tax",
"parent_account": parent_account
}).insert()
item_tax_template.append("taxes", {"tax_type": tax_type, "tax_rate": tax_rate})
item_tax_templates.setdefault(item_tax_template.title, {})
item_tax_templates[item_tax_template.title][tax_type] = tax_rate
item_tax_template.save()
return item_tax_template.name
|
Python
| 0.000003 |
@@ -3791,16 +3791,26 @@
lity%22%7D,
+fieldname=
%22parent_
|
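The one-keyword hunk above matters because the original call passes the string "parent_account" positionally after the filters= keyword, which Python rejects at compile time (positional argument follows keyword argument); naming it fieldname= is what makes the call parse. A runnable illustration with a stand-in that assumes frappe.db.get_value's calling convention:

def get_value(doctype, filters=None, fieldname="name"):
    # stand-in mirroring the assumed signature of frappe.db.get_value
    return (doctype, filters, fieldname)

# get_value("Account", filters={...}, "parent_account")  # SyntaxError in any Python version
print(get_value("Account",
                filters={"account_type": "Tax", "root_type": "Liability"},
                fieldname="parent_account"))
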
93ca5c4832c2037bdf4b504328c304835d2776a4
|
Fix wrong byte literal in FIPS U2F commands test
|
test/on_yubikey/test_fips_u2f_commands.py
|
test/on_yubikey/test_fips_u2f_commands.py
|
import struct
import unittest
from fido2.hid import (CTAPHID)
from ykman.util import (TRANSPORT)
from ykman.driver_fido import (FIPS_U2F_CMD)
from .util import (DestructiveYubikeyTestCase, is_fips, open_device)
HID_CMD = 0x03
P1 = 0
P2 = 0
@unittest.skipIf(not is_fips(), 'FIPS YubiKey required.')
class TestFipsU2fCommands(DestructiveYubikeyTestCase):
def test_echo_command(self):
dev = open_device(transports=TRANSPORT.FIDO)
res = dev.driver._dev.call(
CTAPHID.MSG,
struct.pack(
'>HBBBH6s',
FIPS_U2F_CMD.ECHO, P1, P2, 0, 6, b'012345'
))
self.assertEqual(res, b'012345\x90\x00')
def test_pin_commands(self):
# Assumes PIN is 012345 or not set at beginning of test
# Sets PIN to 012345
dev = open_device(transports=TRANSPORT.FIDO)
verify_res1 = dev.driver._dev.call(
CTAPHID.MSG,
struct.pack(
'>HBBBH6s',
FIPS_U2F_CMD.VERIFY_PIN, P1, P2, 0, 6, b'012345'
))
if verify_res1 == b'\x90\x90':
res = dev.driver._dev.call(
CTAPHID.MSG,
struct.pack(
'>HBBBHB6s6s',
FIPS_U2F_CMD.SET_PIN, P1, P2, 0, 13, 6, b'012345', b'012345'
))
else:
res = dev.driver._dev.call(
CTAPHID.MSG,
struct.pack(
'>HBBBHB6s',
FIPS_U2F_CMD.SET_PIN, P1, P2, 0, 7, 6, b'012345'
))
verify_res2 = dev.driver._dev.call(
CTAPHID.MSG,
struct.pack(
'>HBBBH6s',
FIPS_U2F_CMD.VERIFY_PIN, P1, P2, 0, 6, b'543210'
))
verify_res3 = dev.driver._dev.call(
CTAPHID.MSG,
struct.pack(
'>HBBBH6s',
FIPS_U2F_CMD.VERIFY_PIN, P1, P2, 0, 6, b'012345'
))
self.assertIn(verify_res1, [b'\x90\x00', b'\x69\x86']) # OK / not set
self.assertEqual(res, b'\x90\x00') # Success
self.assertEqual(verify_res2, b'\x63\xc0') # Incorrect PIN
self.assertEqual(verify_res3, b'\x90\x00') # Success
def test_reset_command(self):
dev = open_device(transports=TRANSPORT.FIDO)
res = dev.driver._dev.call(
CTAPHID.MSG,
struct.pack(
'>HBB',
FIPS_U2F_CMD.RESET, P1, P2
))
# 0x6985: Touch required
# 0x6986: Power cycle required
# 0x9000: Success
self.assertIn(res, [b'\x69\x85', b'\x69\x86', b'\x90\x00'])
def test_verify_fips_mode_command(self):
dev = open_device(transports=TRANSPORT.FIDO)
res = dev.driver._dev.call(
CTAPHID.MSG,
struct.pack(
'>HBB',
FIPS_U2F_CMD.VERIFY_FIPS_MODE, P1, P2
))
# 0x6a81: Function not supported (PIN not set - not FIPS mode)
# 0x9000: Success (PIN set - FIPS mode)
self.assertIn(res, [b'\x6a\x81', b'\x90\x00'])
|
Python
| 0.000627 |
@@ -1088,17 +1088,17 @@
b'%5Cx90%5Cx
-9
+0
0':%0A
|
f71205edb15cb2fc760912be2adea7c608fd84ee
|
Use parsed.netloc for the HTTP Host header
|
thrift/lib/py/transport/THttpClient.py
|
thrift/lib/py/transport/THttpClient.py
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from .TTransport import *
import os
import socket
import sys
import warnings
if sys.version_info[0] >= 3:
from io import BytesIO as StringIO
from urllib import parse
from http import client
urlparse = parse
urllib = parse
httplib = client
else:
from cStringIO import StringIO
import urlparse
import httplib
import urllib
class THttpClient(TTransportBase):
"""Http implementation of TTransport base."""
def __init__(self, uri_or_host, port=None, path=None):
"""THttpClient supports two different types constructor parameters.
THttpClient(host, port, path) - deprecated
THttpClient(uri)
Only the second supports https."""
if port is not None:
warnings.warn(
"Please use the THttpClient('http://host:port/path') syntax",
DeprecationWarning,
stacklevel=2)
self.host = uri_or_host
self.port = port
assert path
self.path = path
self.scheme = 'http'
else:
parsed = urlparse.urlparse(uri_or_host)
self.scheme = parsed.scheme
assert self.scheme in ('http', 'https')
if self.scheme == 'http':
self.port = parsed.port or httplib.HTTP_PORT
elif self.scheme == 'https':
self.port = parsed.port or httplib.HTTPS_PORT
self.host = parsed.hostname
self.path = parsed.path
if parsed.query:
self.path += '?%s' % parsed.query
self.__wbuf = StringIO()
self.__http = None
self.__timeout = None
self.__custom_headers = None
def open(self):
if self.scheme == 'http':
self.__http = httplib.HTTPConnection(self.host, self.port,
timeout=self.__timeout)
else:
self.__http = httplib.HTTPSConnection(self.host, self.port,
timeout=self.__timeout)
def close(self):
self.__http.close()
self.__http = None
def isOpen(self):
return self.__http is not None
def setTimeout(self, ms):
if ms is None:
self.__timeout = None
else:
self.__timeout = ms / 1000.0
def setCustomHeaders(self, headers):
self.__custom_headers = headers
def setCustomHeader(self, name, value):
if self.__custom_headers is None:
self.__custom_headers = {}
self.__custom_headers[name] = value
def read(self, sz):
return self.response.read(sz)
def write(self, buf):
self.__wbuf.write(buf)
def flush(self):
if self.isOpen():
self.close()
self.open()
# Pull data out of buffer
data = self.__wbuf.getvalue()
self.__wbuf = StringIO()
# HTTP request
self.__http.putrequest('POST', self.path, skip_host=True)
if not self.__custom_headers or 'Host' not in self.__custom_headers:
self.__http.putheader('Host', self.host)
self.__http.putheader('Content-Type', 'application/x-thrift')
self.__http.putheader('Content-Length', str(len(data)))
if not self.__custom_headers or 'User-Agent' not in \
self.__custom_headers:
user_agent = 'Python/THttpClient'
script = os.path.basename(sys.argv[0])
if script:
user_agent = '%s (%s)' % (user_agent, urllib.quote(script))
self.__http.putheader('User-Agent', user_agent)
if self.__custom_headers:
if sys.version_info[0] > 3:
custom_headers_iter = self.__custom_headers.items()
else:
custom_headers_iter = self.__custom_headers.iteritems()
for key, val in custom_headers_iter:
self.__http.putheader(key, val)
self.__http.endheaders()
# Write payload
self.__http.send(data)
# Get reply to flush the request
self.response = self.__http.getresponse()
self.code = self.response.status
self.headers = self.response.getheaders()
|
Python
| 0.000021 |
@@ -1873,16 +1873,55 @@
or_host%0A
+ self.http_host = self.host%0A
@@ -2427,16 +2427,59 @@
ostname%0A
+ self.http_host = parsed.netloc%0A
@@ -4136,24 +4136,29 @@
Host', self.
+http_
host)%0A%0A
|
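The reason for keeping parsed.netloc alongside parsed.hostname is that netloc preserves the port (and any userinfo) from the URI while hostname strips it, so a Thrift service on a non-default port now receives a correct Host header. A quick standard-library illustration (the URL is just an example):

try:
    from urllib.parse import urlparse   # Python 3
except ImportError:
    from urlparse import urlparse       # Python 2

parsed = urlparse("http://thrift.example.com:8080/service")
print(parsed.hostname)  # 'thrift.example.com'       -> port is lost
print(parsed.netloc)    # 'thrift.example.com:8080'  -> what the Host header should carry
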
2cde35bb6f948f861026921daf7fe24b353af273
|
Add bulleted and numbered list to CKEditor
|
kerrokantasi/settings/__init__.py
|
kerrokantasi/settings/__init__.py
|
from .util import get_settings, load_local_settings, load_secret_key
from . import base
settings = get_settings(base)
load_local_settings(settings, "local_settings")
load_secret_key(settings)
if not settings["DEBUG"] and settings["JWT_AUTH"]["JWT_SECRET_KEY"] == "kerrokantasi":
raise ValueError("Refusing to run out of DEBUG mode with insecure JWT secret key.")
settings['CKEDITOR_CONFIGS'] = {
'default': {
'stylesSet': [
{
"name": 'Lead',
"element": 'p',
"attributes": {'class': 'lead'},
},
],
'contentsCss': ['%sckeditor/ckeditor/contents.css' % settings['STATIC_URL'], '.lead { font-weight: bold;}'],
'extraAllowedContent': 'video [*]{*}(*);source [*]{*}(*);',
'extraPlugins': 'video',
'toolbar': [
['Styles', 'Format'],
['Bold', 'Italic', 'Underline', 'StrikeThrough', 'Undo', 'Redo'],
['Link', 'Unlink', 'Anchor'],
['Image', 'Video', 'Flash', 'Table', 'HorizontalRule'],
['TextColor', 'BGColor'],
['Smiley', 'SpecialChar'],
['Source']
]
},
}
globals().update(settings) # Export the settings for Django to use.
|
Python
| 0 |
@@ -978,24 +978,70 @@
'Anchor'%5D,%0A
+ %5B'BulletedList', 'NumberedList'%5D,%0A
|
d09cc197d11efa2181ce68ef4212cb9df5ee285c
|
add daemon argument to launcher
|
selfdrive/athena/manage_athenad.py
|
selfdrive/athena/manage_athenad.py
|
#!/usr/bin/env python3
import time
from multiprocessing import Process
from common.params import Params
from selfdrive.manager.process import launcher
from selfdrive.swaglog import cloudlog
from selfdrive.version import get_version, is_dirty
ATHENA_MGR_PID_PARAM = "AthenadPid"
def main():
params = Params()
dongle_id = params.get("DongleId").decode('utf-8')
cloudlog.bind_global(dongle_id=dongle_id, version=get_version(), dirty=is_dirty())
try:
while 1:
cloudlog.info("starting athena daemon")
proc = Process(name='athenad', target=launcher, args=('selfdrive.athena.athenad',))
proc.start()
proc.join()
cloudlog.event("athenad exited", exitcode=proc.exitcode)
time.sleep(5)
except Exception:
cloudlog.exception("manage_athenad.exception")
finally:
params.delete(ATHENA_MGR_PID_PARAM)
if __name__ == '__main__':
main()
|
Python
| 0.000001 |
@@ -600,16 +600,26 @@
thenad',
+ 'athenad'
))%0A
|
38eb6221ca41446c0c4fb1510354bdc4f00ba5f1
|
Remove children via uid rather than name
|
serfnode/build/handler/launcher.py
|
serfnode/build/handler/launcher.py
|
#!/usr/bin/env python
import functools
import os
import signal
import sys
import docker_utils
def handler(name, signum, frame):
print('Should kill', name)
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
sys.exit(0)
def launch(name, args):
try:
os.unlink('/child_{}'.format(name))
except OSError:
pass
try:
docker_utils.client.remove_container(name, force=True)
except Exception:
pass
args.insert(0, '--cidfile=/child_{}'.format(name))
docker_utils.docker('run', *args)
if __name__ == '__main__':
name = sys.argv[1]
args = sys.argv[2:]
signal.signal(signal.SIGINT, functools.partial(handler, name))
launch(name, args)
|
Python
| 0 |
@@ -157,32 +157,92 @@
name)%0A try:%0A
+ cid = open('/child_%7B%7D'.format(name)).read().strip()%0A
docker_u
@@ -262,36 +262,35 @@
emove_container(
-name
+cid
, force=True)%0A
@@ -365,32 +365,140 @@
args):%0A try:%0A
+ cid = open('/child_%7B%7D'.format(name)).read().strip()%0A except IOError:%0A cid = name%0A try:%0A
os.unlin
@@ -612,20 +612,19 @@
ntainer(
-name
+cid
, force=
|
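Put back together, the patched launcher resolves the child's container id from the cidfile that docker run --cidfile writes and removes the container by that id, falling back to the name only when the file is missing. A reconstruction of the two functions from the hunks (indentation approximate):

def handler(name, signum, frame):
    print('Should kill', name)
    try:
        cid = open('/child_{}'.format(name)).read().strip()
        docker_utils.client.remove_container(cid, force=True)
    except Exception:
        pass
    sys.exit(0)


def launch(name, args):
    try:
        cid = open('/child_{}'.format(name)).read().strip()
    except IOError:
        cid = name
    try:
        os.unlink('/child_{}'.format(name))
    except OSError:
        pass
    try:
        docker_utils.client.remove_container(cid, force=True)
    except Exception:
        pass
    args.insert(0, '--cidfile=/child_{}'.format(name))
    docker_utils.docker('run', *args)
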
933201b14764b8a108986313b8bece8ae4ad7d51
|
handle in_progress event
|
km_hipchat_screamer/statuspage.py
|
km_hipchat_screamer/statuspage.py
|
# -*- coding: utf-8 -*-
"""
km-hipchat-screamer.statuspage
~~~~~~~~~~~~~~~~~~~~~~~
Module providing for status change alert routes the KISSmetrics HipChat Webhook service
"""
from flask import Blueprint, jsonify, request
from utils import env_check
import os
import json
import hipchat
import requests
hipchat_notification_color = { 'operational': 'green',
'degraded_performance': 'yellow',
'partial_outage': 'yellow',
'major_outage': 'red',
'scheduled': 'gray',
'investigating': 'red',
'identified': 'yellow',
'monitoring': 'gray',
'resolved': 'green' }
def get_component_name(page_id, component_id):
url = 'http://%s.statuspage.io/index.json' % (page_id)
response = requests.get(url)
data = response.json()
for component in data['components']:
if component['id'] == component_id:
return component['name']
STATUSPAGE_HIPCHAT_TOKEN = os.environ.get('STATUSPAGE_HIPCHAT_TOKEN')
STATUSPAGE_NOTIFY_ROOMS = os.environ.get('STATUSPAGE_NOTIFY_ROOMS')
statuspage = Blueprint('statuspage', __name__)
#-------
# Routes
#-------
@statuspage.route('/statuspage/alert', methods=['POST'])
@env_check('STATUSPAGE_HIPCHAT_TOKEN')
@env_check('STATUSPAGE_NOTIFY_ROOMS')
def statuspage_route():
"""Send alerts for statuspage.io webhooks to rooms listed in STATUSPAGE_NOTIFY_ROOMS"""
notification = json.loads(request.data)
if 'component_update' in notification:
page_id = notification['page']['id']
component_update = notification['component_update']
component_id = component_update['component_id']
component_name = get_component_name(page_id, component_id)
old_status = component_update['old_status']
new_status = component_update['new_status']
color = hipchat_notification_color[new_status]
message = "[%s] status changed from %s to %s" % (component_name, old_status, new_status)
elif 'incident' in notification:
incident_update = notification['incident']
incident_name = incident_update['name']
incident_status = incident_update['status']
incident_message = incident_update['incident_updates'][0]['body']
color = hipchat_notification_color[incident_status]
message = "[%s] %s: %s" % (incident_name, incident_status, incident_message)
hipchat_api = hipchat.HipChat(token=STATUSPAGE_HIPCHAT_TOKEN)
for channel in STATUSPAGE_NOTIFY_ROOMS.split(','):
hipchat_api.message_room(channel, 'KM Status', message, notify=True, color=color)
body = { "action": "message sent" }
return jsonify(body)
|
Python
| 0.000012 |
@@ -577,32 +577,86 @@
duled': 'gray',%0A
+ 'in_progress': 'gray',%0A
|
a68cbe3e99b4361a35553e7728cc0087b040e254
|
remove order from markers
|
server/flask/flaskserver.py
|
server/flask/flaskserver.py
|
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 25 14:49:32 2016
@author: Dave Ho
"""
#HOST&&PORT: http://127.0.0.1:5000/
#COMP1: 192.168.1.54
#COMP2: 192.168.1.77
import werkzeug.datastructures
import sqlite3
import flask
import json as mjson
from flask import Flask
from flask import request
import ast
app = Flask(__name__)
DATABASE = 'database.db'
#Tester
@app.route("/", methods=["GET", "POST"])
def hello():
#if request.method == "GET":
print "someone said get"
return "JJ!"
'''
if request.method == "POST":
content = request.get_json(silent=True)
print content
print "someone posted something"
return ""
'''
@app.route("/json", methods=['GET', 'POST'])
def json():
print "here"
parameters = str(request.form)[22:]
parameters = parameters[:-4]
#print parameters
dic = ast.literal_eval(parameters)
print dic
conn = sqlite3.connect(DATABASE)
c = conn.cursor()
#users cannot be duplicate
print "here2"
try:
c.execute("INSERT INTO users VALUES('"+dic['phone']+"', '"+dic['phone']+"')")
except sqlite3.IntegrityError:
pass
print "here3"
c.execute('SELECT COUNT(pid) FROM path')
count = c.fetchone()[0]
print count
c.execute("INSERT INTO path VALUES('"+str(count)+"', '"+dic['phone']+"', '"+dic['title']+"', '"+dic['zipCodeList'][0]+"')")
print "INSERT INTO path VALUES('"+str(count)+"', '"+dic['phone']+"', '"+dic['title']+"', '"+dic['zipCodeList'][0]+"')"
for i in range(0, len(dic['lat'])):
c.execute("INSERT INTO points VALUES('"+str(count)+"', '"+str(dic['lat'][i])+"', '"+str(dic['lng'][i])+"', '"+str(i)+"')")
print "here3.7"
for i in range(0, len(dic['markerMap'].keys())):
key = sorted(dic['markerMap'].keys())[i]
print key
print str(dic['markerMap'][key]['lat'])
print str(dic['markerMap'][key]['lng'])
print dic['markerMap'][key]['description']
#print dic['markerMap'][key]['image']
print "start"
c.execute("INSERT INTO markers VALUES(?, ?, ?, ?, ?)", (str(count), str(dic['markerMap'][key]['lat']), str(dic['markerMap'][key]['lng']), dic['markerMap'][key]['description'], dic['markerMap'][key]['image']))
print "hello"
#print "INSERT INTO markers VALUES('"+str(count)+"', '"+str(dic['markerMap'][key]['lat'])+"', '"+str(dic['markerMap'][key]['lng'])+"', '"+dic['markerMap'][key]['description']+"', '"+dic['markerMap'][key]['image']+"')"
#c.execute("INSERT INTO markers VALUES('"+str(count)+"', '"+str(dic['markerMap'][key]['lat'])+"', '"+str(dic['markerMap'][key]['lng'])+"', '"+dic['markerMap'][key]['description']+"', '"+dic['markerMap'][key]['image']+"')")
print "here4"
conn.commit()
conn.close()
print "done"
return request.json
@app.route("/getTitle", methods=["GET", "POST"])
def getTitle():
"""
Example JSON Call: http://192.168.1.65/getTitle?zip=95135
Android User Given: Zip Code of Current User Location
What Server Needs to return: Given zip code of current user location, give back set of paths within that location using json
use paths table
"""
if request.method == "GET":
conn = sqlite3.connect(DATABASE)
c = conn.cursor()
zipcode = str(request.args.get('zipCode'))
# need to return titles back to android user from database
resultString = ""
for row in c.execute("SELECT * FROM path WHERE zip="+zipcode):
resultString += str(row[1])+"_"+str(row[2])+"\n"
return resultString
@app.route("/getDetail", methods=["GET", "POST"])
def getDetail():
"""
Example JSON Call: http://192.168.1.65/getDetail?phonenumber=14083346432&id=hi
Android User Given: Phone + title of path selected
What Server Needs to return: given phone and title of path return the dictionary json of that path
user users table
"""
if request.method == "GET":
print "start"
conn = sqlite3.connect(DATABASE)
c = conn.cursor()
# need to return full details back to android user from database
phone = "\"" + str(request.args.get('phoneNumber')) + "\""
titleid = "\"" + str(request.args.get('id')) + "\""
c.execute('SELECT pid, zip FROM path WHERE name='+phone+' AND title='+titleid)
tmp = c.fetchone()
path_id = tmp[0]
zipCodeList = [tmp[1]]
points_list = []
print "start2"
for row in c.execute('SELECT lat, lng FROM points WHERE pid='+str(path_id)+" ORDER BY sequence"):
points_list.append(row)
marker_list = []
print 'SELECT lat, lng, description, image FROM markers WHERE pid='+str(path_id)+" ORDER BY sequence"
for row in c.execute('SELECT lat, lng, description, image FROM markers WHERE pid='+str(path_id)+" ORDER BY sequence"):
marker_list.append(row)
print marker_list
lat = []
lng = []
print marker_list
print points_list
for coordinate in points_list:
lat.append(coordinate[0])
lng.append(coordinate[1])
print "hi"
jsondic = {}
jsondic["lat"] = lat
jsondic["lng"] = lng
count = 1;
markerMap = {}
for marker in marker_list:
snap = {}
snap["lat"] = marker[0]
snap["lng"] = marker[1]
snap["description"] = marker[2]
snap["image"] = marker[3]
markerMap["Snap"+str(count)] = snap
count += 1
jsondic["markerMap"] = markerMap
jsondic["zipCodeList"] = zipCodeList
print mjson.dumps(jsondic)
return mjson.dumps(jsondic)
if __name__ == "__main__":
app.run(host='0.0.0.0', port=80)
#app.run(host='0.0.0.0',port="80")
|
Python
| 0.000009 |
@@ -4727,37 +4727,16 @@
path_id)
-+%22 ORDER BY sequence%22
%0A
@@ -4831,37 +4831,16 @@
path_id)
-+%22 ORDER BY sequence%22
):%0A
|
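The two hunks above drop the ORDER BY sequence clause from the marker queries in getDetail; the markers table in this record is inserted with five values (pid, lat, lng, description, image) and no sequence column, so ordering by it would fail at query time. A small self-contained sketch of the lookup that remains after the patch, written with a parameterized query (the parameterization and function wrapper are editorial choices for illustration, not part of the original file):

import sqlite3

def fetch_markers(db_path, path_id):
    # Return (lat, lng, description, image) rows for one path, matching the
    # query left after the patch (no ORDER BY sequence).
    conn = sqlite3.connect(db_path)
    try:
        cur = conn.execute(
            "SELECT lat, lng, description, image FROM markers WHERE pid = ?",
            (path_id,),
        )
        return cur.fetchall()
    finally:
        conn.close()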
875d558d69fcadcea5f89b4ef4021484b34e435b
|
fix #190
|
django-openstack/django_openstack/syspanel/views/services.py
|
django-openstack/django_openstack/syspanel/views/services.py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
from django import template
from django import http
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.shortcuts import render_to_response
from django.shortcuts import redirect
from django.utils.translation import ugettext as _
import datetime
import json
import logging
import subprocess
import urlparse
from django.contrib import messages
from django_openstack import api
from django_openstack import forms
from django_openstack.dash.views import instances as dash_instances
from openstackx.api import exceptions as api_exceptions
class ToggleService(forms.SelfHandlingForm):
service = forms.CharField(required=False)
name = forms.CharField(required=False)
def handle(self, request, data):
try:
service = api.service_get(request, data['service'])
api.service_update(request,
data['service'],
not service.disabled)
if service.disabled:
messages.info(request, "Service '%s' has been enabled"
% data['name'])
else:
messages.info(request, "Service '%s' has been disabled"
% data['name'])
except api_exceptions.ApiException, e:
messages.error(request, "Unable to update service '%s': %s"
% data['name'], e.message)
return redirect(request.build_absolute_uri())
@login_required
def index(request):
for f in (ToggleService,):
_, handled = f.maybe_handle(request)
if handled:
return handled
services = []
try:
services = api.service_list(request)
except api_exceptions.ApiException, e:
messages.error(request, 'Unable to get service info: %s' % e.message)
other_services = []
for k, v in request.session['serviceCatalog'].iteritems():
v = v[0]
try:
subprocess.check_call(['curl', '-m', '1', v['internalURL']])
up = True
except:
up = False
hostname = urlparse.urlparse(v['internalURL']).hostname
row = {'type': k, 'internalURL': v['internalURL'], 'host': hostname,
'region': v['region'], 'up': up }
other_services.append(row)
return render_to_response('syspanel_services.html', {
'services': services,
'service_toggle_enabled_form': ToggleService,
'other_services': other_services,
}, context_instance = template.RequestContext(request))
|
Python
| 0.000001 |
@@ -2396,16 +2396,287 @@
d(row)%0A%0A
+ services = sorted(services, key=lambda svc: (svc.type +%0A svc.host))%0A other_services = sorted(other_services, key=lambda svc: (svc%5B'type'%5D +%0A svc%5B'host'%5D))%0A%0A
retu
|
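Percent-decoded, the hunk above sorts both service lists before rendering, keyed on the concatenation of type and host: the list returned by api.service_list uses attribute access, the rows built from request.session['serviceCatalog'] use dictionary access. A self-contained illustration of the same two key functions (the sample data below is invented for demonstration):

from collections import namedtuple

Service = namedtuple('Service', ['type', 'host'])

services = [Service('volume', 'node-2'), Service('compute', 'node-1')]
other_services = [{'type': 'identity', 'host': 'b', 'up': True},
                  {'type': 'identity', 'host': 'a', 'up': False}]

# Same ordering as the patch: API services by svc.type + svc.host,
# catalog entries by svc['type'] + svc['host'].
services = sorted(services, key=lambda svc: (svc.type + svc.host))
other_services = sorted(other_services, key=lambda svc: (svc['type'] + svc['host']))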
94937ee5f2a797e864684cf2bd624775e60e0b1a
|
update Bdd model with visible parameter
|
source/sql/models/georef.py
|
source/sql/models/georef.py
|
# -*- coding: utf-8 -*-
## File autogenerated by SQLAutoCode
## see http://code.google.com/p/sqlautocode/
from sqlalchemy import *
from sqlalchemy.dialects.postgresql import *
from models import metadata
from geoalchemy2 import Geography
synonym = Table(
'synonym',
metadata,
*[
Column('id', BIGINT(), primary_key=True, nullable=False),
Column('key', TEXT(), primary_key=False, nullable=False),
Column('value', TEXT(), primary_key=False, nullable=False),
],
schema='georef'
)
poi = Table(
'poi',
metadata,
*[
Column('id', BIGINT(), primary_key=True, nullable=False),
Column('weight', INTEGER(), primary_key=False, nullable=False),
Column('coord', Geography(geometry_type='POINT', srid=4326, spatial_index=False), primary_key=False),
Column('name', TEXT(), primary_key=False, nullable=False),
Column('uri', TEXT(), primary_key=False, nullable=False),
Column('visible', BOOLEAN(), primary_key=False, nullable=False, default=text(u'true')),
Column('poi_type_id', BIGINT(), primary_key=False, nullable=False),
Column('address_name', TEXT(), primary_key=False),
Column('address_number', TEXT(), primary_key=False),
ForeignKeyConstraint(['poi_type_id'], [u'georef.poi_type.id'], name=u'poi_poi_type_id_fkey'),
],
schema='georef'
)
poi_properties = Table(
'poi_properties',
metadata,
*[
Column('poi_id', BIGINT(), primary_key=False, nullable=False),
Column('key', TEXT(), primary_key=False),
Column('value', TEXT(), primary_key=False),
ForeignKeyConstraint(['poi_id'], [u'georef.poi.id'], name=u'poi_properties_poi_id_fkey'),
],
schema='georef'
)
poi_type = Table(
'poi_type',
metadata,
*[
Column('id', BIGINT(), primary_key=True, nullable=False),
Column('uri', TEXT(), primary_key=False, nullable=False),
Column('name', TEXT(), primary_key=False, nullable=False),
],
schema='georef'
)
rel_admin_admin = Table(
'rel_admin_admin',
metadata,
*[
Column('master_admin_id', BIGINT(), primary_key=True, nullable=False),
Column('admin_id', BIGINT(), primary_key=True, nullable=False),
ForeignKeyConstraint(['admin_id'], [u'georef.admin.id'], name=u'rel_admin_admin_admin_id_fkey'),
ForeignKeyConstraint(
['master_admin_id'], [u'georef.admin.id'], name=u'rel_admin_admin_master_admin_id_fkey'
),
],
schema='georef'
)
rel_way_admin = Table(
'rel_way_admin',
metadata,
*[
Column('admin_id', BIGINT(), primary_key=True, nullable=False),
Column('way_id', BIGINT(), primary_key=True, nullable=False),
ForeignKeyConstraint(['way_id'], [u'georef.way.id'], name=u'rel_way_admin_way_id_fkey'),
ForeignKeyConstraint(['admin_id'], [u'georef.admin.id'], name=u'rel_way_admin_admin_id_fkey'),
],
schema='georef'
)
way = Table(
'way',
metadata,
*[
Column(
'id',
BIGINT(),
primary_key=True,
nullable=False,
default=text(u"nextval('georef.way_id_seq'::regclass)"),
),
Column('name', TEXT(), primary_key=False, nullable=False),
Column('uri', TEXT(), primary_key=False, nullable=False),
Column('type', TEXT(), primary_key=False),
],
schema='georef'
)
edge = Table(
'edge',
metadata,
*[
Column('source_node_id', BIGINT(), primary_key=False, nullable=False),
Column('target_node_id', BIGINT(), primary_key=False, nullable=False),
Column('way_id', BIGINT(), primary_key=False, nullable=False),
Column(
'the_geog',
Geography(geometry_type='LINESTRING', srid=4326, spatial_index=False),
primary_key=False,
nullable=False,
),
Column('pedestrian_allowed', BOOLEAN(), primary_key=False, nullable=False),
Column('cycles_allowed', BOOLEAN(), primary_key=False, nullable=False),
Column('cars_allowed', BOOLEAN(), primary_key=False, nullable=False),
ForeignKeyConstraint(['source_node_id'], [u'georef.node.id'], name=u'edge_source_node_id_fkey'),
ForeignKeyConstraint(['target_node_id'], [u'georef.node.id'], name=u'edge_target_node_id_fkey'),
],
schema='georef'
)
house_number = Table(
'house_number',
metadata,
*[
Column('way_id', BIGINT(), primary_key=False),
Column(
'coord',
Geography(geometry_type='POINT', srid=4326, spatial_index=False),
primary_key=False,
nullable=False,
),
Column('number', TEXT(), primary_key=False, nullable=False),
Column('left_side', BOOLEAN(), primary_key=False, nullable=False),
ForeignKeyConstraint(['way_id'], [u'georef.way.id'], name=u'house_number_way_id_fkey'),
],
schema='georef'
)
node = Table(
'node',
metadata,
*[
Column(
'id',
BIGINT(),
primary_key=True,
nullable=False,
default=text(u"nextval('georef.node_id_seq'::regclass)"),
),
Column('coord', Geography(geometry_type='POINT', srid=4326, spatial_index=False), primary_key=False),
],
schema='georef'
)
admin = Table(
'admin',
metadata,
*[
Column('id', BIGINT(), primary_key=True, nullable=False),
Column('name', TEXT(), primary_key=False, nullable=False),
Column('comment', TEXT(), primary_key=False),
Column('insee', TEXT(), primary_key=False),
Column('level', INTEGER(), primary_key=False, nullable=False),
Column('coord', Geography(geometry_type='POINT', srid=4326, spatial_index=False), primary_key=False),
Column(
'boundary',
Geography(geometry_type='MULTIPOLYGON', srid=4326, spatial_index=False),
primary_key=False,
),
Column('uri', TEXT(), primary_key=False, nullable=False),
],
schema='georef'
)
postal_codes = Table(
'postal_codes',
metadata,
*[
Column('admin_id', TEXT(), primary_key=False, nullable=False),
Column('postal_code', TEXT(), primary_key=False, nullable=False),
ForeignKeyConstraint(['admin_id'], [u'georef.admin.id'], name=u'postal_codes_admin_id_fkey'),
],
schema='georef'
)
|
Python
| 0 |
@@ -3372,32 +3372,128 @@
ary_key=False),%0A
+ Column('visible', BOOLEAN(), primary_key=False, nullable=False, default=text(u'true')),%0A
%5D,%0A schem
|
93361bad12c132846b10966559fe89bc1d1a1e0b
|
Update settings.py
|
Epitome/settings.py
|
Epitome/settings.py
|
"""
Django settings for Epitome project.
Generated by 'django-admin startproject' using Django 2.0.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'DefaultKeyMustBeChanged'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'Agora',
'Propylaea',
'Eisegesis',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'Epitome.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'Epitome.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'Atlas.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static")
]
|
Python
| 0.000001 |
@@ -3133,12 +3133,11 @@
Z =
-Fals
+Tru
e%0D%0A%0D
|
bd3d8738fc00b2d36aafe5749e88826845441541
|
fix handling of pages (closes #685)
|
weboob/backends/orange/browser.py
|
weboob/backends/orange/browser.py
|
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Nicolas Duhamel
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
#~ from .pages.compose import ClosePage, ComposePage, ConfirmPage, SentPage
#~ from .pages.login import LoginPage
from .pages import LoginPage, ComposePage, ConfirmPage
from weboob.tools.browser import BaseBrowser, BrowserIncorrectPassword
__all__ = ['OrangeBrowser']
class OrangeBrowser(BaseBrowser):
DOMAIN = 'orange.fr'
PAGES = {
'http://id.orange.fr/auth_user/bin/auth_user.cgi.*': LoginPage,
'http://id.orange.fr/auth_user/bin/auth0user.cgi.*': LoginPage,
'http://smsmms1.orange.fr/M/Sms/sms_write.php.*' : ComposePage,
'http://smsmms1.orange.fr/M/Sms/sms_write.php?command=send' : ConfirmPage,
}
def get_nb_remaining_free_sms(self):
self.location("http://smsmms1.orange.fr/M/Sms/sms_write.php")
return self.page.get_nb_remaining_free_sms()
def home(self):
self.location("http://smsmms1.orange.fr/M/Sms/sms_write.php")
def is_logged(self):
self.location("http://smsmms1.orange.fr/M/Sms/sms_write.php", no_login=True)
return not self.is_on_page(LoginPage)
def login(self):
if not self.is_on_page(LoginPage):
self.location('http://id.orange.fr/auth_user/bin/auth_user.cgi?url=http://www.orange.fr', no_login=True)
self.page.login(self.username, self.password)
if not self.is_logged():
raise BrowserIncorrectPassword()
def post_message(self, message, sender):
if not self.is_on_page(ComposePage):
self.home()
self.page.post_message(message, sender)
|
Python
| 0 |
@@ -1248,33 +1248,33 @@
smms1.orange.fr/
-M
+.
/Sms/sms_write.p
@@ -1322,33 +1322,33 @@
smms1.orange.fr/
-M
+.
/Sms/sms_write.p
|
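The PAGES keys in this record are URL patterns (the trailing .* already relies on regex matching), so the hunks above replace the literal M path segment with ., a single-character wildcard; the new pattern still matches the old /M/ URLs as well as other one-letter variants. A small check of that behaviour — the example URLs are assumptions for illustration:

import re

pattern = re.compile(r'http://smsmms1.orange.fr/./Sms/sms_write.php.*')

# Matches the original /M/ form ...
assert pattern.match('http://smsmms1.orange.fr/M/Sms/sms_write.php?command=send')
# ... and any other single-character segment the site may serve instead.
assert pattern.match('http://smsmms1.orange.fr/D/Sms/sms_write.php')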
f631099894a02cb79b5be372894ed1f589849a8d
|
test for datetime.datetime type from dframe_dateconv
|
test/pandaservtest.py
|
test/pandaservtest.py
|
import unittest, sys, os
from datetime import datetime
import pandas as pd
import src.pandaserv as pandaserv
import numpy as np
class Testpandaserv(unittest.TestCase):
def setUp(self):
self.dates = pd.date_range('20130101', periods=6)
self.df = pd.DataFrame(
np.random.randn(6,4), index=self.dates, columns=list('ABCD'))
self.df2 = pd.DataFrame({ 'A' : 1.,
'B' : pd.Timestamp('20130102'),
'C' : pd.Series(1,index=list(range(4)),dtype='float32'),
'D' : np.array([3] * 4,dtype='int32'),
'E' : pd.Categorical(["test","train","test","train"]),
'F' : 'foo' })
def test_dframe_dateconv(self):
print('Unfinished test, PASS.')
pandaserv.dframe_dateconv(self.df, 'D')
self.assertIsInstance(self.df['D'], datetime)
def test_dframe_currencystrip(self):
print('Unfinished test, PASS.')
def test_make_sheets(self):
print('Unfinished test, PASS.')
def test_clean_sheets(self):
print('Unfinished test, PASS.')
if __name__ == '__main__':
unittest.main()
|
Python
| 0 |
@@ -889,15 +889,55 @@
f.df
+2
, '
-D
+B
')%0A
+ for singledate in df%5B'B'%5D:%0A
@@ -967,19 +967,17 @@
ce(s
-elf.df%5B'D'%5D
+ingledate
, da
|
e8e109de54ebed6336f6ed3bcb2400ec5d4aaafb
|
add docs for number
|
schematec/converters.py
|
schematec/converters.py
|
'''
Convertaion rules
=================
Can be converted into:
integer
-------
#. Any int or long value
#. Any suitable string/unicode
#. Boolean value
string
------
#. Any suitable string/unicode
#. Any int or long value
boolean
-------
#. Boolean value
#. 0 or 1
#. '0' or '1'
#. u'0' or u'1'
array
-----
#. Any iterable value(collections.Iterable)
dictionary
----------
#. Any mapping value(collections.Mapping)
'''
from __future__ import absolute_import
import collections
import schematec.exc as exc
class Converter(object):
pass
class Integer(Converter):
def __call__(self, value):
if value is None:
raise exc.ConvertationError(value)
if isinstance(value, bool):
return int(value)
if isinstance(value, (int, long)):
return int(value)
if isinstance(value, basestring):
try:
return int(value)
except ValueError:
raise exc.ConvertationError(value)
raise exc.ConvertationError(value)
integer = Integer()
class Number(Converter):
def __call__(self, value):
if value is None:
raise exc.ConvertationError(value)
if isinstance(value, bool):
return float(value)
if isinstance(value, (float, int, long)):
return float(value)
if isinstance(value, basestring):
try:
return float(value)
except ValueError:
raise exc.ConvertationError(value)
raise exc.ConvertationError(value)
number = Number()
class String(Converter):
def __call__(self, value):
if value is None:
raise exc.ConvertationError(value)
if isinstance(value, unicode):
return value
if isinstance(value, bool):
raise exc.ConvertationError(value)
if isinstance(value, (int, long)):
return unicode(value)
if isinstance(value, str):
try:
return unicode(value)
except UnicodeDecodeError:
raise exc.ConvertationError(value)
raise exc.ConvertationError(value)
string = String()
class Boolean(Converter):
def __call__(self, value):
if value is None:
raise exc.ConvertationError(value)
if isinstance(value, bool):
return value
if isinstance(value, (int, long)) and value in (0, 1):
return bool(value)
if isinstance(value, basestring) and value in (u'0', u'1'):
return bool(int(value))
raise exc.ConvertationError(value)
boolean = Boolean()
class Array(Converter):
def __call__(self, value):
if value is None:
raise exc.ConvertationError(value)
if isinstance(value, collections.Iterable):
return list(value)
raise exc.ConvertationError(value)
array = Array()
class Dictionary(Converter):
def __call__(self, value):
if value is None:
raise exc.ConvertationError(value)
if isinstance(value, collections.Mapping):
return dict(value)
raise exc.ConvertationError(value)
dictionary = Dictionary()
|
Python
| 0.000001 |
@@ -149,16 +149,115 @@
value%0A%0A
+number%0A-------%0A%0A#. Any float or int or long value%0A#. Any suitable string/unicode%0A#. Boolean value%0A%0A
string%0A-
|
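The hunk above only extends the module docstring with the conversion rules for number; the behaviour it documents is already implemented by the Number converter shown in the record. A short usage sketch, assuming the file is importable as schematec.converters (its path in the record) and running under Python 2, as the basestring/long checks require:

from schematec.converters import number
import schematec.exc as exc

assert number(True) == 1.0        # boolean value
assert number(3) == 3.0           # int/long value
assert number(u'2.5') == 2.5      # suitable string/unicode
try:
    number(u'not-a-number')       # anything else raises ConvertationError
except exc.ConvertationError:
    pass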
91238b6b0f0b14a6d0f7707aa0b388cedfd5894c
|
set default false allow_cnpj_multi_ie
|
l10n_br_base/models/res_config.py
|
l10n_br_base/models/res_config.py
|
# -*- coding: utf-8 -*-
from openerp import fields, models
from openerp.tools.safe_eval import safe_eval
class res_config(models.TransientModel):
_inherit = 'base.config.settings'
allow_cnpj_multi_ie = fields.Boolean(
string=u'Permitir o cadastro de Customers com CNPJs iguais',
default=True,
)
def get_default_allow_cnpj_multi_ie(self, cr, uid, fields, context=None):
icp = self.pool.get('ir.config_parameter')
return {
'allow_cnpj_multi_ie': safe_eval(icp.get_param(
cr, uid, 'l10n_br_base_allow_cnpj_multi_ie', 'False')),
}
def set_allow_cnpj_multi_ie(self, cr, uid, ids, context=None):
config = self.browse(cr, uid, ids[0], context=context)
icp = self.pool.get('ir.config_parameter')
icp.set_param(cr, uid, 'l10n_br_base_allow_cnpj_multi_ie',
repr(config.allow_cnpj_multi_ie))
|
Python
| 0.000003 |
@@ -312,11 +312,12 @@
ult=
-Tru
+Fals
e,%0A
|
bc9c782317eac99716bc961e42e6072f0e5616cf
|
Add dummy var in order to work around issue 1 https://github.com/LinuxTeam-teilar/cronos.teilar.gr/issues/1
|
apps/__init__.py
|
apps/__init__.py
|
# -*- coding: utf-8 -*-
from django.conf import settings
from django.core.mail import send_mail
def mail_cronos_admin(title, message):
'''
Wrapper function of send_mail
'''
try:
send_mail(title, message, '[email protected]', [settings.ADMIN[0][1]])
except:
pass
class CronosError(Exception):
'''
Custom Exception class
'''
def __init__(self, value):
self.value = value
def __unicode__(self):
return repr(self.value)
def log_extra_data(username = None, request = None, form = None, cronjob = None):
'''
Extra data needed by the custom formatter
All values default to None
It provides three data: client_ip, username and cronjob name
Username can be passed directly as argument, or it can be retrieved by
either the request var or the form
'''
log_extra_data = {
'client_ip': request.META.get('REMOTE_ADDR','None') if request else '',
'username': username if username else '',
'cronjob': cronjob if cronjob else '',
}
if not username:
if form:
log_extra_data['username'] = form.data.get('username', 'None')
else:
try:
if request.user.is_authenticated():
'''
Handle logged in users
'''
log_extra_data['username'] = request.user.name
else:
'''
Handle anonymous users
'''
log_extra_data['username'] = 'Anonymous'
except AttributeError:
pass
return log_extra_data
|
Python
| 0.001556 |
@@ -91,16 +91,241 @@
d_mail%0A%0A
+'''%0AFor unkown reason, the logger is NOT able to find a handler%0Aunless a settings.VARIABLE is called!!%0Ahttps://github.com/LinuxTeam-teilar/cronos.teilar.gr/issues/1%0AI leave that here till the bug is fixed%0A'''%0Asettings.DEBUG%0A%0A
def mail
|