Dataset columns:
commit: string, length 40-40
subject: string, length 1-3.25k
old_file: string, length 4-311
new_file: string, length 4-311
old_contents: string, length 0-26.3k
lang: string, 3 classes
proba: float64, range 0-1
diff: string, length 0-7.82k
6988a498504b382fd86099d3c037100ad14c62d3
fix bug, tpl_path is relative to simiki source path, not wiki path
simiki/configs.py
simiki/configs.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import sys from os import path as osp from pprint import pprint import yaml from simiki import utils def parse_configs(config_file): #base_dir = osp.dirname(osp.dirname(osp.realpath(__file__))) try: with open(config_file, "rb") as fd: configs = yaml.load(fd) except yaml.YAMLError, e: msg = "Yaml format error in {}:\n{}".format( config_file, unicode(str(e), "utf-8") ) sys.exit(utils.color_msg("error", msg)) if configs["base_dir"] is None: configs["base_dir"] = osp.dirname(osp.realpath(config_file)) configs.update( # The directory to store markdown files source = osp.join(configs["base_dir"], configs["source"]), # The directory to store the generated html files destination = osp.join(configs["base_dir"], configs["destination"]), # The path of html template file tpl_path = osp.join(configs["base_dir"], "simiki/themes", configs["theme"]), ) if configs.get("url", "") is None: configs["url"] = "" if configs.get("keywords", "") is None: configs["keywords"] = "" if configs.get("description", "") is None: configs["description"] = "" return configs if __name__ == "__main__": BASE_DIR = osp.dirname(osp.dirname(osp.realpath(__file__))) config_file = osp.join(BASE_DIR, "_config.yml") pprint(parse_configs(config_file))
Python
0
@@ -180,17 +180,16 @@ e):%0A -# base_dir @@ -995,33 +995,24 @@ sp.join( -configs%5B%22 base_dir %22%5D, %22sim @@ -1003,18 +1003,16 @@ base_dir -%22%5D , %22simik
6adae60ee018966199ee1f8e2120b2eb65dcdc9e
Add stub for registration executable.
nanshe/nanshe/nanshe_registerer.py
nanshe/nanshe/nanshe_registerer.py
Python
0
@@ -0,0 +1,122 @@ +#!/usr/bin/env python%0A%0A__author__ = %22John Kirkham %[email protected]%3E%22%0A__date__ = %22$Feb 20, 2015 13:00:51 EST$%22%0A%0A%0A
b83c4ddb14c9ba555d187125838a5189dfb3530c
Remove six as an explicit dependency.
setup.py
setup.py
import re import ast from setuptools import setup, find_packages _version_re = re.compile(r'__version__\s+=\s+(.*)') with open('mycli/__init__.py', 'rb') as f: version = str(ast.literal_eval(_version_re.search( f.read().decode('utf-8')).group(1))) description = 'CLI for MySQL Database. With auto-completion and syntax highlighting.' setup( name='mycli', author='Amjith Ramanujam', author_email='amjith[dot]r[at]gmail.com', version=version, license='LICENSE.txt', url='http://mycli.net', packages=find_packages(), package_data={'mycli': ['myclirc', '../AUTHORS', '../SPONSORS']}, description=description, long_description=open('README.md').read(), install_requires=[ 'click >= 4.1', 'Pygments >= 2.0', # Pygments has to be Capitalcased. WTF? 'prompt_toolkit==0.42', 'PyMySQL >= 0.6.6', 'sqlparse == 0.1.14', 'six >= 1.9', 'configobj >= 5.0.6', ], entry_points=''' [console_scripts] mycli=mycli.main:cli ''', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: SQL', 'Topic :: Database', 'Topic :: Database :: Front-Ends', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
Python
0
@@ -971,34 +971,8 @@ 4',%0A - 'six %3E= 1.9',%0A
540bf48cdca59744baf043cbfa5056b07e493429
fix sage script to work generally over a list of account ids to produce lists of journals
portality/scripts/journals_in_doaj_by_account.py
portality/scripts/journals_in_doaj_by_account.py
Python
0
@@ -0,0 +1,2177 @@ +from portality import models%0Afrom portality.core import app%0Afrom portality.core import es_connection%0Aimport esprit%0Aimport csv%0Aimport json%0Afrom portality.util import ipt_prefix%0A%0Aclass JournalQuery(object):%0A def __init__(self, owner):%0A self.owner = owner%0A%0A def query(self):%0A return %7B%0A %22query%22:%7B%0A %22filtered%22:%7B%0A %22filter%22:%7B%0A %22bool%22:%7B%0A %22must%22:%5B%0A %7B%22term%22:%7B%22admin.owner.exact%22: self.owner%7D%7D,%0A %7B%22term%22 : %7B%22admin.in_doaj%22 : True%7D%7D%0A %5D%0A %7D%0A %7D,%0A %22query%22:%7B%0A %22match_all%22:%7B%7D%0A %7D%0A %7D%0A %7D%0A %7D%0A%0Aif __name__ == %22__main__%22:%0A%0A import argparse%0A parser = argparse.ArgumentParser()%0A parser.add_argument(%22-i%22, %22--input%22, help=%22input account list%22)%0A parser.add_argument(%22-o%22, %22--out%22, help=%22output file path%22)%0A args = parser.parse_args()%0A%0A if not args.out:%0A print(%22Please specify an output file path with the -o option%22)%0A parser.print_help()%0A exit()%0A%0A if not args.input:%0A print(%22Please specify an input file path with the -i option%22)%0A parser.print_help()%0A exit()%0A%0A # conn = esprit.raw.make_connection(None, app.config%5B%22ELASTIC_SEARCH_HOST%22%5D, None, app.config%5B%22ELASTIC_SEARCH_DB%22%5D)%0A conn = es_connection%0A%0A with open(args.out, %22w%22, encoding=%22utf-8%22) as f, open(args.input, %22r%22) as g:%0A reader = csv.reader(g)%0A%0A writer = csv.writer(f)%0A writer.writerow(%5B%22Name%22, %22Account%22, %22ID%22, %22Title%22%5D)%0A%0A for row in reader:%0A query = JournalQuery(row%5B1%5D)%0A print(json.dumps(query.query()))%0A count = 0%0A for j in esprit.tasks.scroll(conn, ipt_prefix(models.Journal.__type__), q=query.query(), limit=800, keepalive='5m'):%0A journal = models.Journal(_source=j)%0A bibjson = journal.bibjson()%0A%0A writer.writerow(%5Brow%5B0%5D, row%5B1%5D, journal.id, bibjson.title%5D)%0A count += 1%0A print(count)
4d139c6d2b9ea368bfc5189537d9af67cea582f6
Create demo_Take_Photo_when_PIR_high.py
demo_Take_Photo_when_PIR_high.py
demo_Take_Photo_when_PIR_high.py
Python
0.000001
@@ -0,0 +1,1677 @@ +import time%0Aimport picamera%0Aimport datetime%0Aimport RPi.GPIO as GPIO%0A%0Adef CheckPIR():%0A # dependencies are RPi.GPIO and time%0A # returns whats_here with %22NOTHING HERE%22 or %22SOMETHING HERE%22%0A time.sleep(1)%0A #don't rush the PIR!%0A GPIO.setmode(GPIO.BOARD)%0A # set numbering system for GPIO PINs are BOARD%0A GPIO.setup(7, GPIO.IN)%0A # set up number 7 PIN for input from the PIR%0A # need to adjust if you connected PIR to another GPIO PIN%0A try:%0A val = GPIO.input(7)%0A if (val == True):%0A PIR_IS = 1%0A #PIR returned HIGH to GPIO PIN, so something here!%0A if (val == False):%0A PIR_IS = 0%0A #PIR returned LOW to GPIO PIN, so something here!%0A GPIO.cleanup()%0A%0A except:%0A GPIO.cleanup()%0A%0A return PIR_IS%0A %0A%0A%0APIR = 1%0Acount = 0%0A%0A%0Awhile True:%0A PIR = 0%0A #Now to check the PIR and send what it returns to PIR%0A PIR = CheckPIR()%0A if PIR == 0:%0A print(%22Nothing has been detected by PIR%22)%0A elif PIR == 1:%0A print(%22Something has been seen! Time to photograph it!%22)%0A i = 0%0A with picamera.PiCamera() as camera:%0A while i %3C 5:%0A i = i+1%0A print(i)%0A camera.start_preview()%0A time.sleep(1)%0A utc_datetime = datetime.datetime.utcnow()%0A utc_datetime.strftime(%22%25Y-%25m-%25d-%25H%25MZ%22)%0A #get date and time so we can append it to the image filename%0A camera.capture('image_'+str(utc_datetime)+'.jpg')%0A camera.stop_preview()%0A time.sleep(1)%0A if i == 5:%0A break%0A %0A%0A %0A
feeb386efe01fb3dd4e70e216337c8a4b476cb9a
Add setup.py
setup.py
setup.py
Python
0
@@ -0,0 +1,2234 @@ +#!/usr/bin/env python%0A# Copyright 2015-2015 ARM Limited%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#%0Afrom setuptools import setup, find_packages%0A%0A%0AVERSION = %221.0.0%22%0A%0ALONG_DESCRIPTION = %22%22%22Behavioural Analysis involves the expressing the general%0Aexpectation of the state of the system while targeting a single or set of heuristics.%0AThis is particularly helpful when there are large number of factors that can change%0Athe behaviour of the system and testing all permutations of these input parameters%0Ais impossible. In such a scenario an assertion of the final expectation can be%0Auseful in managing performance and regression.%0A%0AThe Behavioural Analysis and Regression Toolkit is based on TRAPpy. The primary goal is%0Ato assert behaviours using the FTrace output from the kernel%0A%22%22%22%0A%0AREQUIRES = %5B%0A %22TRAPpy==1.0.0%22,%0A%5D%0A%0Asetup(name='BART',%0A version=VERSION,%0A license=%22Apache v2%22,%0A author=%22ARM-BART%22,%0A author_email=%[email protected]%22,%0A description=%22Behavioural Analysis and Regression Toolkit%22,%0A long_description=LONG_DESCRIPTION,%0A url=%22http://arm-software.github.io/bart%22,%0A packages=find_packages(),%0A include_package_data=True,%0A classifiers=%5B%0A %22Development Status :: 5 - Production/Stable%22,%0A %22Environment :: Web Environment%22,%0A %22Environment :: Console%22,%0A %22License :: OSI Approved :: Apache Software License%22,%0A %22Operating System :: POSIX :: Linux%22,%0A %22Programming Language :: Python :: 2.7%22,%0A # As we depend on trace data from the Linux Kernel/FTrace%0A %22Topic :: System :: Operating System Kernels :: Linux%22,%0A %22Topic :: Scientific/Engineering :: Visualization%22%0A %5D,%0A install_requires=REQUIRES%0A )%0A
4a4231976f2f084c1233e3efe27f5d18b486f146
Create setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,406 @@ +from setuptools import setup%0Aimport re%0A%0Aname = 'gcdb'%0A%0Aversion = ''%0Awith open('%7B0%7D/__init__.py'.format(name), 'rb') as f:%0A match_object = re.search(%0A r'%5E__version__%5Cs*=%5Cs*%5B%5C'%22%5D(%5B%5E%5C'%22%5D*)%5B%5C'%22%5D',%0A f.read(),%0A re.MULTILINE)%0A version = match_object.group(1)%0A%0Asetup(%0A name=name,%0A version=version,%0A packages=%5Bname%5D,%0A entry_points=%7B'console_scripts': %5B'gcdb = gcdb:main'%5D%7D,%0A)%0A
3c314d006fb1726b671d0223f08fe16f0944cd82
test call started sla
cla_backend/apps/reports/tests/test_mi_sla_report.py
cla_backend/apps/reports/tests/test_mi_sla_report.py
Python
0.000001
@@ -0,0 +1,2150 @@ +# -*- coding: utf-8 -*-%0Afrom contextlib import contextmanager%0Aimport datetime%0Afrom django.test import TestCase%0Afrom legalaid.forms import get_sla_time%0Aimport mock%0A%0Afrom core.tests.mommy_utils import make_recipe, make_user%0Afrom cla_eventlog import event_registry%0Afrom cla_eventlog.models import Log%0Afrom reports.forms import MICB1Extract%0A%0A%0A@contextmanager%0Adef patch_field(cls, field_name, dt):%0A field = cls._meta.get_field(field_name)%0A mock_now = lambda: dt%0A with mock.patch.object(field, 'default', new=mock_now):%0A yield%0A%0A%0Aclass MiSlaTestCase(TestCase):%0A def test_call_started_sla(self):%0A with patch_field(Log, 'created', datetime.datetime(2015, 1, 2, 9, 0, 0)):%0A case = make_recipe('legalaid.case')%0A%0A user = make_user()%0A make_recipe('call_centre.operator', user=user)%0A%0A event = event_registry.get_event('call_me_back')()%0A _dt = datetime.datetime(2015, 1, 2, 9, 1, 0)%0A with patch_field(Log, 'created', datetime.datetime(2015, 1, 2, 9, 1, 0)):%0A event.get_log_code(case=case)%0A event.process(%0A case, created_by=user,%0A notes='',%0A context=%7B%0A 'requires_action_at': _dt,%0A 'sla_15': get_sla_time(_dt, 15),%0A 'sla_30': get_sla_time(_dt, 30),%0A 'sla_120': get_sla_time(_dt, 120),%0A 'sla_480': get_sla_time(_dt, 480)%0A %7D,%0A )%0A%0A case.requires_action_at = datetime.datetime(2015, 1, 2, 9, 1, 0)%0A case.save()%0A%0A event = event_registry.get_event('case')()%0A with patch_field(Log, 'created', datetime.datetime(2015, 1, 2, 9, 30, 0)):%0A event.process(%0A case, status='call_started', created_by=user,%0A notes='Call started'%0A )%0A%0A date_range = (%0A datetime.datetime(2015, 1, 1),%0A datetime.datetime(2015, 2, 1)%0A )%0A%0A with mock.patch('reports.forms.MICB1Extract.date_range', date_range):%0A report = MICB1Extract()%0A%0A qs = report.get_queryset()%0A%0A self.assertFalse(qs%5B0%5D%5B28%5D)%0A
0e45b8fcf1978f560713864e18a270719d7d4872
Make sure the handle dict values are strings. Looks like dbus-python gets confused if they are dbus.String.
sugar/activity/activityhandle.py
sugar/activity/activityhandle.py
# Copyright (C) 2006-2007 Red Hat, Inc. # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the # Free Software Foundation, Inc., 59 Temple Place - Suite 330, # Boston, MA 02111-1307, USA. from sugar.presence import presenceservice class ActivityHandle(object): """Data structure storing simple activity metadata""" def __init__( self, activity_id=None, pservice_id=None, object_id=None, uri=None ): """Initialise the handle from activity_id activity_id -- unique id for the activity to be created pservice_id -- identity of the sharing service for this activity in the PresenceService object_id -- identity of the journal object associated with the activity. It was used by the journal prototype implementation, might change when we do the real one. When you resume an activity from the journal the object_id will be passed in. It's optional since new activities does not have an associated object (yet). XXX Not clear how this relates to the activity id yet, i.e. not sure we really need both. TBF uri -- URI associated with the activity. Used when opening an external file or resource in the activity, rather than a journal object (downloads stored on the file system for example or web pages) """ self.activity_id = activity_id self.pservice_id = pservice_id self.object_id = object_id self.uri = uri def get_shared_activity(self): """Retrieve the shared instance of this activity Uses the PresenceService to find any existing dbus service which provides sharing mechanisms for this activity. """ if self.pservice_id: pservice = presenceservice.get_instance() return pservice.get_activity(self.pservice_id) else: return None def get_dict(self): """Retrieve our settings as a dictionary""" result = { 'activity_id' : self.activity_id } if self.pservice_id: result['pservice_id'] = self.pservice_id if self.object_id: result['object_id'] = self.object_id if self.uri: result['uri'] = self.uri return result def create_from_dict(handle_dict): """Create a handle from a dictionary of parameters""" result = ActivityHandle( handle_dict['activity_id'], pservice_id = handle_dict.get( 'pservice_id' ), object_id = handle_dict.get('object_id'), uri = handle_dict.get('uri'), ) return result
Python
0
@@ -2740,16 +2740,66 @@ ult = %7B +%7D%0A if self.activity_id:%0A result%5B 'activit @@ -2807,11 +2807,16 @@ _id' - : +%5D = str( self @@ -2827,18 +2827,17 @@ ivity_id - %7D +) %0A @@ -2890,24 +2890,28 @@ vice_id'%5D = +str( self.pservic @@ -2906,32 +2906,33 @@ self.pservice_id +) %0A if self @@ -2973,24 +2973,28 @@ ject_id'%5D = +str( self.object_ @@ -2987,32 +2987,33 @@ r(self.object_id +) %0A if self @@ -3046,24 +3046,28 @@ uri'%5D = +str( self.uri %0A%0A @@ -3058,16 +3058,17 @@ self.uri +) %0A%0A
26cbfe83f0047c8ce66a21237db8ae484736a085
Add TensorboardLogs class for use as a proxy to tensorboard data.
helpers/tensorboard.py
helpers/tensorboard.py
Python
0
@@ -0,0 +1,2180 @@ +import glob%0Aimport numpy as np%0Aimport os%0Afrom tensorflow.tensorboard.backend.event_processing.event_accumulator import EventAccumulator%0Afrom . import get_first_existing_path, get_nth_matching_path%0Afrom ..experiments import Experiment%0A%0Aclass TensorboardLogs(object):%0A%0A def __init__(self, path):%0A self.path = path%0A self.ea = EventAccumulator(self.path)%0A self.ea.Reload()%0A%0A def get_scalars(self, name):%0A events = self.ea.Scalars(name)%0A scalars = np.array(%5B(event.wall_time, event.step, event.value) for event in events%5D)%0A return (scalars%5B:,0%5D, scalars%5B:,1%5D.astype('int'), scalars%5B:,2%5D)%0A%0Adef find_log_path(config, main_path=None):%0A%0A config.define('path.result.main.base', 'path.result.base', default='')%0A config.define('path.result.main.relative', 'path.result.relative', default='')%0A config.define('path.result.tensorboard.base', 'path.result.base.tensorboard', default='')%0A config.define('path.result.tensorboard.relative', 'path.result.relative.tensorboard', default='')%0A%0A candidates = %5Bos.path.join(config('path.result.tensorboard.base'), config('path.result.tensorboard.relative')),%0A os.path.join(config('path.result.main.base').replace('experiment', 'experiment-tb'), config('path.result.tensorboard.relative')),%0A os.path.join(Experiment.DEFAULT_TENSORBOARD_ROOT, config('path.result.tensorboard.relative')),%0A get_nth_matching_path(os.path.join(config('path.result.tensorboard.base'), config('path.result.main.relative')) + '@*', -1, ''),%0A get_nth_matching_path(os.path.join(config('path.result.main.base').replace('experiment', 'experiment-tb'), config('path.result.main.relative')) + '@*', -1, ''),%0A get_nth_matching_path(os.path.join(Experiment.DEFAULT_TENSORBOARD_ROOT, config('path.result.main.relative')) + '@*', -1, '')%5D%0A%0A if main_path:%0A candidates.append(get_nth_matching_path(glob.escape(main_path.replace('experiment','experiment-tb')) + '@*', -1, ''))%0A%0A path = get_first_existing_path(*candidates)%0A if not path:%0A raise FileNotFoundError('Tensorboard log directory is not found.')%0A%0A return path
c184e79b91a63299c249e207dba1e8cd95a8e5d0
Add fpocket (#12675)
var/spack/repos/builtin/packages/fpocket/package.py
var/spack/repos/builtin/packages/fpocket/package.py
Python
0
@@ -0,0 +1,892 @@ +# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass Fpocket(MakefilePackage):%0A %22%22%22fpocket is a very fast open source protein pocket detection algorithm%0A based on Voronoi tessellation.%22%22%22%0A%0A homepage = %22https://github.com/Discngine/fpocket%22%0A version('master', branch='master',%0A git='https://github.com/Discngine/fpocket.git')%0A%0A depends_on(%22netcdf%22)%0A%0A def setup_environment(self, spack_env, run_env):%0A if self.compiler.name == 'gcc':%0A spack_env.set('CXX', 'g++')%0A%0A def edit(self):%0A makefile = FileFilter('makefile')%0A makefile.filter('BINDIR .*', 'BINDIR = %25s/bin' %25 self.prefix)%0A makefile.filter('MANDIR .*', 'MANDIR = %25s/man/man8' %25 self.prefix)%0A
fb6eee18b2bf48dd0063623515ced00e980bdf10
Add a few tests for docparse.
nipype/utils/tests/test_docparse.py
nipype/utils/tests/test_docparse.py
Python
0.999642
@@ -0,0 +1,808 @@ +from nipype.testing import *%0A%0Afrom nipype.utils.docparse import reverse_opt_map, build_doc%0A%0Aclass Foo(object):%0A opt_map = %7B'outline': '-o', 'fun': '-f %25.2f', 'flags': '%25s'%7D%0A%0Afoo_doc = %22%22%22Usage: foo infile outfile %5Bopts%5D%0A%0ABunch of options:%0A%0A -o something about an outline%0A -f %3Cf%3E intensity of fun factor%0A%0AOther stuff:%0A -v verbose%0A%0A%22%22%22%0A%0Afmtd_doc = %22%22%22Parameters%0A----------%0Aoutline : %0A something about an outline%0Afun : %0A %3Cf%3E intensity of fun factor%0A%0AOthers Parameters%0A-----------------%0A -v verbose%22%22%22%0A%0Adef test_rev_opt_map():%0A map = %7B'-f': 'fun', '-o': 'outline'%7D%0A rev_map = reverse_opt_map(Foo.opt_map)%0A assert_equal(rev_map, map)%0A%0Adef test_build_doc():%0A opts = reverse_opt_map(Foo.opt_map)%0A doc = build_doc(foo_doc, opts)%0A assert_equal(doc, fmtd_doc)%0A%0A
e71742bc0fc09ebf37532b92458670a4efe8926b
Add setup file
setup.py
setup.py
Python
0
@@ -0,0 +1,367 @@ +from setuptools import setup, find_packages%0A%0Asetup(%0A name='django-device-notifications',%0A version='0.0.1',%0A description='Generic library for APN & GCM notifications',%0A author='Johann Heller',%0A author_email='[email protected]',%0A url='https://github.com/roverdotcom/django-device-notifications',%0A packages=find_packages(exclude=('tests', 'docs'))%0A)%0A
ec54935e169019067f2179a92d0f6e833f133bc9
add a DataContainer implemented as a subclass of dict
simphony/core/data_container.py
simphony/core/data_container.py
Python
0
@@ -0,0 +1,2406 @@ +from collections import Mapping%0A%0Afrom simphony.core.cuba import CUBA%0A%0A_ERROR_MESSAGE = %22Keys %7B!r%7D are not in the approved CUBA keywords%22%0A_CUBA_KEYS = set(CUBA)%0A%0A%0Aclass DataContainer(dict):%0A %22%22%22 A DataContainer instance%0A%0A The DataContainer object is implemented as a python dictionary whose keys%0A are restricted to be members of the CUBA enum class.%0A%0A %22%22%22%0A%0A # Memory usage optimization.%0A __slots__ = ()%0A%0A def __init__(self, *args, **kwards):%0A %22%22%22 Contructor.%0A%0A Initialization follows the behaviour of the python dict class.%0A%0A %22%22%22%0A self._check_arguments(args, kwards)%0A if len(args) == 1 and not hasattr(args%5B0%5D, 'keys'):%0A super(DataContainer, self).__init__(**kwards)%0A for key, value in args%5B0%5D:%0A self.__setitem__(key, value)%0A return%0A super(DataContainer, self).__init__(*args, **kwards)%0A%0A def __setitem__(self, key, value):%0A %22%22%22 Set/Update the key value only when%0A%0A%0A %22%22%22%0A if key in _CUBA_KEYS:%0A super(DataContainer, self).__setitem__(key, value)%0A else:%0A message = %22Key %7B!r%7D is not in the approved CUBA keywords%22%0A raise KeyError(message.format(key))%0A%0A def update(self, *args, **kwards):%0A self._check_arguments(args, kwards)%0A if len(args) == 1 and not hasattr(args%5B0%5D, 'keys'):%0A for key, value in argument:%0A self.__setitem__(key, value)%0A return%0A super(DataContainer, self).update(*args, **kwards)%0A%0A def _check_arguments(self, args, kwards):%0A %22%22%22 Check for the right arguments%0A%0A %22%22%22%0A # See if there are any non CUBA keys in the mapping argument%0A non_cuba_keys = kwards.viewkeys() - _CUBA_KEYS%0A if len(non_cuba_keys) %3E 0:%0A raise KeyError(_ERROR_MESSAGE.format(non_cuba_keys))%0A if len(args) == 1:%0A argument = args%5B0%5D%0A if isinstance(argument, DataContainer):%0A # This is already a DataContainer so we are sure that%0A # it only contains CUBA keys.%0A return%0A if isinstance(argument, Mapping):%0A # See if there any non CUBA keys in the mapping argument%0A non_cuba_keys = set(argument.keys()) - _CUBA_KEYS%0A if len(non_cuba_keys) %3E 0:%0A raise KeyError(_ERROR_MESSAGE.format(non_cuba_keys))%0A
4912bac4ab534ca942393c36f71dd7df4182eb94
add test_dot.py
sympy/printing/tests/test_dot.py
sympy/printing/tests/test_dot.py
Python
0.00008
@@ -0,0 +1,1351 @@ +from sympy.printing.dot import (purestr, styleof, attrprint, dotnode,%0A dotedges, dotprint)%0Afrom sympy import Symbol, Integer, Basic, Expr%0Afrom sympy.abc import x%0A%0Adef test_purestr():%0A assert purestr(Symbol('x')) == %22Symbol(x)%22%0A assert purestr(Basic(1, 2)) == %22Basic(1, 2)%22%0A%0A%0Adef test_styleof():%0A styles = %5B(Basic, %7B'color': 'blue', 'shape': 'ellipse'%7D),%0A (Expr, %7B'color': 'black'%7D)%5D%0A assert styleof(Basic(1), styles) == %7B'color': 'blue', 'shape': 'ellipse'%7D%0A%0A x = Symbol('x')%0A assert styleof(x + 1, styles) == %7B'color': 'black', 'shape': 'ellipse'%7D%0A%0Adef test_attrprint():%0A assert attrprint(%7B'color': 'blue', 'shape': 'ellipse'%7D) == %5C%0A '%22color%22=%22blue%22, %22shape%22=%22ellipse%22'%0A%0Adef test_dotnode():%0A%0A assert dotnode(x) ==%5C%0A '%22Symbol(x)%22 %5B%22color%22=%22black%22, %22label%22=%22x%22, %22shape%22=%22ellipse%22%5D;'%0A assert dotnode(x+2) == %5C%0A '%22Add(Integer(2), Symbol(x))%22 %5B%22color%22=%22black%22, %22label%22=%22Add%22, %22shape%22=%22ellipse%22%5D;'%0A%0A%0Adef test_dotedges():%0A assert sorted(dotedges(x+2)) == %5B%0A '%22Add(Integer(2), Symbol(x))%22 -%3E %22Integer(2)%22;',%0A '%22Add(Integer(2), Symbol(x))%22 -%3E %22Symbol(x)%22;'%0A %5D%0A%0Adef test_dotprint():%0A text = dotprint(x+2)%0A assert all(e in text for e in dotedges(x+2))%0A assert all(n in text for n in map(dotnode, (x, Integer(2), x+2)))%0A assert 'digraph' in text%0A
4567a9810b8c9abdb450a442c892dbdb4eecf0e0
Add test.py to test gsutil in pantheon
vm_server/accept/test.py
vm_server/accept/test.py
Python
0.000001
@@ -0,0 +1,311 @@ +from google.cloud import storage%0A%0Abucket_name = %22automation-interns%22%0Adestination_file_name = (%22./text.txt%22)%0Asource_blob_name = %22test/text_file.txt%22%0Astorage_client = storage.Client()%0Abucket = storage_client.bucket(bucket_name)%0Ablob = bucket.blob(source_blob_name)%0Ablob.download_to_filename(destination_file_name)
a43acda7271c3fc48a82552721aec1332e9892d6
Create OpticalDensityInv.py
OpticalDensityInv.py
OpticalDensityInv.py
Python
0.000001
@@ -0,0 +1,593 @@ +import numpy%0A%0Adef OpticalDensityInv( I ):%0A '''%0A Transforms input RGB image %22I%22 into optical density space for color deconvolution.%0A *Inputs:%0A I (rgbimage) - a floating-point image of optical density values obtained%0A from OpticalDensityFwd.%0A *Outputs:%0A Out (rgbimage) - a floating-point multi-channel intensity image with %0A values in range 0-255.%0A *Related functions:%0A OpticalDensityFwd, ColorDeconvolution, ColorConvolution %0A '''%0A %0A return numpy.exp(-(I - 255)*numpy.log(255)/255);%0A
fa049b79c24f8213fa9335a31a34c354faf67459
Add example about proving equivalence of exprs
src/examples/python/proving_equivalence.py
src/examples/python/proving_equivalence.py
Python
0.000119
@@ -0,0 +1,2094 @@ +#!/usr/bin/env python%0A## -*- coding: utf-8 -*-%0A##%0A## $ python ./proving equivalence.py%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A## True%0A##%0A%0Aimport sys%0Afrom triton import *%0A%0A%0Adef prove(ctx, n):%0A ast = ctx.getAstContext()%0A if ctx.isSat(ast.lnot(n)) == True:%0A return False%0A return True%0A%0Aif __name__ == '__main__':%0A ctx = TritonContext(ARCH.X86_64)%0A ast = ctx.getAstContext()%0A%0A ctx.setAstRepresentationMode(AST_REPRESENTATION.PYTHON)%0A%0A x = ast.variable(ctx.newSymbolicVariable(8, 'x'))%0A y = ast.variable(ctx.newSymbolicVariable(8, 'y'))%0A%0A # MBA coming from VMProtect https://whereisr0da.github.io/blog/posts/2021-02-16-vmp-3/%0A # To detect their equivalence you can synthesize them (see synthesizing_obfuscated_expressions.py)%0A # Then you can confirm the synthesized output with this example%0A print(prove(ctx, x %5E y == (~(~(x) & ~(y)) & ~(~(~(x)) & ~(~(y))))))%0A print(prove(ctx, x + y == ((~(~(x)) & ~(~(y))) + (~(~(x)) %7C ~(~(y))))))%0A print(prove(ctx, x + y == ((~(~(y)) %7C ~(~(x))) + ~(~(x)) - (~(~(x)) & ~(~(~(y)))))))%0A print(prove(ctx, x + y == ((~(~(x)) %7C ~(~(y))) + (~(~(~(x))) %7C ~(~(y))) - (~(~(~(x)))))))%0A print(prove(ctx, x + y == ((~(~(x)) %7C ~(~(y))) + ~(~(y)) - (~(~(~(x))) & ~(~(y))))))%0A print(prove(ctx, x + y == (~(~(y)) + (~(~(x)) & ~(~(~(y)))) + (~(~(x)) & ~(~(y))))))%0A print(prove(ctx, x - y == (~(~(x) + y))))%0A print(prove(ctx, ~((x %7C y) - x) == (~(((~(~(x)) %7C y) - (~(~(x))))))))%0A print(prove(ctx, x - y == (~((~(x) & ~(x)) + y) & ~((~(x) & ~(x)) + y))))%0A print(prove(ctx, x & y == ((~(~(x)) %7C y) - (~(~(~(x))) & y) - (~(~(x)) & ~y))))%0A print(prove(ctx, x & y == ((~(~(~(x))) %7C y) - (~(~(~(x)))))))%0A print(prove(ctx, x %7C y == ((~(~(x)) & ~(y)) + y)))%0A print(prove(ctx, x %7C y == (((~(~(x)) & ~(y)) & y) + ((~(~(x)) & ~(y)) %7C y))))%0A print(prove(ctx, x + y == ((~(~(x)) & ~(~(y))) + (~(~(x)) %7C ~(~(y))))))%0A%0A sys.exit(0)%0A
3c997e3a9eb92c3053c521f6c2fff6cfdf99c126
add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,1040 @@ +# noqa: D100%0Aimport os%0Aimport re%0A%0Afrom setuptools import setup%0A%0Arequirements_txt = open(os.path.join(os.path.dirname(__file__), 'requirements.txt')).read()%0Arequirements = re.findall(r'%5E(%5B%5E%5Cs#%5D+)', requirements_txt, re.M)%0A%0Asetup(name='assignment_dashboard',%0A packages=%5B'assignment_dashboard'%5D,%0A include_package_data=True,%0A version='0.1',%0A description=%22A web app that inspects forks of an GitHub assignment repo%22,%0A long_description=%22Display the a GitHub repo's forks, by file, and collate Jupyter notebooks%22,%0A classifiers=%5B%0A 'Intended Audience :: Developers',%0A 'Intended Audience :: Education',%0A 'License :: OSI Approved :: MIT License',%0A 'Programming Language :: Python',%0A 'Programming Language :: Python :: 3'%0A 'Programming Language :: Python :: 3.5'%0A %5D,%0A url='http://github.com/osteele/assignment-dashboard',%0A author='Oliver Steele',%0A author_email='[email protected]',%0A license='MIT',%0A install_requires=requirements%0A )%0A
11cf7dd63f8fe7453057ef0846d4e645fa05f124
Add setuptools setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,372 @@ +from setuptools import setup%0A%0Asetup(name='pybeam',%0A version='0.1',%0A description='Python module to parse Erlang BEAM files',%0A url='http://github.com/matwey/pybeam',%0A author='Matwey V. Kornilov',%0A author_email='[email protected]',%0A license='MIT',%0A packages=%5B'pybeam'%5D,%0A install_requires=%5B'construct'%5D, %0A zip_safe=False)%0A%0A
555dac76a8810cfeaae96f8de04e9eb3362a3314
Remove old notification status column
migrations/versions/0109_rem_old_noti_status.py
migrations/versions/0109_rem_old_noti_status.py
Python
0.000001
@@ -0,0 +1,1224 @@ +%22%22%22%0A%0ARevision ID: 0109_rem_old_noti_status%0ARevises: 0108_change_logo_not_nullable%0ACreate Date: 2017-07-10 14:25:15.712055%0A%0A%22%22%22%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.dialects import postgresql%0A%0Arevision = '0109_rem_old_noti_status'%0Adown_revision = '0108_change_logo_not_nullable'%0A%0A%0Adef upgrade():%0A op.drop_column('notification_history', 'status')%0A op.drop_column('notifications', 'status')%0A%0A%0Adef downgrade():%0A op.add_column(%0A 'notifications',%0A sa.Column(%0A 'status',%0A postgresql.ENUM(%0A 'created', 'sending', 'delivered', 'pending', 'failed', 'technical-failure',%0A 'temporary-failure', 'permanent-failure', 'sent', name='notify_status_type'%0A ),%0A autoincrement=False,%0A nullable=True%0A )%0A )%0A op.add_column(%0A 'notification_history',%0A sa.Column(%0A 'status',%0A postgresql.ENUM(%0A 'created', 'sending', 'delivered', 'pending', 'failed', 'technical-failure',%0A 'temporary-failure', 'permanent-failure', 'sent', name='notify_status_type'%0A ),%0A autoincrement=False,%0A nullable=True%0A )%0A )%0A
21a67556b83b7905134439d55afe33c35e4b3422
Add an index on notifications for (service_id, created_at) to improve the performance of the notification queries. We've already performed this update on production since you need to create the index concurrently, which is not allowed from the alembic script. For that reason we are checking if the index exists.
migrations/versions/0246_notifications_index.py
migrations/versions/0246_notifications_index.py
Python
0
@@ -0,0 +1,542 @@ +%22%22%22%0A%0ARevision ID: 0246_notifications_index%0ARevises: 0245_archived_flag_jobs%0ACreate Date: 2018-12-12 12:00:09.770775%0A%0A%22%22%22%0Afrom alembic import op%0A%0Arevision = '0246_notifications_index'%0Adown_revision = '0245_archived_flag_jobs'%0A%0A%0Adef upgrade():%0A conn = op.get_bind()%0A conn.execute(%0A %22CREATE INDEX IF NOT EXISTS ix_notifications_service_created_at ON notifications (service_id, created_at)%22%0A )%0A%0A%0Adef downgrade():%0A conn = op.get_bind()%0A conn.execute(%0A %22DROP INDEX IF EXISTS ix_notifications_service_created_at%22%0A )%0A%0A
1337c19df3ccecf5739c58a719742d970c7faa14
Calculate LDA
build_topics.py
build_topics.py
Python
0.998653
@@ -0,0 +1,1883 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport argparse%0Aimport json%0Aimport logging%0Aimport os%0A%0Afrom gensim import corpora%0Afrom gensim.models.ldamulticore import LdaMulticore%0A%0Afrom common import SimpleTokenizer%0A%0A%0Adef parse_args():%0A description = '''%0A Finds topics from reviews%0A '''%0A%0A parser = argparse.ArgumentParser(description=description)%0A parser.add_argument('prefix')%0A parser.add_argument('--no_below', type=int, default=5)%0A parser.add_argument('--no_above', type=float, default=0.95)%0A parser.add_argument('--num_topics', type=int, default=64)%0A parser.add_argument('--workers')%0A return parser.parse_args()%0A%0A%0Aclass ReviewCorpus(object):%0A def __init__(self, filename, dictionary):%0A self.filename = filename%0A self.dictionary = dictionary%0A self.tokenizer = SimpleTokenizer()%0A%0A def __iter__(self):%0A with open(self.filename) as f:%0A for line in f:%0A review = json.loads(line)%0A tokens = self.tokenizer.tokenize(review)%0A yield self.dictionary.doc2bow(tokens)%0A%0A%0Adef main():%0A logging.basicConfig(format='%25(asctime)s : %25(levelname)s : %25(message)s',%0A level=logging.INFO)%0A%0A args = parse_args()%0A%0A dictionary = corpora.Dictionary.load(os.path.join(args.prefix, 'review.dict'))%0A logging.info('Pruning dictionary')%0A dictionary.filter_extremes(no_below=args.no_below,%0A no_above=args.no_above)%0A%0A corpus = ReviewCorpus(os.path.join(args.prefix, 'review.json'),%0A dictionary)%0A%0A logging.info('Computing LDA model')%0A lda = LdaMulticore(corpus, num_topics=args.num_topics, id2word=dictionary,%0A workers=args.workers)%0A%0A logging.info('Persisting LDA model')%0A lda.save(os.path.join(args.prefix, 'review.ldamodel'))%0A%0Aif __name__ == '__main__':%0A main()%0A
86b2f32bd212a14e904b9823fbf543b321f46ca7
Add very basic setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,116 @@ +from distutils.core import setup%0A%0Asetup(name='astcheck',%0A version='0.1',%0A py_modules=%5B'astcheck'%5D,%0A )
523a6fe005149bf0a8a91cd81c9f692f5aaaf1c9
fix description
farmer/models.py
farmer/models.py
#coding=utf8 import os import sys import time from psutil import Process from datetime import datetime from threading import Thread from ansible.runner import Runner from ansible.inventory import Inventory from django.db import models from farmer.settings import WORKER_TIMEOUT, ANSIBLE_FORKS class Task(models.Model): # hosts, like web_servers:host1 . inventory = models.TextField(null = False, blank = False) # 0, do not use sudo; 1, use sudo . sudo = models.BooleanField(default = True) # for example: ansible web_servers -m shell -a 'du -sh /tmp' # the 'du -sh /tmp' is cmd here cmd = models.TextField(null = False, blank = False) # return code of this job rc = models.IntegerField(null = True) # submitter farmer = models.TextField(null = False, blank = False) start = models.DateTimeField(null = True, blank = False) end = models.DateTimeField(null = True, blank = False) def run(self): t = Thread(target = self._run) t.setDaemon(True) t.start() def _run(self): self.start = datetime.now() self.save() # initial jobs for host in map(lambda i: i.name, Inventory().get_hosts(pattern = self.inventory)): self.job_set.add(Job(host = host, cmd = self.cmd, start = datetime.now())) self.save() runner = Runner(module_name = 'shell', module_args = self.cmd, \ pattern = self.inventory, sudo = self.sudo, forks = ANSIBLE_FORKS) _, poller = runner.run_async(time_limit = WORKER_TIMEOUT) now = time.time() while True: if poller.completed: break if time.time() - now > WORKER_TIMEOUT: # TIMEOUT break results = poller.poll() results = results.get('contacted') if results: for host, result in results.items(): job = self.job_set.get(host = host) job.end = result.get('end') job.rc = result.get('rc') job.stdout = result.get('stdout') job.stderr = result.get('stderr') job.save() time.sleep(1) jobs_timeout = filter(lambda job: job.rc is None, self.job_set.all()) jobs_failed = filter(lambda job: job.rc, self.job_set.all()) for job in jobs_timeout: job.rc = 1 job.stderr = 'TIMEOUT' # marked as 'TIMEOUT' job.save() self.rc = (jobs_timeout or jobs_failed) and 1 or 0 self.end = datetime.now() self.save() self.done() def done(self): try: myself = Process(os.getpid()) for child in myself.get_children(): child.kill() except Exception as e: sys.stderr.write(str(e) + '\n') def __unicode__(self): return self.inventory + ' -> ' + self.cmd class Job(models.Model): task = models.ForeignKey(Task) host = models.TextField(null = False, blank = False) cmd = models.TextField(null = False, blank = False) start = models.DateTimeField(null = True, blank = False) end = models.DateTimeField(null = True, blank = False) rc = models.IntegerField(null = True) stdout = models.TextField(null = True) stderr = models.TextField(null = True) def __unicode__(self): return self.host + ' : ' + self.cmd
Python
0.020455
@@ -2479,16 +2479,20 @@ derr = ' +JOB TIMEOUT'
5acc7d50cbe199af49aece28b95ea97484ae31c7
Add solution class for Ghia et al. (1982)
snake/solutions/ghiaEtAl1982.py
snake/solutions/ghiaEtAl1982.py
Python
0
@@ -0,0 +1,2151 @@ +%22%22%22%0AImplementation of the class %60GhiaEtAl1982%60 that reads the centerline velocities%0Areported in Ghia et al. (1982).%0A%0A_References:_%0A* Ghia, U. K. N. G., Ghia, K. N., & Shin, C. T. (1982).%0A High-Re solutions for incompressible flow using the Navier-Stokes equations%0A and a multigrid method.%0A Journal of computational physics, 48(3), 387-411.%0A%22%22%22%0A%0Aimport os%0A%0Aimport numpy%0A%0A%0Aclass GhiaEtAl1982(object):%0A %22%22%22%0A Container to store results from Ghia et al. (1982).%0A %22%22%22%0A def __init__(self, Re=None, file_path=None):%0A %22%22%22%0A Initialization.%0A%0A Parameters%0A ----------%0A Re: float, optional%0A Desired Reynolds number;%0A default: None.%0A file_path: string, optional%0A Path of the file containing the validation data;%0A default: None.%0A %22%22%22%0A self.y, self.u = None, None%0A self.x, self.v = None, None%0A if Re:%0A self.read_centerline_velocities(Re, file_path=file_path)%0A%0A def read_centerline_velocities(self, Re, file_path=None):%0A %22%22%22%0A Reads the centerline velocities from file and for a given Reynolds number.%0A%0A Parameters%0A ----------%0A Re: float%0A Desired Reynolds number.%0A file_path: string, optional%0A Path of the file containing the validation data;%0A default: None (will be read the file located in %60resources%60 directory of%0A the %60snake%60 package).%0A %22%22%22%0A if not file_path:%0A file_path = os.path.join(os.environ%5B'SNAKE'%5D,%0A 'resources',%0A 'validationData',%0A 'ghia_et_al_1982_lid_driven_cavity.dat')%0A Re = str(int(round(Re)))%0A # column indices in file with experimental results%0A cols = %7B'100': %7B'u': 1, 'v': 7%7D,%0A '1000': %7B'u': 2, 'v': 8%7D,%0A '3200': %7B'u': 3, 'v': 9%7D,%0A '5000': %7B'u': 4, 'v': 10%7D,%0A '10000': %7B'u': 5, 'v': 11%7D%7D%0A%0A with open(file_path, 'r') as infile:%0A y, u, x, v = numpy.loadtxt(infile,%0A dtype=float,%0A usecols=(0, cols%5BRe%5D%5B'u'%5D, 6, cols%5BRe%5D%5B'v'%5D),%0A unpack=True)%0A self.y, self.u = y, u%0A self.x, self.v = x, v%0A
a893a8f9375164cbbec4e276ae73f181f74fd9ae
create image.py
src/image.py
src/image.py
Python
0.000001
@@ -0,0 +1,63 @@ +#%0A# image.py%0A# Created by pira on 2017/07/28.%0A#%0A%0A#coding: utf-8
14068a2e3ca445c02895aed38420baf846338aae
Add smile detection example script.
scripts/examples/25-Machine-Learning/nn_haar_smile_detection.py
scripts/examples/25-Machine-Learning/nn_haar_smile_detection.py
Python
0
@@ -0,0 +1,1011 @@ +# Simle detection using Haar Cascade + CNN.%0Aimport sensor, time, image, os, nn%0A%0Asensor.reset() # Reset and initialize the sensor.%0Asensor.set_contrast(2)%0Asensor.set_pixformat(sensor.GRAYSCALE) # Set pixel format to RGB565%0Asensor.set_framesize(sensor.QQVGA) # Set frame size to QVGA (320x240)%0Asensor.skip_frames(time=2000)%0Asensor.set_auto_gain(False)%0A%0A# Load smile detection network%0Anet = nn.load('/smile.network')%0A%0A# Load Face Haar Cascade%0Aface_cascade = image.HaarCascade(%22frontalface%22, stages=25)%0Aprint(face_cascade)%0A%0A# FPS clock%0Aclock = time.clock()%0Awhile (True):%0A clock.tick()%0A%0A # Capture snapshot%0A img = sensor.snapshot()%0A%0A # Find faces.%0A objects = img.find_features(face_cascade, threshold=0.75, scale_factor=1.25)%0A%0A # Detect smiles%0A for r in objects:%0A img.draw_rectangle(r)%0A out = net.forward(img, roi=r, softmax=True)%0A img.draw_string(r%5B0%5D, r%5B1%5D, ':)' if (out%5B0%5D/127 %3E 0.8) else ':(', color=(255), scale=2)%0A%0A print(clock.fps())%0A
b31e7a3471daefb79b1d63a433c480cf51b75745
Create __init__.py
FireModules/FileDownloads/AccountBruting/__init__.py
FireModules/FileDownloads/AccountBruting/__init__.py
Python
0.000429
@@ -0,0 +1 @@ +%0A
7a4df9d8c385ed53e29e5171c115939920a271b3
Add a setup.py script
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,462 @@ +# Use the setuptools package if it is available. It's preferred %0A# because it creates an exe file on Windows for Python scripts.%0Atry:%0A from setuptools import setup%0Aexcept ImportError:%0A from ez_setup import use_setuptools%0A use_setuptools()%0A from setuptools import setup%0A %0A%0Asetup(name='csv_util',%0A entry_points=%7B'console_scripts': %5B %0A # 'EXECUTABLE_NAME = csv_util.scripts.script_module_name:entry_function_name'%0A %5D%0A %7D)
1e7548a5b237f18c3bf5918a2254d04125492372
Add setup script
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,221 @@ +#!/usr/bin/env python%0A%0Afrom setuptools import setup, find_packages%0A%0Asetup(name='rapidtest',%0A version='0.1',%0A author='Simon Zhang',%0A license='MIT',%0A packages=find_packages(),%0A install_requires=%5B%5D)%0A
61fcca809b31372bb5e793359df243cff5ee23cf
Add the setup.py file
setup.py
setup.py
Python
0.000002
@@ -0,0 +1,588 @@ +# -*- coding: utf-8 -*-%0Afrom setuptools import setup%0A%0Asetup(%0A name='fedmsg_fasclient',%0A version='0.1',%0A description='A fedmsg consumer that runs the fasClient based on fedmsg FAS messages',%0A license=%22LGPLv2+%22,%0A author='Janez Nemani%C4%8D, Ralph Bean and Pierre-Yves Chibon',%0A author_email='[email protected]',%0A url='https://github.com/fedora-infra/fedmsg-fasclient',%0A install_requires=%5B%22fedmsg%22%5D,%0A packages=%5B%5D,%0A py_modules=%5B'fedmsg_fasclient'%5D,%0A entry_points=%22%22%22%0A %5Bmoksha.consumer%5D%0A fedmsg_fasclient = fedmsg_fasclient:FasClientConsumer%0A %22%22%22,%0A)%0A
139123ddb81eec12d0f932ff6ff73aadb4b418cc
Add decorator to make a Node class from a regular function
ocradmin/lib/nodetree/decorators.py
ocradmin/lib/nodetree/decorators.py
Python
0.000001
@@ -0,0 +1,1516 @@ +%22%22%22%0ANodetree decorators.%0A%22%22%22%0A%0Aimport inspect%0Aimport textwrap%0Aimport node%0A%0A%0Adef underscore_to_camelcase(value):%0A def camelcase(): %0A yield str.lower%0A while True:%0A yield str.capitalize%0A c = camelcase()%0A return %22%22.join(c.next()(x) if x else '_' for x in value.split(%22_%22))%0A%0Adef upper_camelcase(value):%0A value = underscore_to_camelcase(value)%0A return value%5B0%5D.capitalize() + value%5B1:%5D%0A%0A%0Aclass makenode(object):%0A %22%22%22Decorate for constructing a node out%0A of a single function.%22%22%22%0A def __init__(self, intypes, outtype, **kwargs):%0A self.intypes = intypes%0A self.outtype = outtype%0A self.kwargs = kwargs%0A%0A def __call__(self, fun):%0A argspec = inspect.getargspec(fun)%0A%0A def _eval(self):%0A args = %5Bself.eval_input(i) for i in range(len(argspec.args))%5D%0A return fun(*args)%0A%0A doc = fun.__doc__ if not fun.__doc__ is None %5C%0A else %22No description provided%22%0A clsname = upper_camelcase(fun.__name__)%0A ns = upper_camelcase(fun.__module__.split(%22.%22)%5B-1%5D)%0A clsdict = dict(%0A __module__ = fun.__module__,%0A __doc__ = doc,%0A _eval = _eval,%0A arity = len(self.intypes),%0A intypes = self.intypes,%0A outtype = self.outtype,%0A description = textwrap.dedent(fun.__doc__),%0A name = %22%25s::%25s%22 %25 (ns, clsname),%0A )%0A clsdict.update(self.kwargs)%0A return type(clsname + %22Node%22, (node.Node,), clsdict)()%0A%0A%0A
a34810b957ee5381db2ea62fbcfc103c5ad0cf0e
Check for image earlier so we can open in binary
grip/server.py
grip/server.py
import os import re import errno import requests import mimetypes from traceback import format_exc from flask import Flask, current_app, safe_join, abort, url_for, send_from_directory from .renderer import render_page, render_image default_filenames = ['README.md', 'README.markdown'] def serve(path=None, host=None, port=None, gfm=False, context=None, render_offline=False, username=None, password=None): """Starts a server to render the specified file or directory containing a README.""" if not path or os.path.isdir(path): path = _find_file(path) if not os.path.exists(path): raise ValueError('File not found: ' + path) directory = os.path.dirname(path) # Flask application app = Flask(__name__) app.config.from_pyfile('settings.py') app.config.from_pyfile('settings_local.py', silent=True) # Setup style cache style_urls = app.config['STYLE_URLS'] if app.config['STYLE_CACHE_DIRECTORY']: style_cache_path = os.path.join(app.instance_path, app.config['STYLE_CACHE_DIRECTORY']) if not os.path.exists(style_cache_path): os.makedirs(style_cache_path) else: style_cache_path = None # Get styles from style source @app.before_first_request def retrieve_styles(): """Retrieves the style URLs from the source and caches them, if requested.""" if not app.config['STYLE_URLS_SOURCE'] or not app.config['STYLE_URLS_RE']: return # Fetch style URLs style_urls.extend( _get_style_urls(app.config['STYLE_URLS_SOURCE'], app.config['STYLE_URLS_RE'], style_cache_path)) # Set overridden config values if host is not None: app.config['HOST'] = host if port is not None: app.config['PORT'] = port # Views @app.route('/') @app.route('/<path:filename>') def render(filename=None): if filename is not None: filename = safe_join(directory, filename) if os.path.isdir(filename): try: filename = _find_file(filename) except ValueError: abort(404) try: text = _read_file(filename) except IOError as ex: if ex.errno != errno.ENOENT: raise return abort(404) # if we think this file is an image, serve it as such mimetype, _ = mimetypes.guess_type(filename) if mimetype.startswith("image/"): return render_image(text, mimetype) filename_display = _display_filename(filename) else: text = _read_file(path) filename_display = _display_filename(path) return render_page(text, filename_display, gfm, context, render_offline, username, password, style_urls) @app.route('/cache/<path:filename>') def render_cache(filename=None): return send_from_directory(style_cache_path, filename) # Run local server app.run(app.config['HOST'], app.config['PORT'], debug=app.debug, use_reloader=app.config['DEBUG_GRIP']) def _get_style_urls(source_url, pattern, style_cache_path): """Gets the specified resource and parses all style URLs in the form of the specified pattern.""" try: # TODO: Add option to clear the cached styles # Skip fetching styles if there's any already cached if style_cache_path: cached = _get_cached_style_urls(style_cache_path) if cached: return cached # Find style URLs r = requests.get(source_url) if not 200 <= r.status_code < 300: print ' * Warning: retrieving styles gave status code', r.status_code urls = re.findall(pattern, r.text) # Cache the styles if style_cache_path: _cache_contents(urls, style_cache_path) urls = _get_cached_style_urls(style_cache_path) return urls except Exception as ex: if current_app.config['DEBUG_GRIP']: print format_exc() else: print ' * Error: could not retrieve styles:', str(ex) return [] def _get_cached_style_urls(style_cache_path): """Gets the URLs of the cached styles.""" cached_styles = os.listdir(style_cache_path) return 
[url_for('render_cache', filename=style) for style in cached_styles] def _find_file(path): """Gets the full path and extension of the specified.""" if path is None: path = '.' for filename in default_filenames: full_path = os.path.join(path, filename) if os.path.exists(full_path): return full_path raise ValueError('No README found at ' + path) def _read_file(filename): """Reads the contents of the specified file.""" with open(filename) as f: return f.read() def _write_file(filename, contents): """Creates the specified file and writes the given contents to it.""" with open(filename, 'w') as f: f.write(contents.encode('utf-8')) def _cache_contents(urls, style_cache_path): """Fetches the given URLs and caches their contents in the given directory.""" for url in urls: basename = url.rsplit('/', 1)[-1] filename = os.path.join(style_cache_path, basename) contents = requests.get(url).text _write_file(filename, contents) print ' * Downloaded', url def _display_filename(filename): """Normalizes the specified filename for display purposes.""" cwd = '.' + os.path.sep return filename[len(cwd):] if filename.startswith(cwd) else filename
Python
0
@@ -2173,16 +2173,246 @@ rt(404)%0A +%0A # if we think this file is an image, we need to read it in%0A # binary mode and serve it as such%0A mimetype, _ = mimetypes.guess_type(filename)%0A is_image = mimetype.startswith(%22image/%22)%0A%0A @@ -2454,32 +2454,42 @@ ad_file(filename +, is_image )%0A ex @@ -2629,164 +2629,20 @@ - # if -we think this file is an image, serve it as such%0A mimetype, _ = mimetypes.guess_type(filename)%0A if mimetype.startswith(%22 +is_ image -/%22) :%0A @@ -4904,32 +4904,54 @@ ad_file(filename +, read_as_binary=False ):%0A %22%22%22Reads @@ -4985,24 +4985,67 @@ ed file.%22%22%22%0A + mode = %22rb%22 if read_as_binary else %22r%22%0A with ope @@ -5054,16 +5054,22 @@ filename +, mode ) as f:%0A
fe7f07cbd9ff9844efa2b191a900f6efb9de576e
add db model file
model/db.py
model/db.py
Python
0
@@ -0,0 +1,29 @@ +# db model - all db handlers%0A
8ec524a7a64c55f0759e18ea4b70c63c9c83f99a
Add admin for the various models
pombola/interests_register/admin.py
pombola/interests_register/admin.py
Python
0
@@ -0,0 +1,949 @@ +from django.contrib import admin%0A%0Afrom . import models%0A%0A%0Aclass CategoryAdmin(admin.ModelAdmin):%0A prepopulated_fields = %7B%22slug%22: %5B%22name%22%5D%7D%0A list_display = %5B'slug', 'name', 'sort_order'%5D%0A search_fields = %5B'name'%5D%0A%0A%0Aclass ReleaseAdmin(admin.ModelAdmin):%0A prepopulated_fields = %7B%22slug%22: %5B%22name%22%5D%7D%0A list_display = %5B'slug', 'name', 'date'%5D%0A search_fields = %5B'name'%5D%0A date_hierarchy = 'date'%0A%0A%0Aclass LineItemInlineAdmin(admin.TabularInline):%0A model = models.EntryLineItem%0A # extra = 2%0A fields = %5B 'key', 'value' %5D%0A%0A%0Aclass EntryAdmin(admin.ModelAdmin):%0A inlines = %5BLineItemInlineAdmin%5D%0A list_display = %5B'id', 'person', 'category', 'release', 'sort_order'%5D%0A list_filter = %5B 'release', 'category' %5D%0A search_fields = %5B'person__legal_name'%5D%0A%0A%0A# Add these to the admin%0Aadmin.site.register( models.Category, CategoryAdmin)%0Aadmin.site.register( models.Release, ReleaseAdmin)%0Aadmin.site.register( models.Entry, EntryAdmin)%0A
a4f49b988a10afc160c217d32da46ea854059e8c
Add migration file
ureport/polls/migrations/0060_populate_category_displayed.py
ureport/polls/migrations/0060_populate_category_displayed.py
Python
0.000001
@@ -0,0 +1,800 @@ +# Generated by Django 2.2.10 on 2020-05-05 15:01%0A%0Afrom django.db import migrations%0A%0A%0Adef noop(apps, schema_editor): # pragma: no cover%0A pass%0A%0A%0Adef populate_category_displayed(apps, schema_editor): # pragma: no cover%0A PollResponseCategory = apps.get_model(%22polls%22, %22PollResponseCategory%22)%0A%0A updated = 0%0A%0A for obj in PollResponseCategory.objects.all().exclude(category=None):%0A PollResponseCategory.objects.filter(id=obj.id).update(category_displayed=obj.category)%0A updated += 1%0A%0A if updated %3E 0:%0A print(f%22populated %7Bupdated%7D poll response categories%22)%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%22polls%22, %220059_pollresponsecategory_category_displayed%22),%0A %5D%0A%0A operations = %5Bmigrations.RunPython(populate_category_displayed, noop)%5D%0A
b7cd3081585c0a4695db4f85b7db8e346a525e23
add to PyPI
setup.py
setup.py
Python
0
@@ -0,0 +1,398 @@ +from setuptools import setup, find_packages%0A%0Asetup(%0A name=%22libraw.py%22,%0A version=%221.0%22,%0A description=%22python bindings using ctypes for libraw%22,%0A url=%22https://github.com/paroj/libraw.py%22,%0A author=%22Pavel Rojtberg%22,%0A license=%22LGPLv2%22,%0A classifiers=%5B%0A 'Programming Language :: Python :: 2.7',%0A 'Programming Language :: Python :: 3',%0A %5D,%0A py_modules=%5B%22libraw%22%5D%0A)%0A
88cb2155d55100d9b00dca1ecf4f9a01dec7c3f5
Add missing 'import os' for integrationtest/vm/basic/suite_setup.py
integrationtest/vm/basic/suite_setup.py
integrationtest/vm/basic/suite_setup.py
''' @author: Frank ''' import zstackwoodpecker.setup_actions as setup_actions import zstackwoodpecker.operations.deploy_operations as deploy_operations import zstackwoodpecker.operations.config_operations as config_operations import zstackwoodpecker.test_lib as test_lib import zstackwoodpecker.test_util as test_util USER_PATH = os.path.expanduser('~') EXTRA_SUITE_SETUP_SCRIPT = '%s/.zstackwoodpecker/extra_suite_setup_config.sh' % USER_PATH def test(): setup = setup_actions.SetupAction() setup.plan = test_lib.all_config setup.run() if os.path.exists(EXTRA_SUITE_SETUP_SCRIPT): os.system("bash %s" % EXTRA_SUITE_SETUP_SCRIPT) deploy_operations.deploy_initial_database(test_lib.deploy_config) test_util.test_pass('Suite Setup Success')
Python
0.000011
@@ -23,16 +23,27 @@ %0A'''%0D%0A%0D%0A +import os%0D%0A import z
e3cbc79cc60e21978fe682b73413e9de19b71543
add a print hello world function
helloAlyssa.py
helloAlyssa.py
Python
0.999999
@@ -0,0 +1,55 @@ +#This is my hello world program%0Aprint ('Hello World')%0A%0A
9339307b6bd42ad014e528d337fc9f195c632245
Add tick class
zaifbot/exchange/tick.py
zaifbot/exchange/tick.py
Python
0.000001
@@ -0,0 +1,404 @@ +class Tick:%0A def __init__(self, currency_pair):%0A self.size = currency_pair.info%5B'aux_unit_step'%5D%0A self._decimal_digits = currency_pair.info%5B'aux_unit_point'%5D%0A%0A def truncate_price(self, price):%0A remainder = price %25 self.size%0A truncated_price = price - remainder%0A if self._decimal_digits == 0:%0A return int(truncated_price)%0A return truncated_price%0A
d9d84083a488ad1b4643298d7a75b54b4e0e34be
add OptionChainConsistencyRegressionAlgorithm
Algorithm.Python/OptionChainConsistencyRegressionAlgorithm.py
Algorithm.Python/OptionChainConsistencyRegressionAlgorithm.py
using System; namespace QuantConnect.Algorithm.Python { public class OptionChainConsistencyRegressionAlgorithm { public OptionChainConsistencyRegressionAlgorithm() { } } }
Python
0.000001
@@ -1,182 +1,3317 @@ %EF%BB%BF -using System;%0Anamespace QuantConnect.Algorithm.Python%0A%7B%0A%09public class OptionChainConsistencyRegressionAlgorithm%0A%09%7B%0A%09%09public OptionChainConsistencyRegressionAlgorithm()%0A%09%09%7B%0A%09%09%7D%0A%09%7D%0A%7D%0A +# QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.%0A# Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A%0Afrom clr import AddReference%0AAddReference(%22System%22)%0AAddReference(%22QuantConnect.Algorithm%22)%0AAddReference(%22QuantConnect.Indicators%22)%0AAddReference(%22QuantConnect.Common%22)%0A%0Afrom System import *%0Afrom QuantConnect import *%0Afrom QuantConnect.Algorithm import *%0Afrom QuantConnect.Indicators import *%0Afrom datetime import datetime, timedelta%0A%0A### %3Csummary%3E%0A### This regression algorithm checks if all the option chain data coming to the algo is consistent with current securities manager state%0A### %3C/summary%3E%0A### %3Cmeta name=%22tag%22 content=%22regression test%22 /%3E%0A### %3Cmeta name=%22tag%22 content=%22options%22 /%3E%0A### %3Cmeta name=%22tag%22 content=%22using data%22 /%3E%0A### %3Cmeta name=%22tag%22 content=%22filter selection%22 /%3E%0Aclass OptionChainConsistencyRegressionAlgorithm(QCAlgorithm):%0A%0A def Initialize(self):%0A %0A self.SetCash(10000)%0A self.SetStartDate(2015,12,24)%0A self.SetEndDate(2015,12,24)%0A equity = self.AddEquity(%22GOOG%22)%0A option = self.AddOption(%22GOOG%22)%0A %0A # set our strike/expiry filter for this option chain%0A option.SetFilter(self.UniverseFunc)%0A %0A self.SetBenchmark(equity.Symbol)%0A self.OptionSymbol = option.Symbol%0A equity.SetDataNormalizationMode(DataNormalizationMode.Raw)%0A%0A def OnData(self, slice):%0A if self.Portfolio.Invested: return%0A for kvp in slice.OptionChains:%0A chain = kvp.Value%0A for o in chain:%0A if not self.Securities.ContainsKey(o.Symbol):%0A # inconsistency found: option chains contains contract information that is not available in securities manager and not available for trading%0A self.Log(%22inconsistency found: option chains contains contract %7B0%7D that is not available in securities manager and not available for trading%22.format(o.Symbol.Value)) %0A %0A contracts = filter(lambda x: x.Expiry.date() == self.Time.date() and%0A x.Strike %3C chain.Underlying.Price and%0A x.Right == OptionRight.Call, chain)%0A %0A sorted_contracts = sorted(contracts, key = lambda x: x.Strike, reverse = True)%0A%0A if len(sorted_contracts) %3E 2:%0A self.MarketOrder(sorted_contracts%5B2%5D.Symbol, 1)%0A self.MarketOnCloseOrder(sorted_contracts%5B2%5D.Symbol, -1)%0A%0A%0A # set our strike/expiry filter for this option chain%0A def UniverseFunc(self, universe):%0A return universe.IncludeWeeklys().Strikes(-2, 2).Expiration(timedelta(0), timedelta(10))%0A%0A%0A def OnOrderEvent(self, orderEvent):%0A self.Log(str(orderEvent))
aafb77596ae0cb6c27b2564434367d2b4d5debd1
Add tests
Orange/widgets/visualize/tests/test_owscatterplot.py
Orange/widgets/visualize/tests/test_owscatterplot.py
Python
0
@@ -0,0 +1,1126 @@ +import numpy as np%0A%0Afrom Orange.data import Table%0Afrom Orange.widgets.tests.base import WidgetTest%0Afrom Orange.widgets.visualize.owscatterplot import OWScatterPlot%0A%0A%0Aclass TestOWScatterPlot(WidgetTest):%0A def setUp(self):%0A self.widget = self.create_widget(OWScatterPlot)%0A self.data = Table(%22iris%22)%0A%0A def test_set_data(self):%0A self.widget.set_data(self.data)%0A self.assertEqual(self.widget.data, self.data)%0A self.assertEqual(self.widget.subset_data, None)%0A%0A def test_subset_data(self):%0A self.widget.set_subset_data(self.data%5B:30%5D)%0A self.assertEqual(len(self.widget.subset_data), 30)%0A self.assertEqual(self.widget.data, None)%0A np.testing.assert_array_equal(self.widget.subset_data, self.data%5B:30%5D)%0A%0A def test_set_data_none(self):%0A self.widget.set_data(None)%0A self.assertEqual(self.widget.data, None)%0A self.assertEqual(self.widget.subset_data, None)%0A%0A def test_subset_data_none(self):%0A self.widget.set_subset_data(None)%0A self.assertEqual(self.widget.subset_data, None)%0A self.assertEqual(self.widget.data, None)%0A
a1049edc842e54784b5ac93427ab1c1dace930f7
Remove delay from end of volTM2DRotateClip.py.
Rendering/Volume/Testing/Python/volTM2DRotateClip.py
Rendering/Volume/Testing/Python/volTM2DRotateClip.py
#!/usr/bin/env python import time import vtk from vtk.test import Testing from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() # Simple volume rendering example. reader = vtk.vtkImageReader() reader.SetDataByteOrderToLittleEndian() reader.SetDataExtent(0,63,0,63,1,93) reader.SetFilePrefix("" + str(VTK_DATA_ROOT) + "/Data/headsq/quarter") reader.SetDataMask(0x7fff) reader.SetDataSpacing(2,2,1) reader.SetDataScalarTypeToUnsignedShort() reader.Update() readerOutput = reader.GetOutput() readerOutput.SetOrigin(-63,-63,-46) # Disconnect the output from its reader. First get an extra reference. readerOutput.Register(None) # Create transfer functions for opacity and color opacityTransferFunction = vtk.vtkPiecewiseFunction() opacityTransferFunction.AddPoint(600,0.0) opacityTransferFunction.AddPoint(2000,1.0) colorTransferFunction = vtk.vtkColorTransferFunction() colorTransferFunction.ClampingOff() colorTransferFunction.AddHSVPoint(0.0,0.01,1.0,1.0) colorTransferFunction.AddHSVPoint(1000.0,0.50,1.0,1.0) colorTransferFunction.AddHSVPoint(2000.0,0.99,1.0,1.0) colorTransferFunction.SetColorSpaceToHSV() # Create properties, mappers, volume actors, and ray cast function volumeProperty = vtk.vtkVolumeProperty() volumeProperty.SetColor(colorTransferFunction) volumeProperty.SetScalarOpacity(opacityTransferFunction) volumeMapper = vtk.vtkVolumeTextureMapper2D() volumeMapper.SetInputData(readerOutput) volumeMapper.SetMaximumStorageSize(10000000) # The data object is now referenced by the connection. readerOutput.UnRegister(None) # not needed in python volume = vtk.vtkVolume() volume.SetMapper(volumeMapper) volume.SetProperty(volumeProperty) # Create geometric sphere sphereSource = vtk.vtkSphereSource() sphereSource.SetRadius(65) sphereSource.SetThetaResolution(20) sphereSource.SetPhiResolution(40) def colorCells (__vtk__temp0=0,__vtk__temp1=0): randomColorGenerator = vtk.vtkMath() input = randomColors.GetInput() output = randomColors.GetOutput() numCells = input.GetNumberOfCells() colors = vtk.vtkFloatArray() colors.SetNumberOfTuples(numCells) i = 0 while i < numCells: colors.SetValue(i,randomColorGenerator.Random(0,1)) i = i + 1 output.GetCellData().CopyScalarsOff() output.GetCellData().PassData(input.GetCellData()) output.GetCellData().SetScalars(colors) del colors #reference counting - it's ok del randomColorGenerator # Compute random scalars (colors) for each cell randomColors = vtk.vtkProgrammableAttributeDataFilter() randomColors.SetInputConnection(sphereSource.GetOutputPort()) randomColors.SetExecuteMethod(colorCells) # This does not need a hierarchical mapper, but hierarchical # mapper could use a test that has clipping so we use it here sphereMapper = vtk.vtkHierarchicalPolyDataMapper() sphereMapper.SetInputConnection(randomColors.GetOutputPort(0)) sphereActor = vtk.vtkActor() sphereActor.SetMapper(sphereMapper) # Set up the planes plane1 = vtk.vtkPlane() plane1.SetOrigin(0,0,-10) plane1.SetNormal(0,0,1) plane2 = vtk.vtkPlane() plane2.SetOrigin(0,0,10) plane2.SetNormal(0,0,-1) plane3 = vtk.vtkPlane() plane3.SetOrigin(-10,0,0) plane3.SetNormal(1,0,0) plane4 = vtk.vtkPlane() plane4.SetOrigin(10,0,0) plane4.SetNormal(-1,0,0) sphereMapper.AddClippingPlane(plane1) sphereMapper.AddClippingPlane(plane2) volumeMapper.AddClippingPlane(plane3) volumeMapper.AddClippingPlane(plane4) # Okay now the graphics stuff ren1 = vtk.vtkRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren1) renWin.SetSize(256,256) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) 
ren1.GetCullers().InitTraversal() culler = ren1.GetCullers().GetNextItem() culler.SetSortingStyleToBackToFront() ren1.AddViewProp(sphereActor) ren1.AddViewProp(volume) ren1.SetBackground(0.1,0.2,0.4) renWin.Render() ren1.GetActiveCamera().Azimuth(45) ren1.GetActiveCamera().Elevation(15) ren1.GetActiveCamera().Roll(45) ren1.GetActiveCamera().Zoom(2.0) iren.Initialize() i = 0 while i < 5: volume.RotateY(17) volume.RotateZ(13) sphereActor.RotateX(13) sphereActor.RotateY(17) renWin.Render() i = i + 1 # force a wait for hardware to finish rendering time.sleep(1) # --- end of script --
Python
0.000001
@@ -19,20 +19,8 @@ hon%0A -import time%0A impo @@ -4133,71 +4133,8 @@ 1%0A%0A -# force a wait for hardware to finish rendering%0Atime.sleep(1)%0A%0A # --
47ad7f4d3b69315e25ae96099fe73b4d9cd7666e
Use file extension to select config file parser
dotbot/config.py
dotbot/config.py
import yaml import json from .util import string class ConfigReader(object): def __init__(self, config_file_path): self._config = self._read(config_file_path) def _read(self, config_file_path): try: with open(config_file_path) as fin: try: data = yaml.safe_load(fin) except Exception as e: # try falling back to JSON, but return original exception # if that fails too try: fin.seek(0) data = json.load(fin) except Exception: raise e return data except Exception as e: msg = string.indent_lines(str(e)) raise ReadingError('Could not read config file:\n%s' % msg) def get_config(self): return self._config class ReadingError(Exception): pass
Python
0
@@ -17,16 +17,31 @@ rt json%0A +import os.path%0A from .ut @@ -249,17 +249,33 @@ -with open +_, ext = os.path.splitext (con @@ -292,16 +292,8 @@ ath) - as fin: %0A @@ -305,101 +305,42 @@ - try:%0A data = yaml.safe_load(fin)%0A except Exception +with open(config_file_path) as -e +fin :%0A @@ -357,175 +357,53 @@ - # try falling back to JSON, but return original exception%0A # if that fails too%0A try:%0A fin.seek(0)%0A +print ext%0A if ext == '.json':%0A @@ -456,36 +456,20 @@ - except Exception +else :%0A @@ -486,19 +486,34 @@ - raise e +data = yaml.safe_load(fin) %0A
935f1d257dc4126a3d4f8b2f76dda6890ce68f3b
Allow cycles of install/uninstall of Twisted Reactor
kivy/support.py
kivy/support.py
''' Support ======= Activate other framework/toolkit inside our event loop ''' __all__ = ('install_gobject_iteration', 'install_twisted_reactor', 'install_android') from kivy.compat import PY2 def install_gobject_iteration(): '''Import and install gobject context iteration inside our event loop. This is used as soon as gobject is used (like gstreamer) ''' from kivy.clock import Clock if PY2: import gobject else: from gi.repository import GObject as gobject if hasattr(gobject, '_gobject_already_installed'): # already installed, don't do it twice. return gobject._gobject_already_installed = True # get gobject mainloop / context loop = gobject.MainLoop() gobject.threads_init() context = loop.get_context() # schedule the iteration each frame def _gobject_iteration(*largs): # XXX we need to loop over context here, otherwise, we might have a lag. loop = 0 while context.pending() and loop < 10: context.iteration(False) loop += 1 Clock.schedule_interval(_gobject_iteration, 0) # ----------------------------------------------------------------------------- # Android support # ----------------------------------------------------------------------------- g_android_redraw_count = 0 def _android_ask_redraw(*largs): # after wakeup, we need to redraw more than once, otherwise we get a # black screen global g_android_redraw_count from kivy.core.window import Window Window.canvas.ask_update() g_android_redraw_count -= 1 if g_android_redraw_count < 0: return False def install_android(): '''Install hooks for android platform. * Automaticly sleep when the device is paused * Auto kill the application is the return key is hitted ''' try: import android except ImportError: print('Android lib is missing, cannot install android hooks') return from kivy.clock import Clock from kivy.logger import Logger import pygame Logger.info('Support: Android install hooks') # Init the library android.init() android.map_key(android.KEYCODE_MENU, pygame.K_MENU) android.map_key(android.KEYCODE_BACK, pygame.K_ESCAPE) # Check if android must be paused or not # If pause is asked, just leave the app. def android_check_pause(*largs): # do nothing until android ask for it. if not android.check_pause(): return from kivy.app import App from kivy.base import stopTouchApp from kivy.logger import Logger from kivy.core.window import Window global g_android_redraw_count # try to get the current running application Logger.info('Android: Must to in sleep mode, check the app') app = App.get_running_app() # no running application, stop our loop. if app is None: Logger.info('Android: No app running, stop everything.') stopTouchApp() return # try to go to pause mode if app.dispatch('on_pause'): Logger.info('Android: App paused, now wait for resume.') # app goes in pause mode, wait. android.wait_for_resume() # is it a stop or resume ? if android.check_stop(): # app must stop Logger.info('Android: Android want to close our app.') stopTouchApp() else: # app resuming now ! Logger.info('Android: Android resumed, resume the app') app.dispatch('on_resume') Window.canvas.ask_update() g_android_redraw_count = 25 # 5 frames/seconds during 5 seconds Clock.unschedule(_android_ask_redraw) Clock.schedule_interval(_android_ask_redraw, 1 / 5) Logger.info('Android: App resume completed.') # app don't support pause mode, just stop it. 
else: Logger.info('Android: App doesn\'t support pause mode, stop.') stopTouchApp() Clock.schedule_interval(android_check_pause, 0) _twisted_reactor_stopper = None _twisted_reactor_work = None def install_twisted_reactor(**kwargs): '''Installs a threaded twisted reactor, which will schedule one reactor iteration before the next frame only when twisted needs to do some work. any arguments or keyword arguments passed to this function will be passed on the the threadedselect reactors interleave function, these are the arguments one would usually pass to twisted's reactor.startRunning Unlike the default twisted reactor, the installed reactor will not handle any signals unnless you set the 'installSignalHandlers' keyword argument to 1 explicitly. This is done to allow kivy to handle teh signals as usual, unless you specifically want the twisted reactor to handle the signals (e.g. SIGINT).''' import twisted # prevent installing more than once if hasattr(twisted, '_kivy_twisted_reactor_installed'): return twisted._kivy_twisted_reactor_installed = True # dont let twisted handle signals, unless specifically requested kwargs.setdefault('installSignalHandlers', 0) # install threaded-select reactor, to use with own event loop from twisted.internet import _threadedselect _threadedselect.install() # now we can import twisted reactor as usual from twisted.internet import reactor from twisted.internet.error import ReactorNotRunning from collections import deque from kivy.base import EventLoop from kivy.logger import Logger from kivy.clock import Clock # will hold callbacks to twisted callbacks q = deque() # twisted will call the wake function when it needsto do work def reactor_wake(twisted_loop_next): Logger.trace("Support: twisted wakeup call to schedule task") q.append(twisted_loop_next) # called every frame, to process the reactors work in main thread def reactor_work(*args): Logger.trace("Support: processing twisted task queue") while len(q): q.popleft()() global _twisted_reactor_work _twisted_reactor_work = reactor_work # start the reactor, by telling twisted how to wake, and process def reactor_start(*args): Logger.info("Support: Starting twisted reactor") reactor.interleave(reactor_wake, **kwargs) Clock.schedule_interval(reactor_work, 0) # make sure twisted reactor is shutdown if eventloop exists def reactor_stop(*args): '''will shutdown the twisted reactor main loop ''' if reactor.threadpool: Logger.info("Support: Stooping twisted threads") reactor.threadpool.stop() Logger.info("Support: Shutting down twisted reactor") reactor._mainLoopShutdown() try: reactor.stop() except ReactorNotRunning: pass import sys sys.modules.pop('twisted.internet.reactor', None) global _twisted_reactor_stopper _twisted_reactor_stopper = reactor_stop # start and stop teh reactor along with kivy EventLoop Clock.schedule_once(reactor_start, 0) EventLoop.bind(on_stop=reactor_stop)
Python
0
@@ -7357,8 +7357,537 @@ _stop)%0A%0A +%0Adef uninstall_twisted_reactor(**kwargs):%0A '''Uninstalls a threaded twisted reactor. It blocks, and no iteration will%0A run after.%0A '''%0A import twisted%0A%0A # prevent uninstalling more than once%0A if not hasattr(twisted, '_kivy_twisted_reactor_installed'):%0A return%0A%0A from kivy.base import EventLoop%0A from kivy.logger import Logger%0A%0A global _twisted_reactor_stopper%0A _twisted_reactor_stopper()%0A EventLoop.unbind(on_stop=_twisted_reactor_stopper)%0A%0A del twisted._kivy_twisted_reactor_installed%0A
fec74a5401f925755484955a1b38dd3044824eb3
Create npy2ckpt.py
npy2ckpt.py
npy2ckpt.py
Python
0.000006
@@ -0,0 +1,2027 @@ +%22%22%22Conversion of the .npy weights into the .ckpt ones.%0A%0AThis script converts the weights of the DeepLab-ResNet model%0Afrom the numpy format into the TensorFlow one.%0A%22%22%22%0A%0Afrom __future__ import print_function%0A%0Aimport argparse%0Aimport os%0A%0Aimport tensorflow as tf%0Aimport numpy as np%0A%0Afrom deeplab_resnet import DeepLabResNetModel%0A%0ASAVE_DIR = './'%0A%0Adef get_arguments():%0A %22%22%22Parse all the arguments provided from the CLI.%0A %0A Returns:%0A A list of parsed arguments.%0A %22%22%22%0A parser = argparse.ArgumentParser(description=%22NPY to CKPT converter.%22)%0A parser.add_argument(%22npy_path%22, type=str,%0A help=%22Path to the .npy file, which contains the weights.%22)%0A parser.add_argument(%22--save_dir%22, type=str, default=SAVE_DIR,%0A help=%22Where to save the converted .ckpt file.%22)%0A return parser.parse_args()%0A%0Adef save(saver, sess, logdir):%0A model_name = 'model.ckpt'%0A checkpoint_path = os.path.join(logdir, model_name)%0A %0A if not os.path.exists(logdir):%0A os.makedirs(logdir)%0A%0A saver.save(sess, checkpoint_path, write_meta_graph=False)%0A print('The weights have been converted to %7B%7D.'.format(checkpoint_path))%0A%0A%0Adef main():%0A %22%22%22Create the model and start the training.%22%22%22%0A args = get_arguments()%0A %0A # Default image.%0A image_batch = tf.constant(0, tf.float32, shape=%5B1, 321, 321, 3%5D) %0A # Create network.%0A net = DeepLabResNetModel(%7B'data': image_batch%7D)%0A var_list = tf.trainable_variables()%0A %0A # Set up tf session and initialize variables. %0A config = tf.ConfigProto()%0A config.gpu_options.allow_growth = True%0A %0A with tf.Session(config=config) as sess:%0A init = tf.initialize_all_variables()%0A sess.run(init)%0A %0A # Loading .npy weights.%0A net.load(args.npy_path, sess)%0A %0A # Saver for converting the loaded weights into .ckpt.%0A saver = tf.train.Saver(var_list=var_list)%0A save(saver, sess, args.save_dir)%0A%0Aif __name__ == '__main__':%0A main()%0A
8b5bf433b304895f04813c64d556316c48c046fe
add setup.py for distribute
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,1327 @@ +#!/usr/bin/env python%0D%0Aimport os, os.path%0D%0Afrom distutils.core import setup, Extension%0D%0Aimport distutils.msvccompiler%0D%0A%0D%0Asource_files = %5B%22Engine.cpp%22, %22Wrapper.cpp%22, %22PyV8.cpp%22%5D%0D%0A%0D%0Amacros = %5B(%22BOOST_PYTHON_STATIC_LIB%22, None)%5D%0D%0Athird_party_libraries = %5B%22python%22, %22boost%22, %22v8%22%5D%0D%0A%0D%0Ainclude_dirs = os.environ%5B%22INCLUDE%22%5D.split(';') + %5Bos.path.join(%22lib%22, lib, %22inc%22) for lib in third_party_libraries%5D%0D%0Alibrary_dirs = os.environ%5B%22LIB%22%5D.split(';') + %5Bos.path.join(%22lib%22, lib, %22lib%22) for lib in third_party_libraries%5D%0D%0Alibraries = %5B%22winmm%22%5D%0D%0A%0D%0Apyv8 = Extension(name = %22_PyV8%22, %0D%0A sources = %5Bos.path.join(%22src%22, file) for file in source_files%5D,%0D%0A define_macros = macros,%0D%0A include_dirs = include_dirs,%0D%0A library_dirs = library_dirs,%0D%0A libraries = libraries,%0D%0A extra_compile_args = %5B%22/O2%22, %22/GL%22, %22/MT%22, %22/EHsc%22, %22/Gy%22, %22/Zi%22%5D,%0D%0A extra_link_args = %5B%22/DLL%22, %22/OPT:REF%22, %22/OPT:ICF%22, %22/MACHINE:X86%22%5D,%0D%0A )%0D%0A%0D%0Asetup(name='PyV8',%0D%0A version='0.1',%0D%0A description='Python Wrapper for Google V8 Engine',%0D%0A author='Flier Lu',%0D%0A author_email='[email protected]',%0D%0A url='http://code.google.com/p/pyv8/',%0D%0A license=%22Apache 2.0%22,%0D%0A py_modules=%5B'PyV8'%5D,%0D%0A ext_modules=%5Bpyv8%5D%0D%0A )
56915ed7d290fff6e37859181781687590a2e974
Remove early_stopping.py from estimator/contrib in favor of estimator/python/estimator/early_stopping.py. And the test.
tensorflow/contrib/estimator/python/estimator/early_stopping.py
tensorflow/contrib/estimator/python/estimator/early_stopping.py
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """early_stopping python module. Importing from tensorflow.python.estimator is unsupported and will soon break! """ # pylint: disable=unused-import,g-bad-import-order,g-import-not-at-top,wildcard-import from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow_estimator.contrib.estimator.python.estimator import early_stopping # Include attrs that start with single underscore. _HAS_DYNAMIC_ATTRIBUTES = True early_stopping.__all__ = [ s for s in dir(early_stopping) if not s.startswith('__') ] from tensorflow_estimator.contrib.estimator.python.estimator.early_stopping import *
Python
0.000131
@@ -1023,34 +1023,16 @@ timator. -contrib.estimator. python.e @@ -1266,26 +1266,8 @@ tor. -contrib.estimator. pyth
1ee1d0daab4b8e123bc04996019fb12cc65b8888
Add tISM SDB module (#36957)
salt/sdb/tism.py
salt/sdb/tism.py
Python
0
@@ -0,0 +1,2158 @@ +# -*- coding: utf-8 -*-%0A'''%0AtISM - the Immutalbe Secrets Manager SDB Module%0A%0A:maintainer: tISM%0A:maturity: New%0A:platform: all%0A%0A.. versionadded:: TBD%0A%0AThis module will decrypt PGP encrypted secrets against a tISM server.%0A%0A.. code::%0A%0A sdb://%3Cprofile%3E/%3Cencrypted secret%3E%0A%0A sdb://tism/hQEMAzJ+GfdAB3KqAQf9E3cyvrPEWR1sf1tMvH0nrJ0bZa9kDFLPxvtwAOqlRiNp0F7IpiiVRF+h+sW5Mb4ffB1TElMzQ+/G5ptd6CjmgBfBsuGeajWmvLEi4lC6/9v1rYGjjLeOCCcN4Dl5AHlxUUaSrxB8akTDvSAnPvGhtRTZqDlltl5UEHsyYXM8RaeCrBw5Or1yvC9Ctx2saVp3xmALQvyhzkUv5pTb1mH0I9Z7E0ian07ZUOD+pVacDAf1oQcPpqkeNVTQQ15EP0fDuvnW+a0vxeLhkbFLfnwqhqEsvFxVFLHVLcs2ffE5cceeOMtVo7DS9fCtkdZr5hR7a+86n4hdKfwDMFXiBwSIPMkmY980N/H30L/r50+CBkuI/u4M2pXDcMYsvvt4ajCbJn91qaQ7BDI=%0A%0AA profile must be setup in the minion configuration or pillar. If you want to use sdb in a runner or pillar you must also place a profile in the master configuration.%0A%0A.. code-block:: yaml%0A%0A tism:%0A driver: tism%0A url: https://my.tismd:8080/decrypt%0A token: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJhZG1pbiI6MSwiZXhwIjoxNTg1MTExNDYwLCJqdGkiOiI3NnA5cWNiMWdtdmw4Iiwia2V5cyI6WyJBTEwiXX0.RtAhG6Uorf5xnSf4Ya_GwJnoHkCsql4r1_hiOeDSLzo%0A'''%0A%0Aimport logging%0Aimport json%0A%0Aimport salt.utils.http as http%0Afrom salt.exceptions import SaltConfigurationError%0A%0Alog = logging.getLogger(__name__)%0A%0A__virtualname__ = %22tism%22%0A%0A%0Adef __virtual__():%0A '''%0A This module has no other system dependencies%0A '''%0A return __virtualname__%0A%0A%0Adef get(key, service=None, profile=None): # pylint: disable=W0613%0A '''%0A Get a decrypted secret from the tISMd API%0A '''%0A%0A if not profile.get('url') or not profile.get('token'):%0A raise SaltConfigurationError(%22url and/or token missing from the tism sdb profile%22)%0A%0A request = %7B%22token%22: profile%5B'token'%5D, %22encsecret%22: key%7D%0A%0A result = http.query(%0A profile%5B'url'%5D,%0A method='POST',%0A data=json.dumps(request),%0A )%0A%0A decrypted = result.get('body')%0A%0A if not decrypted:%0A log.warning('tism.get sdb decryption request failed with error %7B0%7D'.format(result.get('error', 'unknown')))%0A return %22ERROR%22+str(result.get('status', 'unknown'))%0A%0A return decrypted%0A
8dad8cf8c83eba037b29d3243b29b985dc4004a1
add setup.py
setup.py
setup.py
Python
0
@@ -0,0 +1,145 @@ +#!/usr/bin/python%0A%0Afrom distutils.core import setup%0A%0Asetup(%0A name='telepathy-python',%0A version='0.0.1',%0A packages=%5B'telepathy'%5D,%0A )%0A%0A
b5c2986ccf3c70b9cb52d0374c53bc8232719554
Add dbm_metrics.py script where the AIS method will be stored
pylearn2/scripts/dbm/dbm_metrics.py
pylearn2/scripts/dbm/dbm_metrics.py
Python
0
@@ -0,0 +1,396 @@ +#!/usr/bin/env python%0Aimport argparse%0A%0Aif __name__ == '__main__':%0A # Argument parsing%0A parser = argparse.ArgumentParser()%0A parser.add_argument(%22metric%22, help=%22the desired metric%22,%0A choices=%5B%22ais%22%5D)%0A parser.add_argument(%22model_path%22, help=%22path to the pickled DBM model%22)%0A args = parser.parse_args()%0A%0A metric = args.metric%0A model_path = args.model_path%0A
a8b079b8be1e9559770dd0f701385b2361158e24
Add tests_require to setup.py
setup.py
setup.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2015 Radim Rehurek <[email protected]> # # This code is distributed under the terms and conditions # from the MIT License (MIT). import io import os import sys if sys.version_info < (2, 6): raise ImportError("smart_open requires python >= 2.6") # TODO add ez_setup? from setuptools import setup, find_packages def read(fname): return io.open(os.path.join(os.path.dirname(__file__), fname), encoding='utf-8').read() setup( name = 'smart_open', version = '1.3.4', description = 'Utils for streaming large files (S3, HDFS, gzip, bz2...)', long_description = read('README.rst'), packages=find_packages(), author = u'Radim Řehůřek', author_email = '[email protected]', maintainer = u'Radim Řehůřek', maintainer_email = '[email protected]', url = 'https://github.com/piskvorky/smart_open', download_url = 'http://pypi.python.org/pypi/smart_open', keywords = 'file streaming, s3, hdfs', license = 'MIT', platforms = 'any', install_requires=[ 'boto >= 2.32', 'bz2file', 'requests', ], test_suite="smart_open.tests", classifiers = [ # from http://pypi.python.org/pypi?%3Aaction=list_classifiers 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: System :: Distributed Computing', 'Topic :: Database :: Front-Ends', ], )
Python
0.000001
@@ -1151,16 +1151,98 @@ %5D,%0A%0A + tests_require=%5B%0A 'mock',%0A 'moto',%0A 'responses',%0A %5D,%0A%0A%0A test
c230fc69e2509c79190e53589457f161accd1626
Change long_description in setup.py.
setup.py
setup.py
import re import ast from setuptools import setup, find_packages _version_re = re.compile(r'__version__\s+=\s+(.*)') with open('mycli/__init__.py', 'rb') as f: version = str(ast.literal_eval(_version_re.search( f.read().decode('utf-8')).group(1))) description = 'CLI for MySQL Database. With auto-completion and syntax highlighting.' setup( name='mycli', author='Amjith Ramanujam', author_email='amjith[dot]r[at]gmail.com', version=version, license='LICENSE.txt', url='http://mycli.net', packages=find_packages(), package_data={'mycli': ['myclirc', '../AUTHORS', '../SPONSORS']}, description=description, long_description=open('README.md').read(), install_requires=[ 'click >= 4.1', 'Pygments >= 2.0', # Pygments has to be Capitalcased. WTF? 'prompt_toolkit==0.45', 'PyMySQL >= 0.6.6', 'sqlparse == 0.1.14', 'configobj >= 5.0.6', ], entry_points=''' [console_scripts] mycli=mycli.main:cli ''', classifiers=[ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: Unix', 'Programming Language :: Python', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: SQL', 'Topic :: Database', 'Topic :: Database :: Front-Ends', 'Topic :: Software Development', 'Topic :: Software Development :: Libraries :: Python Modules', ], )
Python
0
@@ -716,32 +716,19 @@ ion= -open('README.md').read() +description ,%0A
2a331f0165b2e3874243fcfecc3e3deab2760ff4
Add python setup filie
setup.py
setup.py
Python
0.000002
@@ -0,0 +1,297 @@ +from setuptools import setup%0A%0Asetup(name='bitevery',%0A version='0.0.1.b2',%0A description='BitEvery Python API',%0A url='https://www.bitevery.com',%0A author='BitEvery',%0A author_email='[email protected]',%0A license='MIT',%0A packages=%5B'bitevery'%5D,%0A zip_safe=False)
57bfd23957bdd535b5ae21ed1df3ff25dd75a8bd
Add setup.py
setup.py
setup.py
Python
0.000001
@@ -0,0 +1,373 @@ +from setuptools import setup%0A%0Asetup(%0A name='pirx',%0A version='0.1',%0A author='Piotr Wasilewski',%0A author_email='[email protected]',%0A description='Django settings builder',%0A license='MIT',%0A keywords='django settings build builder',%0A url='https://github.com/piotrekw/pirx',%0A scripts=%5B'scripts/pirx-build.py'%5D,%0A packages=%5B'pirx'%5D%0A )%0A%0A
876d02a03382863acaf1e8a5327475014734cc8b
add metadata proxy support for Quantum Networks
setup.py
setup.py
# Copyright 2011 OpenStack, LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import setuptools from quantum.openstack.common import setup from quantum.version import version_info as version requires = setup.parse_requirements() depend_links = setup.parse_dependency_links() Name = 'quantum' Url = "https://launchpad.net/quantum" Version = version.canonical_version_string(always=True) License = 'Apache License 2.0' Author = 'Netstack' AuthorEmail = '[email protected]' Maintainer = '' Summary = 'Quantum (virtual network service)' ShortDescription = Summary Description = Summary EagerResources = [ 'quantum', ] ProjectScripts = [ 'bin/quantum-rootwrap', ] config_path = 'etc/quantum/' init_path = 'etc/init.d' rootwrap_path = 'etc/quantum/rootwrap.d' ovs_plugin_config_path = 'etc/quantum/plugins/openvswitch' cisco_plugin_config_path = 'etc/quantum/plugins/cisco' linuxbridge_plugin_config_path = 'etc/quantum/plugins/linuxbridge' nvp_plugin_config_path = 'etc/quantum/plugins/nicira' ryu_plugin_config_path = 'etc/quantum/plugins/ryu' meta_plugin_config_path = 'etc/quantum/plugins/metaplugin' nec_plugin_config_path = 'etc/quantum/plugins/nec' DataFiles = [ (config_path, ['etc/quantum.conf', 'etc/rootwrap.conf', 'etc/api-paste.ini', 'etc/policy.json', 'etc/dhcp_agent.ini', 'etc/l3_agent.ini']), (rootwrap_path, ['etc/quantum/rootwrap.d/dhcp.filters', 'etc/quantum/rootwrap.d/iptables-firewall.filters', 'etc/quantum/rootwrap.d/l3.filters', 'etc/quantum/rootwrap.d/linuxbridge-plugin.filters', 'etc/quantum/rootwrap.d/nec-plugin.filters', 'etc/quantum/rootwrap.d/openvswitch-plugin.filters', 'etc/quantum/rootwrap.d/ryu-plugin.filters']), (init_path, ['etc/init.d/quantum-server']), (ovs_plugin_config_path, ['etc/quantum/plugins/openvswitch/ovs_quantum_plugin.ini']), (cisco_plugin_config_path, ['etc/quantum/plugins/cisco/credentials.ini', 'etc/quantum/plugins/cisco/l2network_plugin.ini', 'etc/quantum/plugins/cisco/nexus.ini', 'etc/quantum/plugins/cisco/ucs.ini', 'etc/quantum/plugins/cisco/cisco_plugins.ini', 'etc/quantum/plugins/cisco/db_conn.ini']), (linuxbridge_plugin_config_path, ['etc/quantum/plugins/linuxbridge/linuxbridge_conf.ini']), (nvp_plugin_config_path, ['etc/quantum/plugins/nicira/nvp.ini']), (ryu_plugin_config_path, ['etc/quantum/plugins/ryu/ryu.ini']), (meta_plugin_config_path, ['etc/quantum/plugins/metaplugin/metaplugin.ini']), (nec_plugin_config_path, ['etc/quantum/plugins/nec/nec.ini']), ] setuptools.setup( name=Name, version=Version, url=Url, author=Author, author_email=AuthorEmail, description=ShortDescription, long_description=Description, license=License, classifiers=[ 'Environment :: OpenStack', 'Intended Audience :: System Administrators', 'Intended Audience :: Information Technology', 'License :: OSI Approved :: Apache Software License', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', ], scripts=ProjectScripts, install_requires=requires, dependency_links=depend_links, include_package_data=False, 
setup_requires=['setuptools_git>=0.4'], packages=setuptools.find_packages('.'), cmdclass=setup.get_cmdclass(), data_files=DataFiles, eager_resources=EagerResources, entry_points={ 'console_scripts': [ 'quantum-dhcp-agent = quantum.agent.dhcp_agent:main', 'quantum-dhcp-agent-dnsmasq-lease-update =' 'quantum.agent.linux.dhcp:Dnsmasq.lease_update', 'quantum-netns-cleanup = quantum.agent.netns_cleanup_util:main', 'quantum-l3-agent = quantum.agent.l3_agent:main', 'quantum-linuxbridge-agent =' 'quantum.plugins.linuxbridge.agent.linuxbridge_quantum_agent:main', 'quantum-openvswitch-agent =' 'quantum.plugins.openvswitch.agent.ovs_quantum_agent:main', 'quantum-ryu-agent = ' 'quantum.plugins.ryu.agent.ryu_quantum_agent:main', 'quantum-nec-agent = ' 'quantum.plugins.nec.agent.nec_quantum_agent:main', 'quantum-server = quantum.server:main', 'quantum-debug = quantum.debug.shell:main', ] }, )
Python
0
@@ -4603,32 +4603,221 @@ um_agent:main',%0A + 'quantum-metadata-agent ='%0A 'quantum.agent.metadata.agent:main',%0A 'quantum-ns-metadata-proxy ='%0A 'quantum.agent.metadata.namespace_proxy:main',%0A 'qua
76a8834243cc70f3065b686dd09004f1dc3ffdb0
Create rapideye_remover_bordas_catalogo.py
rapideye_remover_bordas_catalogo.py
rapideye_remover_bordas_catalogo.py
Python
0.000038
@@ -0,0 +1,1124 @@ +from osgeo import ogr%0Aimport os%0Afrom osgeo import osr%0Afrom qgis.core import *%0A%0Ashapefile = %22C:/Users/pedro.mendes/Desktop/Brasil_00_2016.shp%22%0Adriver = ogr.GetDriverByName(%22ESRI Shapefile%22)%0AdataSource = driver.Open(shapefile, 0)%0Alayer = dataSource.GetLayer()%0Aproj=layer.GetSpatialRef()%0A%0A%0AoutputMergefn = %22C:/Users/pedro.mendes/Desktop/Brasil_01_2016.shp%22%0A%0A%0AdriverName = 'ESRI Shapefile'%0AgeometryType = ogr.wkbPolygon%0Aout_driver = ogr.GetDriverByName( driverName )%0Aif os.path.exists(outputMergefn):%0A out_driver.DeleteDataSource(outputMergefn)%0Aout_ds = out_driver.CreateDataSource(outputMergefn)%0Aout_layer = out_ds.CreateLayer(outputMergefn, geom_type=geometryType, srs=proj)%0A%0A%0A%0A%0AjuntaDefn=layer.GetLayerDefn()%0AjuntaFeat=ogr.Geometry(3)%0A%0Ac=0%0A%0Afor feature in layer:%0A geom = feature.GetGeometryRef()%0A geom2 = geom.Difference(juntaFeat)%0A juntaFeat= juntaFeat.Union(geom)%0A out_feat = ogr.Feature(out_layer.GetLayerDefn())%0A out_feat.SetGeometry(geom2)%0A out_layer.CreateFeature(out_feat)%0A out_layer.SyncToDisk()%0A c+=1%0A #break%0A%0A %0Alayer = None%0AdataSource=None%0A %0Aprint %22total de feicoes: %25i %22 %25( c)%0A
737dadd2e447c9f03de80ea808e137dcc1206c9b
Create Nvidia_GPU_Temperature.py
Nvidia_GPU_Temperature.py
Nvidia_GPU_Temperature.py
Python
0.000028
@@ -0,0 +1,1025 @@ +import time%0Afrom BlinkyTape import BlinkyTape%0Aimport subprocess%0Aimport os%0Aimport re%0A%0A#bb = BlinkyTape('/dev/tty.usbmodemfa131')%0Abb = BlinkyTape('COM8')%0A%0Awhile True:%0A%0A output = subprocess.check_output(%5B%22C:%5C%5CProgram Files%5C%5CNVIDIA Corporation%5C%5CNVSMI%5C%5Cnvidia-smi.exe%22, %22-a%22%5D, shell=True)%0A #os.popen('C:%5C%5CProgram Files%5CNVIDIA Corporation%5CNVSMI%5Cnvidia-smi.exe')%0A #output=os.popen(%22C:%5C%5CProgram Files%5C%5CNVIDIA Corporation%5C%5CNVSMI%5C%5Cnvidia-smi.exe%22).read()%0A %0A #print(%22====%22 + str(output) + %22=====%22)%0A temp = re.search(%22GPU Current.*%22,output).group()%5B30:33%5D%0A temp_baseline = 60%0A temp_multiplier = 5%0A color_temp = (int(temp) - temp_baseline ) * temp_multiplier%0A green = 100 - color_temp%0A red = 0 + color_temp%0A blue = 0%0A print %22Current GPU Temp: %25s RGB: %25s %25s %25s%22 %25 (temp, red, green, blue)%0A %0A for x in range(60):%0A bb.sendPixel(red, green, blue)%0A bb.show()%0A %0A #time.sleep(1)%0A %0A #for x in range(60):%0A # bb.sendPixel(100, 0, 0)%0A #bb.show()%0A%0A time.sleep(1)%0A%0A %0A
b39af3af2104875919577f769701e7bde73967fd
clean file initialized
genetic_music.py
genetic_music.py
Python
0.000003
@@ -0,0 +1,18 @@ +print('hola chio')
fbc780c7beb94d73b2a4ea110e733f8c87763741
Add location name lookup for ajax_select.
geoip/lookups.py
geoip/lookups.py
Python
0
@@ -0,0 +1,1532 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A##%0A## Author: Orcun Avsar %[email protected]%3E%0A##%0A## Copyright (C) 2011 S2S Network Consultoria e Tecnologia da Informacao LTDA%0A##%0A## This program is free software: you can redistribute it and/or modify%0A## it under the terms of the GNU Affero General Public License as%0A## published by the Free Software Foundation, either version 3 of the%0A## License, or (at your option) any later version.%0A##%0A## This program is distributed in the hope that it will be useful,%0A## but WITHOUT ANY WARRANTY; without even the implied warranty of%0A## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A## GNU Affero General Public License for more details.%0A##%0A## You should have received a copy of the GNU Affero General Public License%0A## along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A##%0A%22%22%22Module for ajax autocomplete lookups for locations.%0A%22%22%22%0A%0Afrom ajax_select import LookupChannel%0A%0Afrom geoip.models import Location%0Afrom geoip.models import LocationNamesAggregation%0A%0A%0Aclass LocationLookup(LookupChannel):%0A%0A model = Location%0A %0A def get_query(self,q,request):%0A words = q.replace(',',' ').replace('-', ' ').split()%0A query = Location.objects.all()%0A %0A queries = %5B%5D%0A for word in words:%0A query = Location.objects.filter(name__icontains=word)%5B:20%5D%0A queries.append(query)%0A %0A entities = %5B%5D%0A for query in queries:%0A for entity in query:%0A entities.append(entity)%0A %0A return entities%0A %0A def format_match(self,obj):%0A obj.name
af3ba846a8074132c64568c420ecb9b6ade9c6ea
Work on defining RegEx to find and format molecular geometries in Gaussian output files.
geomRegexTest.py
geomRegexTest.py
Python
0
@@ -0,0 +1,1327 @@ +__author__ = 'Thomas Heavey'%0A%0Aimport re%0A%0Afilename = %22testg.out%22%0A%0Adef findgeoms(filename):%0A %22%22%22A function that takes a file name and returns a list of%0A geometries.%22%22%22%0A relevantelem = %5B1,3,4,5%5D%0A xyzformat = '%7B:%3E2%7D %7B: f%7D %7B: f%7D %7B: f%7D'%0A geomregex = re.compile(%0A r'(?:Standard orientation)' # non-capturing (nc) start of geometry%0A r'(?:.+?)' # nc geometry header%0A r'((?:(?:%5Cs+%5Cd+%5Cs+)' # nc atom number%0A r'(%5Cd+%5Cs+)' # (capturing) atomic number%0A r'(?:%5Cd+%5Cs+)' # nc atomic type%0A r'(-?%5Cd+%5C.%5Cd+%5Cs*)%7B3,3%7D' # 3 cartesian coordinates (x,y,z)%0A r')+)' # repeat for at least one atom%0A r'(?:-)' # nc end at line of dashes%0A , re.DOTALL)%0A%0A with open(filename, 'r') as file:%0A geoms = geomregex.search(file.read())%0A print(geoms.group(1))%0A mlgeoms = geoms.group(1)%0A for line in mlgeoms.split('%5Cn'):%0A # Ignore blank lines:%0A if len(line) %3C 2:%0A continue%0A xyzelemstring = %5Bline.split()%5Bi%5D for i in relevantelem%5D%0A xyzelemnum = %5Bfloat(i) for i in xyzelemstring%5D%0A xyzelemnum%5B0%5D = int(xyzelemstring%5B0%5D)%0A print(xyzformat.format(*xyzelemnum))%0A%0Afindgeoms(filename)
df9a6ab91eedfe91343ceb103156fe08cd965614
test script form new Keras 2x API model config
app/backend-test/keras_2x_api/run01_print_keras_model_json.py
app/backend-test/keras_2x_api/run01_print_keras_model_json.py
Python
0
@@ -0,0 +1,96 @@ +#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A__author__ = 'ar'%0A%0Aif __name__ == '__main__':%0A pass
1498e786201c1c1e2127da7d23db142559ad68a8
Add support for Assembla
services/assembla.py
services/assembla.py
Python
0
@@ -0,0 +1,813 @@ +import foauth.providers%0A%0A%0Aclass Assembla(foauth.providers.OAuth2):%0A # General info about the provider%0A provider_url = 'https://www.assembla.com/'%0A docs_url = 'http://api-doc.assembla.com/content/api_reference.html'%0A category = 'Code'%0A%0A # URLs to interact with the API%0A authorize_url = 'https://api.assembla.com/authorization'%0A access_token_url = 'https://api.assembla.com/token'%0A api_domain = 'api.assembla.com'%0A%0A available_permissions = %5B%0A (None, 'read, write and manage your projects'),%0A %5D%0A%0A def __init__(self, *args, **kwargs):%0A super(Assembla, self).__init__(*args, **kwargs)%0A self.auth = (self.client_id, self.client_secret)%0A%0A def get_user_id(self, key):%0A r = self.api(key, self.api_domain, u'/v1/user')%0A return unicode(r.json()%5Bu'id'%5D)%0A
e67abde6228feaa231b2b3bfc97d6ca1f2cf8276
Use match argument in calls to pytest.raises when testing pin
tests/unit_tests/test_pin.py
tests/unit_tests/test_pin.py
""" Tests for constructing Pin universes """ import numpy import pytest import openmc from openmc.model import Pin @pytest.fixture def pin_mats(): fuel = openmc.Material(name="UO2") clad = openmc.Material(name="zirc") water = openmc.Material(name="water") return fuel, clad, water @pytest.fixture def good_radii(): return (0.4, 0.42) def test_failure(pin_mats, good_radii): """Check for various failure modes""" # Bad material type with pytest.raises(TypeError): Pin.from_radii(good_radii, [mat.name for mat in pin_mats]) # Incorrect lengths with pytest.raises(ValueError) as exec_info: Pin.from_radii(good_radii[: len(pin_mats) - 2], pin_mats) assert "length" in str(exec_info) # Non-positive radii rad = (-0.1,) + good_radii[1:] with pytest.raises(ValueError) as exec_info: Pin.from_radii(rad, pin_mats) assert "index 0" in str(exec_info) # Non-increasing radii rad = tuple(reversed(good_radii)) with pytest.raises(ValueError) as exec_info: Pin.from_radii(rad, pin_mats) assert "index 1" in str(exec_info) # Bad orientation with pytest.raises(ValueError) as exec_info: Pin.from_radii(good_radii, pin_mats, orientation="fail") assert "Orientation" in str(exec_info) def test_from_radii(pin_mats, good_radii): name = "test pin" p = Pin.from_radii(good_radii, pin_mats, name=name) assert len(p.cells) == len(pin_mats) assert p.name == name assert p.radii == good_radii def test_subdivide(pin_mats, good_radii): surfs = [openmc.ZCylinder(r=r) for r in good_radii] pin = Pin(surfs, pin_mats) assert pin.radii == good_radii assert len(pin.cells) == len(pin_mats) # subdivide inner region N = 5 pin.subdivide_ring(0, N) assert len(pin.radii) == len(good_radii) + N - 1 assert len(pin.cells) == len(pin_mats) + N - 1 # check volumes of new rings bounds = (0,) + pin.radii[:N] sqrs = numpy.square(bounds) assert sqrs[1:] - sqrs[:-1] == pytest.approx(good_radii[0] ** 2 / N) # subdivide non-inner most region new_pin = Pin.from_radii(good_radii, pin_mats) new_pin.subdivide_ring(1, N) assert len(new_pin.radii) == len(good_radii) + N - 1 assert len(new_pin.cells) == len(pin_mats) + N - 1 # check volumes of new rings bounds = new_pin.radii[:N + 1] sqrs = numpy.square(bounds) assert sqrs[1:] - sqrs[:-1] == pytest.approx((good_radii[1] ** 2 - good_radii[0] ** 2) / N)
Python
0
@@ -613,32 +613,48 @@ aises(ValueError +, match=%22length%22 ) as exec_info:%0A @@ -722,46 +722,8 @@ ats) -%0A assert %22length%22 in str(exec_info) %0A%0A @@ -805,32 +805,49 @@ aises(ValueError +, match=%22index 0%22 ) as exec_info:%0A @@ -887,47 +887,8 @@ ats) -%0A assert %22index 0%22 in str(exec_info) %0A%0A @@ -975,32 +975,49 @@ aises(ValueError +, match=%22index 1%22 ) as exec_info:%0A @@ -1057,47 +1057,8 @@ ats) -%0A assert %22index 1%22 in str(exec_info) %0A%0A @@ -1110,16 +1110,37 @@ lueError +, match=%22Orientation%22 ) as exe @@ -1216,51 +1216,8 @@ l%22)%0A - assert %22Orientation%22 in str(exec_info)%0A %0A%0Ade @@ -1436,16 +1436,17 @@ _radii%0A%0A +%0A def test @@ -2373,16 +2373,25 @@ .approx( +%0A (good_ra @@ -2425,13 +2425,12 @@ ** 2) / N)%0A -%0A
0fc46c92f8682879591d9fc473be34116c9106be
add migration
custom/ilsgateway/migrations/0010_auto_20160830_1923.py
custom/ilsgateway/migrations/0010_auto_20160830_1923.py
Python
0.000001
@@ -0,0 +1,1166 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0Aimport datetime%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('ilsgateway', '0009_auto_20160413_1311'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='deliverygroupreport',%0A name='report_date',%0A field=models.DateTimeField(default=datetime.datetime.utcnow),%0A preserve_default=True,%0A ),%0A migrations.AlterField(%0A model_name='slabconfig',%0A name='sql_location',%0A field=models.OneToOneField(to='locations.SQLLocation'),%0A preserve_default=True,%0A ),%0A migrations.AlterField(%0A model_name='supplypointstatus',%0A name='status_type',%0A field=models.CharField(max_length=50, choices=%5B(b'rr_fac', b'rr_fac'), (b'trans_fac', b'trans_fac'), (b'soh_fac', b'soh_fac'), (b'super_fac', b'super_fac'), (b'rr_dist', b'rr_dist'), (b'del_del', b'del_del'), (b'la_fac', b'la_fac'), (b'del_dist', b'del_dist'), (b'del_fac', b'del_fac')%5D),%0A preserve_default=True,%0A ),%0A %5D%0A
fdd9ac1da19d37ca482d770bb0c8f159fb7d4752
optimize to not instantiate Fortune.
flask/app.py
flask/app.py
#!/usr/bin/env python from flask import Flask, jsonify, request, render_template from flask.ext.sqlalchemy import SQLAlchemy from sqlalchemy import create_engine from random import randint from operator import attrgetter try: import MySQLdb mysql_schema = "mysql:" except ImportError: mysql_schema = "mysql+pymysql:" # setup app = Flask(__name__) app.config['SQLALCHEMY_DATABASE_URI'] = mysql_schema + '//benchmarkdbuser:benchmarkdbpass@DBHOSTNAME:3306/hello_world?charset=utf8' db = SQLAlchemy(app) dbraw_engine = create_engine(app.config['SQLALCHEMY_DATABASE_URI']) # models class World(db.Model): __tablename__ = "World" id = db.Column(db.Integer, primary_key=True) randomNumber = db.Column(db.Integer) # http://stackoverflow.com/questions/7102754/jsonify-a-sqlalchemy-result-set-in-flask @property def serialize(self): """Return object data in easily serializeable format""" return { 'id' : self.id, 'randomNumber': self.randomNumber } class Fortune(db.Model): __tablename__ = "Fortune" id = db.Column(db.Integer, primary_key=True) message = db.Column(db.String) # views @app.route("/json") def hello(): resp = {"message": "Hello, World!"} return jsonify(resp) @app.route("/db") def get_random_world(): num_queries = request.args.get("queries", 1) worlds = [] for i in range(int(num_queries)): wid = randint(1, 10000) worlds.append(World.query.get(wid).serialize) return jsonify(worlds=worlds) @app.route("/dbs") def get_random_world_single(): wid = randint(1, 10000) worlds = [World.query.get(wid).serialize] return jsonify(worlds=worlds) @app.route("/dbraw") def get_random_world_raw(): connection = dbraw_engine.connect() num_queries = request.args.get("queries", 1) worlds = [] for i in range(int(num_queries)): wid = randint(1, 10000) result = connection.execute("SELECT * FROM world WHERE id = " + str(wid)).fetchone() worlds.append({'id': result[0], 'randomNumber': result[1]}) connection.close() return jsonify(worlds=worlds) @app.route("/dbsraw") def get_random_world_single_raw(): connection = dbraw_engine.connect() wid = randint(1, 10000) result = connection.execute("SELECT * FROM world WHERE id = " + str(wid)).fetchone() worlds = [{'id': result[0], 'randomNumber': result[1]}] connection.close() return jsonify(worlds=worlds) @app.route("/fortunes") def get_fortunes(): fortunes = list(Fortune.query.all()) fortunes.append(Fortune(id=0, message="Additional fortune added at request time.")) fortunes.sort(key=attrgetter('message')) return render_template('fortunes.html', fortunes=fortunes) @app.route("/fortunesraw") def get_forutens_raw(): fortunes = [] for row in dbraw_engine.execute("SELECT * FROM Fortune"): fortunes.append(Fortune(id=row.id, message=row.message)) fortunes.append(Fortune(id=0, message="Additional fortune added at request time.")) fortunes.sort(key=attrgetter('message')) return render_template('fortunes.html', fortunes=fortunes) # entry point for debugging if __name__ == "__main__": app.run(debug=True)
Python
0
@@ -2748,26 +2748,13 @@ s = -%5B%5D%0A for row in +list( dbra @@ -2798,73 +2798,8 @@ ne%22) -:%0A fortunes.append(Fortune(id=row.id, message=row.message) )%0A
c2b69a51faac56689edc88e747a00b60cf08cc04
Add default ordering of progress outcome groups
dthm4kaiako/poet/migrations/0003_auto_20190731_1912.py
dthm4kaiako/poet/migrations/0003_auto_20190731_1912.py
Python
0
@@ -0,0 +1,358 @@ +# Generated by Django 2.1.5 on 2019-07-31 07:12%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('poet', '0002_progressoutcomegroup'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterModelOptions(%0A name='progressoutcomegroup',%0A options=%7B'ordering': %5B'name'%5D%7D,%0A ),%0A %5D%0A
32a79573b38c6d2ea7f5b81363610a5d9332ed4e
Add python script to parse JSON output
src/main/resources/jsonformat.py
src/main/resources/jsonformat.py
Python
0.000006
@@ -0,0 +1,1202 @@ +#!/usr/bin/python2.7%0Aimport json%0Aimport socket%0Aimport sys%0A%0Adef readOutput(host, port):%0A%09data = None%0A%09s = None%0A%09try:%0A%09%09s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0A%09%09s.connect((host, int(port)))%0A%09except socket.error as msg:%0A%09%09s = None%0A%09%09print msg%0A%09if s is None:%0A%09%09return None%0A%09try:%0A%09%09data = s.recv(1024)%0A%09except socket.error as msg:%0A%09%09print msg%0A%09if s is not None:%0A%09%09s.close%0A%09return data%0A%0Adef parseData(jsonData, metric, key):%0A%09data = json.loads(jsonData)%0A%09for x in data:%0A%09%09if not 'name' in x:%0A%09%09%09continue%0A%09%09if x%5B'name'%5D == metric:%0A%09%09%09if not 'datapoint' in x:%0A%09%09%09%09continue%0A%09%09%09monitorData = x%5B'datapoint'%5D%0A%09%09%09for k in monitorData:%0A%09%09%09%09if k == key:%0A%09%09%09%09%09return monitorData%5Bk%5D%0A%09return 'Metric %5B%25s:%25s%5D not found'%25(metric,key)%0A%0Aif __name__ == '__main__':%0A%09if len(sys.argv) %3C 4:%0A%09%09print 'Usage python jsonformat.py host port metric:key ...'%0A%09%09print 'The output like:'%0A%09%09print '%5Bvalue1,value2,...%5D'%0A%09else:%0A%09%09jsonData = readOutput(sys.argv%5B1%5D, sys.argv%5B2%5D)%0A%09%09if jsonData is None:%0A%09%09%09print 'Read JSON data error'%0A%09%09else:%0A%09%09%09l = %5B%5D%09%09%09%0A%09%09%09for x in sys.argv%5B3:%5D:%0A%09%09%09%09args = x.split(':')%0A%09%09%09%09if len(args) != 2:%0A%09%09%09%09%09continue%0A%09%09%09%09value = parseData(jsonData, args%5B0%5D, args%5B1%5D)%0A%09%09%09%09l.append(value)%0A%09%09%09print l%09%09%0A
699469342179fdc4319b5f39ea201015860ef09d
Add migration for CI fix
infrastructure/migrations/0020_auto_20210922_0929.py
infrastructure/migrations/0020_auto_20210922_0929.py
Python
0
@@ -0,0 +1,606 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.29 on 2021-09-22 07:29%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0Aimport django.db.models.deletion%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('infrastructure', '0019_project_latest_implementation_year'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='project',%0A name='latest_implementation_year',%0A field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='infrastructure.FinancialYear'),%0A ),%0A %5D%0A
2ead746f0e697276e7753c735befbd1a14feba6d
Restrict parquet many cols test to one test dimension.
tests/query_test/test_parquet.py
tests/query_test/test_parquet.py
#!/usr/bin/env python # Copyright (c) 2012 Cloudera, Inc. All rights reserved. import pytest from tests.common.test_vector import * from tests.common.impala_test_suite import * # Tests specific to parquet. class TestParquetManyColumns(ImpalaTestSuite): @classmethod def get_workload(self): return 'functional-query' @classmethod def add_test_dimensions(cls): super(TestParquetManyColumns, cls).add_test_dimensions() cls.TestMatrix.add_constraint(lambda v:\ v.get_value('table_format').file_format == 'parquet') if cls.exploration_strategy() == 'core': # Don't run on core. This test is very slow (IMPALA-864) and we are unlikely # to regress here. cls.TestMatrix.add_constraint(lambda v: False); def test_many_columns(self, vector): NUM_COLS = 2000 TABLE_NAME = "functional_parquet.parquet_many_cols" self.client.execute("drop table if exists " + TABLE_NAME) col_descs = ["col" + str(i) + " int" for i in range(NUM_COLS)] create_stmt = "CREATE TABLE " + TABLE_NAME +\ "(" + ', '.join(col_descs) + ") stored as parquet" col_vals = [str(i) for i in range(NUM_COLS)] insert_stmt = "INSERT INTO " + TABLE_NAME + " VALUES(" + ", ".join(col_vals) + ")" expected_result = "\t".join(col_vals) self.client.execute(create_stmt) self.client.execute(insert_stmt) result = self.client.execute("select count(*) from " + TABLE_NAME) assert result.data == ["1"] result = self.client.execute("select * from " + TABLE_NAME) assert result.data == [expected_result]
Python
0.999962
@@ -428,16 +428,154 @@ sions()%0A + # There is no reason to run these tests using all dimensions.%0A cls.TestMatrix.add_dimension(create_single_exec_option_dimension())%0A cls.
09d815c6b53c74ae9a2f3831a2eec9c2b266eca7
add the prototype.
simple_xls_to_xml.py
simple_xls_to_xml.py
Python
0
@@ -0,0 +1,824 @@ +%EF%BB%BF# encoding:utf-8%0A%0Aimport codecs%0Aimport xlrd%0Aimport xml.dom.minidom%0A%0Afilter_words = None%0A%0Adef xlsRead():%0A global filter_words%0A %0A data = xlrd.open_workbook(%22filter.xlsx%22)%0A table = data.sheets()%5B0%5D # %E8%8E%B7%E5%8F%96%E7%AC%AC%E4%B8%80%E4%B8%AAsheet%0A filter_words = table.col_values(0)%0A %0Adef createXML():%0A if filter_words is None:%0A return%0A %0A impl = xml.dom.minidom.getDOMImplementation()%0A dom = impl.createDocument(None, %22filters%22, None)%0A root = dom.documentElement%0A %0A for f in filter_words:%0A filter = dom.createElement(%22filter%22)%0A filter.setAttribute(%22word%22, f)%0A root.appendChild(filter)%0A %0A out = codecs.open(%22filters.xml%22, %22w%22, %22utf-8%22)%0A dom.writexml(out, addindent=%22 %22, newl=%22%5Cn%22, encoding=%22utf-8%22)%0A out.close()%0A %0Aif __name__ == %22__main__%22:%0A xlsRead()%0A createXML()
6a268c69fced2a5b9e97086fa2a9089837376db4
add subfolder
keras/metrics/empty.py
keras/metrics/empty.py
Python
0.000005
@@ -0,0 +1,2 @@ +#%0A
1d1712259a1e6e23b7a6a5541f70573b05619e99
Create stock.py
stock.py
stock.py
Python
0.000001
@@ -0,0 +1,2308 @@ +from openerp.osv import fields, osv%0A%0Aclass stock_move(osv.Model):%0A _name = 'stock.move'%0A _inherit = 'stock.move'%0A%0A def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False,%0A loc_dest_id=False, partner_id=False):%0A res_prod = super(stock_move, self).onchange_product_id(cr, uid, ids, prod_id, loc_id,loc_dest_id, partner_id)%0A prod_obj = self.pool.get('product.product')%0A obj = prod_obj.browse(cr, uid, prod_id)%0A res_prod%5B'value'%5D.update(%7B'image_small': obj.image_small%7D)%0A return res_prod%0A%0A%0A _columns = %7B%0A 'image_small' : fields.binary('Product Image'),%0A %7D%0A%0Astock_move()%0A%0Aclass sale_order_line(osv.Model):%0A _name = 'sale.order.line'%0A _inherit = 'sale.order.line'%0A _columns = %7B%0A 'image_small' : fields.binary('Product Image'),%0A %7D%0A%0A def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,%0A uom=False, qty_uos=0, uos=False, name='', partner_id=False,%0A lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False,image_small=False, context=None):%0A context = context or %7B%7D%0A%0A res = super(sale_order_line, self).product_id_change(cr, uid, ids, pricelist, product, qty=qty,%0A uom=uom, qty_uos=qty_uos, uos=uos, name=name, partner_id=partner_id,%0A lang=lang, update_tax=update_tax, date_order=date_order, packaging=packaging, fiscal_position=fiscal_position, flag=flag, context=context)%0A%0A product_obj = self.pool.get('product.product')%0A product_obj = product_obj.browse(cr, uid, product, context=context)%0A%0A res%5B'value'%5D.update(%7B'image_small': product_obj.image_small or False%7D)%0A return res%0A%0A%0Asale_order_line()%0A%0Aclass sale_order(osv.Model):%0A _name = 'sale.order'%0A _inherit = 'sale.order'%0A%0A def _prepare_order_line_move(self, cr, uid, order, line, picking_id, date_planned, context=None):%0A res = super(sale_order, self)._prepare_order_line_move(cr, uid, order=order, line=line, picking_id=picking_id, date_planned=date_planned, context=context)%0A res%5B'image_small'%5D = line.image_small%0A return res%0A%0Asale_order()%0A
4094ea54fd9bec8c40d7666364c4ad4427eaf1c3
Use `caches` over `get_cache` per django deprecation
regulations/generator/api_reader.py
regulations/generator/api_reader.py
from django.core.cache import get_cache from regulations.generator import api_client class ApiCache(object): """ Interface with the cache. """ def __init__(self): self.cache = get_cache('api_cache') def get(self, key): return self.cache.get(key) def set(self, key, value): self.cache.set(key, value) def generate_key(self, cache_key_elements): return '-'.join(cache_key_elements) class ApiReader(object): """ Access the regulations API. Either hit the cache, or if there's a miss, hit the API instead and cache the results. """ def __init__(self): self.cache = ApiCache() self.client = api_client.ApiClient() def all_regulations_versions(self): """ Get all versions, for all regulations. """ return self._get(['all_regulations_versions'], 'regulation') def regversions(self, label): return self._get( ['regversions', label], 'regulation/%s' % label) def cache_root_and_interps(self, reg_tree, version, is_root=True): """We will re-use the root tree at multiple points during page rendering, so cache it now. If caching an interpretation, also store child interpretations with titles (so that, when rendering slide-down interpretations, we don't perform additional fetches)""" if is_root or reg_tree.get('title'): tree_id = '-'.join(reg_tree['label']) cache_key = self.cache.generate_key(['regulation', tree_id, version]) self.cache.set(cache_key, reg_tree) for child in reg_tree['children']: if child.get('node_type') == 'interp': self.cache_root_and_interps(child, version, False) def regulation(self, label, version): cache_key = self.cache.generate_key(['regulation', label, version]) cached = self.cache.get(cache_key) if cached is not None: return cached else: regulation = self.client.get('regulation/%s/%s' % (label, version)) #Add the tree to the cache if regulation: self.cache_root_and_interps(regulation, version) return regulation def _get(self, cache_key_elements, api_suffix, api_params={}): """ Retrieve from the cache whenever possible, or get from the API """ cache_key = self.cache.generate_key(cache_key_elements) cached = self.cache.get(cache_key) if cached is not None: return cached else: element = self.client.get(api_suffix, api_params) self.cache.set(cache_key, element) return element def layer(self, layer_name, label, version): regulation_part = label.split('-')[0] return self._get( ['layer', layer_name, regulation_part, version], 'layer/%s/%s/%s' % (layer_name, regulation_part, version)) def diff(self, label, older, newer): """ End point for diffs. """ return self._get( ['diff', label, older, newer], "diff/%s/%s/%s" % (label, older, newer)) def notices(self, part=None): """ End point for notice searching. Right now just a list. """ if part: return self._get( ['notices', part], 'notice', {'part': part}) else: return self._get( ['notices'], 'notices') def notice(self, fr_document_number): """ End point for retrieving a single notice. """ return self._get( ['notice', fr_document_number], 'notice/%s' % fr_document_number) def search(self, query, version=None, regulation=None, page=0): """Search via the API. Never cache these (that's the duty of the search index)""" params = {'q': query, 'page': page} if version: params['version'] = version if regulation: params['regulation'] = regulation return self.client.get('search', params)
Python
0.000001
@@ -23,25 +23,22 @@ import -get_ cache +s %0Afrom re @@ -188,18 +188,15 @@ e = -get_ cache -( +s%5B 'api @@ -202,17 +202,17 @@ i_cache' -) +%5D %0A%0A de @@ -2109,16 +2109,17 @@ # + Add the
ed09ca11fc3586c9782103269b12240ed6b27911
complete and tested juliaset, HW4
juliaset.py
juliaset.py
Python
0
@@ -0,0 +1,956 @@ +class JuliaSet(object):%0A%0A def set_plane(self, _d):%0A self._d=_d%0A self._complexplane=%5B%5D%0A x=-2%0A y=-2%0A while x%3C=2:%0A while y%3C=2:%0A self._complexplane.append(complex(x,y))%0A y+=_d%0A x+=_d%0A y=-2%0A return self._complexplane%0A%0A def __init__(self, c, n=100):%0A self.c = c%0A self.n = n%0A self._d=0.001%0A self._complexplane=%5B%5D#self.set_plane(self._d)%0A%0A def juliamap(self, z):%0A return ((z**2)+self.c)%0A%0A def iterate(self, z):%0A m = 0%0A while True:%0A m+=1%0A z=self.juliamap(z)%0A if abs(z)%3E2:%0A return m%0A elif m%3E=self.n:%0A return 0%0A%0A def set_spacing(self, d):%0A self._d = d%0A self._complexplane=self.set_plane(self._d)%0A%0A def generate(self):%0A self.set = %5Bself.iterate(z) for z in self._complexplane%5D%0A return self.set%0A
8282cca05b784bb0966ba8246900627286c5d98c
Use invoke as build tool
tasks.py
tasks.py
Python
0.000001
@@ -0,0 +1,2416 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals, absolute_import%0A%0Afrom os.path import join, abspath, dirname, exists%0A%0Afrom invoke import run, task%0A%0AROOT = abspath(join(dirname(__file__)))%0AI18N_DOMAIN = 'udata-admin'%0A%0A%0Adef green(text):%0A return '%5C033%5B1;32m%7B0%7D%5C033%5B0;m'.format(text)%0A%0A%0Adef red(text):%0A return '%5C033%5B1;31m%7B0%7D%5C033%5B0;m'.format(text)%0A%0A%0Adef cyan(text):%0A return '%5C033%5B1;36m%7B0%7D%5C033%5B0;m'.format(text)%0A%0A%0Adef lrun(command, *args, **kwargs):%0A run('cd %7B0%7D && %7B1%7D'.format(ROOT, command), *args, **kwargs)%0A%0A%0Adef nrun(command, *args, **kwargs):%0A lrun('node_modules/.bin/%7B0%7D'.format(command), *args, **kwargs)%0A%0A%0A@task%0Adef clean(bower=False, node=False):%0A '''Cleanup all build artifacts'''%0A patterns = %5B%0A 'build', 'dist', 'cover', 'docs/_build',%0A '**/*.pyc', '*.egg-info', '.tox'%0A %5D%0A if bower:%0A patterns.append('udata/static/bower')%0A if node:%0A patterns.append('node_modules')%0A for pattern in patterns:%0A print('Removing %7B0%7D'.format(pattern))%0A run('cd %7B0%7D && rm -rf %7B1%7D'.format(ROOT, pattern))%0A%0A%0A@task%0Adef test():%0A '''Run tests suite'''%0A run('cd %7B0%7D && nosetests --rednose --force-color udata'.format(ROOT), pty=True)%0A%0A%0A@task%0Adef cover():%0A '''Run tests suite with coverage'''%0A run('cd %7B0%7D && nosetests --rednose --force-color %5C%0A --with-coverage --cover-html --cover-package=udata'.format(ROOT), pty=True)%0A%0A%0A@task%0Adef doc():%0A '''Build the documentation'''%0A run('cd %7B0%7D/doc && make html'.format(ROOT), pty=True)%0A%0A%0A@task%0Adef qa():%0A '''Run a quality report'''%0A run('flake8 %7B0%7D/udata'.format(ROOT))%0A%0A@task%0Adef serve():%0A run('cd %7B0%7D && python manage.py serve -d -r'.format(ROOT), pty=True)%0A%0A%0A@task%0Adef work(loglevel='info'):%0A run('celery -A udata.worker worker --purge --autoreload -l %25s' %25 loglevel)%0A%0A%0A@task%0Adef beat(loglevel='info'):%0A run('celery -A udata.worker beat -l %25s' %25 loglevel)%0A%0A%0A@task%0Adef i18n():%0A run('python setup.py extract_messages')%0A run('python setup.py update_catalog')%0A run('udata i18njs -d udata udata/static')%0A%0A%0A@task%0Adef i18nc():%0A run('cd %7B0%7D && python setup.py compile_catalog'.format(ROOT))%0A%0A%0A@task%0Adef build():%0A print(cyan('Compiling translations'))%0A lrun('python setup.py compile_catalog')%0A%0A%0A@task(build)%0Adef dist():%0A '''Package for distribution'''%0A print(cyan('Building a distribuable package'))%0A lrun('python setup.py bdist_wheel', pty=True)%0A
c63144242d9cf2ecf02d58eb9a93cfe426acc6dc
Add script to send unregistered user emails
scripts/send_preprint_unreg_contributor_emails.py
scripts/send_preprint_unreg_contributor_emails.py
Python
0
@@ -0,0 +1,2354 @@ +# -*- coding: utf-8 -*-%0A%22%22%22Sends an unregistered user claim email for preprints created after 2017-03-14. A hotfix was made on that%0Adate which caused unregistered user claim emails to not be sent. The regression was fixed on 2017-05-05. This%0Asends the emails that should have been sent during that time period.%0A%0ANOTE: This script should only be run ONCE.%0A%22%22%22%0Aimport sys%0Aimport logging%0Aimport datetime as dt%0Aimport pytz%0Afrom framework.auth import Auth%0A%0Afrom website.app import init_app%0Ainit_app(routes=False)%0A%0Afrom website.project import signals as project_signals%0Afrom scripts import utils as script_utils%0Afrom website.project.views import contributor # flake8: noqa (set up listeners)%0A%0Afrom osf.models import PreprintService%0A%0Alogger = logging.getLogger(__name__)%0Alogging.getLogger('website.mails.mails').setLevel(logging.CRITICAL)%0A%0A# datetime at which https://github.com/CenterForOpenScience/osf.io/commit/568413a77cc51511a0f7afe081a218676a36ebb6 was committed%0ASTART_DATETIME = dt.datetime(2017, 3, 14, 19, 10, tzinfo=pytz.utc)%0A# datetime at which https://github.com/CenterForOpenScience/osf.io/commit/38513916bb9584eb723c46e35553dc6d2c267e1a was deployed%0AEND_DATETIME = dt.datetime(2017, 5, 5, 5, 48, tzinfo=pytz.utc)%0A%0Adef main():%0A dry_run = '--dry' in sys.argv%0A if not dry:%0A # If we're not running in dry mode log everything to a file%0A script_utils.add_file_logger(logger, __file__)%0A count = 0%0A preprints = PreprintService.objects.filter(%0A is_published=True,%0A date_published__gte=START_DATETIME,%0A date_published__lte=END_DATETIME%0A ).order_by('date_published').select_related('node', 'node__creator')%0A for preprint in preprints:%0A auth = Auth(preprint.node.creator)%0A for author in preprint.node.contributors.filter(is_active=False):%0A assert not author.is_registered%0A logger.info('Sending email to unregistered User %7B%7D on PreprintService %7B%7D'.format(author._id, preprint._id))%0A if not dry_run:%0A project_signals.contributor_added.send(%0A preprint.node,%0A contributor=author,%0A auth=auth,%0A email_template='preprint'%0A )%0A count += 1%0A logger.info('Sent an email to %7B%7D unregistered users'.format(count))%0A%0Aif __name__ == '__main__':%0A main()%0A
83bb6d6685f9ac20da9324580c760306b04eb7d6
Add Experiment.from_results
learning/experiment.py
learning/experiment.py
from __future__ import division

import sys
sys.path.append("../lib")

import logging
import time
import cPickle as pickle
import os
import os.path
import errno
from shutil import copyfile

import numpy as np
import h5py

import theano
import theano.tensor as T

from utils.datalog import dlog, StoreToH5, TextPrinter

from dataset import DataSet
from model import Model
from training import TrainerBase
from termination import Termination
from monitor import DLogModelParams

_logger = logging.getLogger()


class Experiment(object):
    @classmethod
    def from_param_file(cls, fname):
        experiment = cls()
        experiment.load_param_file(fname)
        return experiment

    #-------------------------------------------------------------------------

    def __init__(self):
        self.params = {}
        self.param_fname = None
        self.out_dir = None
        self.logger = _logger

    def load_param_file(self, fname):
        self.param_fname = fname
        execfile(fname, self.params)
        self.set_trainer(self.params['trainer'])

    def setup_output_dir(self, exp_name=None, with_suffix=True):
        if exp_name is None:
            # Determine experiment name
            if self.param_fname:
                exp_name = self.param_fname
            else:
                exp_name = sys.argv[0]

        if with_suffix:
            # Determine suffix
            if 'PBS_JOBID' in os.environ:
                job_no = os.environ['PBS_JOBID'].split('.')[0]  # Job Number
                suffix = "j"+job_no
            elif 'SLURM_JOBID' in os.environ:
                job_no = os.environ['SLURM_JOBID']
                suffix = "j"+job_no
            else:
                suffix = time.strftime("%Y-%m-%d-%H-%M")

            if not with_suffix:
                suffix = "-"

            suffix_counter = 0
            dirname = "output/%s.%s" % (exp_name, suffix)
            while True:
                try:
                    os.makedirs(dirname)
                except OSError, e:
                    if e.errno != errno.EEXIST:
                        raise e
                    suffix_counter += 1
                    dirname = "output/%s.%s+%d" % (exp_name, suffix, suffix_counter)
                else:
                    break
        else:
            dirname = "output/%s" % (exp_name)
            try:
                os.makedirs(dirname)
            except OSError, e:
                if e.errno != errno.EEXIST:
                    raise e

        out_dir = dirname+"/"
        self.out_dir = out_dir

        if self.param_fname:
            copyfile(self.param_fname, os.path.join(self.out_dir, "paramfile.py"))

    def setup_logging(self):
        assert self.out_dir

        results_fname = os.path.join(self.out_dir, "results.h5")
        dlog.set_handler("*", StoreToH5, results_fname)

        #FORMAT = '[%(asctime)s] %(module)-15s %(message)s'
        FORMAT = '[%(asctime)s] %(name)-15s %(message)s'
        DATEFMT = "%H:%M:%S"
        formatter = logging.Formatter(FORMAT, DATEFMT)

        logger_fname = os.path.join(self.out_dir, "logfile.txt")
        fh = logging.FileHandler(logger_fname)
        fh.setLevel(logging.INFO)
        fh.setFormatter(formatter)

        root_logger = logging.getLogger("")
        root_logger.addHandler(fh)

    def print_summary(self):
        logger = self.logger

        logger.info("Parameter file: %s" % self.param_fname)
        logger.info("Output directory: %s" % self.out_dir)
        logger.info("-- Trainer hyperparameter --")
        for k, v in self.trainer.get_hyper_params().iteritems():
            if not isinstance(v, (int, float)):
                continue
            logger.info(" %20s: %s" % (k, v))
        logger.info("-- Model hyperparameter --")
        model = self.trainer.model

        desc = [str(layer.n_X) for layer in model.p_layers]
        logger.info(" %20s: %s" % ("layer sizes", "-".join(desc)))

        desc = [str(layer.__class__) for layer in model.p_layers]
        logger.info(" %20s: %s" % ("p-layers", " - ".join(desc)))

        desc = [str(layer.__class__) for layer in model.q_layers]
        logger.info(" %20s: %s" % ("q-layers", " - ".join(desc)))

        #for pl, ql in zip(model.p_layers[:-1], model.q_layers):
        #    logger.info(" %s" % l.__class__)
        #    for k, v in l.get_hyper_params().iteritems():
        #        logger.info(" %20s: %s" % (k, v))
        #logger.info("Total runtime: %f4.1 h" % runtime)

    def run_experiment(self):
        self.sanity_check()

        self.trainer.load_data()
        self.trainer.compile()

        self.trainer.perform_learning()

    def continue_experiment(self, results_h5):
        logger = self.logger
        self.sanity_check()

        logger.info("Copying results from %s" % results_h5)
        with h5py.File(results_h5, "r") as h5:
            for key in h5.keys():
                n_rows = h5[key].shape[0]
                for r in xrange(n_rows):
                    dlog.append(key, h5[key][r])

            # Identify last row without NaN's
            LL100 = h5['learning.monitor.100.LL']
            row = max(np.where(np.isfinite(LL100))[0])-1

            logger.info("Continuing from row %d (%d rows total)" %(row, LL100.shape[0]))

            self.trainer.load_data()
            self.trainer.compile()
            self.trainer.model.model_params_from_h5(h5, row=row)

        self.trainer.perform_learning()

    #---------------------------------------------------------------
    def sanity_check(self):
        if not isinstance(self.trainer, TrainerBase):
            raise ValueError("Trainer not set properly")

        if not any([isinstance(m, DLogModelParams) for m in self.trainer.epoch_monitors]):
            self.logger.warn("DLogModelParams is not setup as an epoch_monitor. Model parameters wouldn't be saved. Adding default DLogModelParams()")
            self.trainer.epoch_monitors += DLogModelParams()

    def set_trainer(self, trainer):
        assert isinstance(trainer, TrainerBase)
        self.trainer = trainer
Python
0.000013
@@ -679,16 +679,435 @@ riment%0A%0A + @classmethod%0A def from_results(cls, path, row=-1):%0A param_fname = path + %22/paramfile.py%22%0A results_fname = path + %22/results.h5%22%0A%0A experiment = cls()%0A experiment.load_param_file(param_fname)%0A %0A model = experiment.params%5B'model'%5D%0A with h5py.File(results_fname, %22r%22) as h5:%0A model.model_params_from_h5(h5, row, basekey=%22mode.%22)%0A%0A return experiment%0A%0A #---
fcac525d3f974c7d4a1e90c1adc444c6d6e72018
Add sed executor #123
executors/SED.py
executors/SED.py
Python
0.000001
@@ -0,0 +1,497 @@ +%0Afrom .base_executor import ScriptExecutor%0Afrom judgeenv import env%0A%0A%0Aclass Executor(ScriptExecutor):%0A ext = '.sed'%0A name = 'SED'%0A command = env%5B'runtime'%5D.get('sed')%0A test_program = '''s/.*/echo: Hello, World!/%0Aq'''%0A fs = %5B'.*%5C.(so%7Csed)', '/dev/urandom$', '/proc/self/maps$', '/proc/filesystems$', '/+lib/charset.alias$'%5D%0A syscalls = %5B'getgroups32', 'statfs64'%5D%0A%0A def get_cmdline(self):%0A return %5Bself.get_command(), '-f', self._code%5D%0A%0Ainitialize = Executor.initialize%0A
84cf95cde942d91f53959fea4151847902a69d14
Add a cleanup script.
rl-rc-car/cleanup.py
rl-rc-car/cleanup.py
Python
0
@@ -0,0 +1,58 @@ +from rccar import RCCar%0A%0Acar = RCCar()%0Acar.cleanup_gpio()%0A
b2741a8316ea1ffbf9e88a9fb883ef9e2507be42
Upgrade libchromiumcontent to 3245ef8
script/lib/config.py
script/lib/config.py
#!/usr/bin/env python

import platform
import sys

BASE_URL = 'https://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = 'f0c3a4546d8e75689c16b9aee1052a72951e58de'

ARCH = {
    'cygwin': '32bit',
    'darwin': '64bit',
    'linux2': platform.architecture()[0],
    'win32': '32bit',
}[sys.platform]
DIST_ARCH = {
    '32bit': 'ia32',
    '64bit': 'x64',
}[ARCH]

TARGET_PLATFORM = {
    'cygwin': 'win32',
    'darwin': 'darwin',
    'linux2': 'linux',
    'win32': 'win32',
}[sys.platform]
Python
0
@@ -155,47 +155,47 @@ = ' -f0c3a4546d8e75689c16b9aee1052a72951e58d +3245ef802fbf546f1a1d206990aa9d18be6bfbf e'%0A%0A
a03eaddd3e950f628320d1b5b007d87b11906844
add saveload.py (with error)
converter/saveload.py
converter/saveload.py
Python
0
@@ -0,0 +1,1409 @@ +#!/usr/local/bin/python%0A# -*- encoding:utf-8%0A%0Aimport sys%0Aimport subprocess as sp%0Aimport numpy%0A%0Adef load_mp3(filename):%0A%09command = %5B 'ffmpeg',%0A%09'-i', sys.argv%5B1%5D,%0A%09'-f', 's16le',%0A%09'-acodec', 'pcm_s16le',%0A%09'-ar', '44100', # ouput will have 44100 Hz%0A%09'-ac', '2', # stereo (set to '1' for mono)%0A%09'-'%5D%0A%09pipe = sp.Popen(command, stdout=sp.PIPE, bufsize=10**8)%0A%09raw_audio = pipe.proc.stdout.read(128000*6)%0A%09# Reorganize raw_audio as a Numpy array with two-columns (1 per channel)%0A%0A%09audio_array = numpy.fromstring(raw_audio, dtype=%22int16%22)%0A%09audio_array = audio_array.reshape(len(audio_array))%0A%09return audio_array%0A%0Adef save_mp3(filename,audio_array):%0A%09pipe2 = sp.Popen(%5B 'ffmpeg',%0A%09'-y', # (optional) means overwrite the output file if it already exists.%0A%09%22-f%22, 's16le', # means 16bit input%0A%09%22-acodec%22, %22pcm_s16le%22, # means raw 16bit input%0A%09'-ar', %2244100%22, # the input will have 44100 Hz%0A%09'-ac','2', # the input will have 2 channels (stereo)%0A%09'-i', '-', # means that the input will arrive from the pipe%0A%09'-vn', # means %22don't expect any video input%22%0A%09'-acodec', %22libmp3lame%22, # output audio codec%0A#%09'-b', %223000k%22, # output bitrate (=quality). Here, 3000kb/second%0A%09filename%5D,%0A%09stdin=sp.PIPE,stdout=sp.PIPE, stderr=sp.PIPE)%0A%09audio_array.astype(%22int16%22).tofile(self.proc.stdin)%0A%0Adef main():%0A%09ary = load_mp3(sys.argv%5B1%5D)%0A#%09ary = ary.reshape((ary.shape%5B0%5D*2))%0A%09save_mp3(sys.argv%5B2%5D,ary)%0A%0Aif __name__ == '__main__':%0A%09main()%0A
4ab3e59b7e9fe339c96042107c3f59bdf1afc46a
add instagram compliance fix
requests_oauthlib/compliance_fixes/instagram.py
requests_oauthlib/compliance_fixes/instagram.py
Python
0
@@ -0,0 +1,1071 @@ +try:%0A from urlparse import urlparse, parse_qs%0Aexcept ImportError:%0A from urllib.parse import urlparse, parse_qs%0A%0Afrom oauthlib.common import add_params_to_uri%0A%0A%0Adef instagram_compliance_fix(session):%0A def _non_compliant_param_name(url, headers, data):%0A # If the user has already specified the token, either in the URL%0A # or in a data dictionary, then there's nothing to do.%0A # If the specified token is different from %60%60session.access_token%60%60,%0A # we assume the user intends to override the access token.%0A url_query = dict(parse_qs(urlparse(url).query))%0A token = url_query.get(%22token%22)%0A if not token and isinstance(data, dict):%0A token = data.get(%22token%22)%0A%0A if token:%0A # Nothing to do, just return.%0A return url, headers, data%0A%0A token = %5B('access_token', session.access_token)%5D%0A url = add_params_to_uri(url, token)%0A return url, headers, data%0A%0A session.register_compliance_hook(%0A 'protected_request', _non_compliant_param_name)%0A return session%0A
b9b34eb2bca76e76ba4f7399b12daa27ed2ab7f4
Create uvSetTgl.py
af_scripts/uv/uvSetTgl.py
af_scripts/uv/uvSetTgl.py
Python
0.000002
@@ -0,0 +1,888 @@ +# This script will switch UV Set between %22map1%22 and %22atlasmap%22.%0A# Useage:%0A# Select meshes and run this script%0Aimport maya.cmds as cmds%0Adef uvsetTgl():%0A shape_node = cmds.ls(sl=True, fl=True, dag=True, type='shape')%0A current_uvset = cmds.polyUVSet(shape_node%5B0%5D,q=True, currentUVSet=True)%0A %0A for shape in shape_node:%0A uvsets = cmds.polyUVSet(shape,q=True,auv=True)%0A if %22map1%22 and %22atlasUV%22 in uvsets:%0A if current_uvset%5B0%5D == 'map1':%0A cmds.polyUVSet(shape, currentUVSet=True, uvSet=%22atlasUV%22)%0A elif current_uvset%5B0%5D == 'atlasUV':%0A cmds.polyUVSet(shape, currentUVSet=True, uvSet=%22map1%22)%0A else:%0A cmds.polyUVSet(shape, currentUVSet=True, uvSet=%22map1%22)%0A elif %22map1%22 in uvsets and %22atlasUV%22 not in uvsets:%0A cmds.polyUVSet(shape, currentUVSet=True, uvSet=%22map1%22)%0AuvsetTgl()%0A
a9b45bf50dae68c9a801ec7942c4f4cc38fa08f5
Create GenerateUnifiedReports.py
GenerateUnifiedReports.py
GenerateUnifiedReports.py
Python
0
@@ -0,0 +1,1252 @@ +import argparse%0A%0A%0A%0A# Read options on which PayPal records to process (year / month) or run on discovery to find the files or discover new files and generate new unified files but preserve the old ones (default)%0A# load all the Website records based on discovery%0A# load the PayPal monthly report(s)%0A# reconsile each record in PayPal records to identify the event and standardize the fields%0A# save to file the unified records%0A%0A%0A# GenerateUnifiedReports.py %5Bno options%5D - this will discover which PayPay files exist wihtout corrisponsiding unified record files and generate the missing unified record files.%0A# GenerateUnifiedReports.py -f - this will force the generation of all unfied record files even if they already exist%0A# GenerateUnifiedReports.py -start 2012 01 -end 2013 07 - this will generate the unified record files for the range specified. (start year, start month, end year, end month)%0A%0A%0Aparser = argparse.ArgumentParser( description='Process options for generating unified reports')%0Aparser.add_argument( '-force', metavar='force generate')%0Aparser.add_argument( '-start', metavar='start year / month', nargs='2')%0Aparser.add_argument( '-end', metavar='end year / month', nargs='2')%0A%0A#GenerateUnifiedRecord( paypal_filename, unified_filename )%0A
24f665e02912a3f79eec9776c86863a9e172d94a
Create HR_pythonPrintFunction.py
HR_pythonPrintFunction.py
HR_pythonPrintFunction.py
Python
0.000991
@@ -0,0 +1,229 @@ +import sys%0A%0Aif __name__ == '__main__':%0A n = int(input())%0A %0A # imported sys for a elegant solution, Python 3%0A # * before range means taking everything 0 or more%0A print(*range(1,n+1), sep='',end='%5Cn', file= sys.stdout)%0A
ce552a70f77934d4b76b5710b76b22967484d17e
Create folderwatcher.py
folderwatcher.py
folderwatcher.py
Python
0.000001
@@ -0,0 +1,390 @@ +import os%0Aimport time%0Aimport datetime%0A%0Aoutold = %5B%5D%0Atry:%0A%09while True:%0A%09%09out = os.listdir()%0A%09%09if outold != out:%0A%09%09%09ldate= datetime.datetime.now().strftime('%25I:%25M:%25S')%0A%09%09%09for x in outold:%0A%09%09%09%09if x not in out:%0A%09%09%09%09%09print ('Moved: '+ldate+' '+x)%0A%09%09%09for x in out:%0A%09%09%09%09if x not in outold:%0A%09%09%09%09%09print ('New: '+ldate+' '+x)%0A%09%09%09outold = out%0A%09%09time.sleep(1)%0Aexcept KeyboardInterrupt:%0A pass%0A
207c211f93b7b98b2e55a060d91e3329d441faaf
format metrics more idiomatically
src/diamond/handler/tsdb.py
src/diamond/handler/tsdb.py
# coding=utf-8

"""
Send metrics to a [OpenTSDB](http://opentsdb.net/) server.

[OpenTSDB](http://opentsdb.net/) is a distributed, scalable Time Series
Database (TSDB) written on top of [HBase](http://hbase.org/). OpenTSDB was
written to address a common need: store, index and serve metrics collected
from computer systems (network gear, operating systems, applications) at a
large scale, and make this data easily accessible and graphable.

Thanks to HBase's scalability, OpenTSDB allows you to collect many thousands
of metrics from thousands of hosts and applications, at a high rate (every
few seconds). OpenTSDB will never delete or downsample data and can easily
store billions of data points. As a matter of fact, StumbleUpon uses it to
keep track of hundred of thousands of time series and collects over
1 billion data points per day in their main production datacenter.

Imagine having the ability to quickly plot a graph showing the number of
DELETE statements going to your MySQL database along with the number of
slow queries and temporary files created, and correlate this with the
99th percentile of your service's latency. OpenTSDB makes generating such
graphs on the fly a trivial operation, while manipulating millions of data
point for very fine grained, real-time monitoring.

==== Notes

We don't automatically make the metrics via mkmetric, so we recommand you
run with the null handler and log the output and extract the key values to
mkmetric yourself.

- enable it in `diamond.conf` :

`    handlers = diamond.handler.tsdb.TSDBHandler
`

"""

from Handler import Handler
import socket


class TSDBHandler(Handler):
    """
    Implements the abstract Handler class, sending data to graphite
    """
    RETRY = 3

    def __init__(self, config=None):
        """
        Create a new instance of the TSDBHandler class
        """
        # Initialize Handler
        Handler.__init__(self, config)

        # Initialize Data
        self.socket = None

        # Initialize Options
        self.host = self.config['host']
        self.port = int(self.config['port'])
        self.timeout = int(self.config['timeout'])

        # Connect
        self._connect()

    def get_default_config_help(self):
        """
        Returns the help text for the configuration options for this handler
        """
        config = super(TSDBHandler, self).get_default_config_help()

        config.update({
            'host': '',
            'port': '',
            'timeout': '',
        })

        return config

    def get_default_config(self):
        """
        Return the default config for the handler
        """
        config = super(TSDBHandler, self).get_default_config()

        config.update({
            'host': '',
            'port': 1234,
            'timeout': 5,
        })

        return config

    def __del__(self):
        """
        Destroy instance of the TSDBHandler class
        """
        self._close()

    def process(self, metric):
        """
        Process a metric by sending it to TSDB
        """
        # Just send the data as a string
        self._send("put " + str(metric))

    def _send(self, data):
        """
        Send data to TSDB. Data that can not be sent will be queued.
        """
        retry = self.RETRY
        # Attempt to send any data in the queue
        while retry > 0:
            # Check socket
            if not self.socket:
                # Log Error
                self.log.error("TSDBHandler: Socket unavailable.")
                # Attempt to restablish connection
                self._connect()
                # Decrement retry
                retry -= 1
                # Try again
                continue
            try:
                # Send data to socket
                self.socket.sendall(data)
                # Done
                break
            except socket.error, e:
                # Log Error
                self.log.error("TSDBHandler: Failed sending data. %s.", e)
                # Attempt to restablish connection
                self._close()
                # Decrement retry
                retry -= 1
                # try again
                continue

    def _connect(self):
        """
        Connect to the TSDB server
        """
        # Create socket
        self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        if socket is None:
            # Log Error
            self.log.error("TSDBHandler: Unable to create socket.")
            # Close Socket
            self._close()
            return
        # Set socket timeout
        self.socket.settimeout(self.timeout)
        # Connect to graphite server
        try:
            self.socket.connect((self.host, self.port))
            # Log
            self.log.debug("Established connection to TSDB server %s:%d",
                           self.host, self.port)
        except Exception, ex:
            # Log Error
            self.log.error("TSDBHandler: Failed to connect to %s:%i. %s",
                           self.host, self.port, ex)
            # Close Socket
            self._close()
            return

    def _close(self):
        """
        Close the socket
        """
        if self.socket is not None:
            self.socket.close()
        self.socket = None
Python
0
@@ -2135,16 +2135,121 @@ meout'%5D) +%0A self.metric_format = str(self.config%5B'format'%5D)%0A self.tags = str(self.config%5B'tags'%5D) %0A%0A @@ -2585,32 +2585,82 @@ 'timeout': '',%0A + 'format': '',%0A 'tags': '',%0A %7D)%0A%0A @@ -2950,16 +2950,129 @@ ut': 5,%0A + 'format': '%7BCollector%7D.%7BMetric%7D %7Btimestamp%7D %7Bvalue%7D hostname=%7Bhost%7D %7Btags%7D',%0A 'tags': '',%0A @@ -3311,32 +3311,407 @@ TSDB%0A %22%22%22 +%0A%0A metric_str = self.metric_format.format(%0A Collector = metric.getCollectorPath(), %0A Path = metric.path,%0A Metric = metric.getMetricPath(),%0A host = metric.host,%0A timestamp = metric.timestamp,%0A value = metric.value,%0A tags = self.tags%0A ) %0A # Just @@ -3774,17 +3774,29 @@ r(metric -) +_str) + %22%5Cn%22 )%0A%0A d
41fc87e402aa2864c22adb5c09a713c2b0eacb72
Add replace test that shutdowns a node and replaces a pod (#806)
frameworks/cassandra/tests/test_recovery_shutdown.py
frameworks/cassandra/tests/test_recovery_shutdown.py
Python
0
@@ -0,0 +1,2324 @@ +import pytest%0Afrom tests.config import *%0Aimport sdk_install as install%0Aimport sdk_tasks as tasks%0Aimport sdk_utils as utils%0Aimport json%0Aimport shakedown%0Aimport time%0Aimport sdk_cmd as cmd%0A%0A%0Adef setup_module(module):%0A install.uninstall(PACKAGE_NAME)%0A utils.gc_frameworks()%0A%0A # check_suppression=False due to https://jira.mesosphere.com/browse/CASSANDRA-568%0A install.install(PACKAGE_NAME, DEFAULT_TASK_COUNT, check_suppression=False)%0A%0A%0Adef setup_function(function):%0A tasks.check_running(PACKAGE_NAME, DEFAULT_TASK_COUNT)%0A%0A%0Adef teardown_module(module):%0A install.uninstall(PACKAGE_NAME)%0A%0A%[email protected]%[email protected]%[email protected]_node%0Adef test_shutdown_host_test():%0A%0A service_ip = shakedown.get_service_ips(PACKAGE_NAME).pop()%0A print('marathon ip = %7B%7D'.format(service_ip))%0A%0A node_ip = 0%0A for pod_id in range(0, DEFAULT_TASK_COUNT):%0A node_ip = get_pod_host(pod_id)%0A if node_ip != service_ip:%0A break%0A%0A if node_ip is None:%0A assert Fail, 'could not find a node to shutdown'%0A%0A old_agent = get_pod_agent(pod_id)%0A print('pod id = %7B%7D, node_ip = %7B%7D, agent = %7B%7D'.format(pod_id, node_ip, old_agent))%0A%0A task_ids = tasks.get_task_ids(PACKAGE_NAME, 'node-%7B%7D'.format(pod_id))%0A%0A # instead of partition/reconnect, we shutdown host permanently%0A status, stdout = shakedown.run_command_on_agent(node_ip, 'sudo shutdown -h +1')%0A print('shutdown agent %7B%7D: %5B%7B%7D%5D %7B%7D'.format(node_ip, status, stdout))%0A assert status is True%0A time.sleep(100)%0A%0A cmd.run_cli('cassandra pods replace node-%7B%7D'.format(pod_id))%0A%0A tasks.check_tasks_updated(PACKAGE_NAME, 'node', task_ids)%0A%0A #double check all tasks are running%0A tasks.check_running(PACKAGE_NAME, DEFAULT_TASK_COUNT)%0A new_agent = get_pod_agent(pod_id)%0A%0A assert old_agent != new_agent%0A%0A%0Adef get_pod_agent(id):%0A stdout = cmd.run_cli('cassandra pods info node-%7B%7D'.format(id))%0A return json.loads(stdout)%5B0%5D%5B'info'%5D%5B'slaveId'%5D%5B'value'%5D%0A%0A%0Adef get_pod_label(id):%0A stdout = cmd.run_cli('cassandra pods info node-%7B%7D'.format(id))%0A return json.loads(stdout)%5B0%5D%5B'info'%5D%5B'labels'%5D%5B'labels'%5D%0A%0A%0Adef get_pod_host(id):%0A labels = get_pod_label(id)%0A for i in range(0, len(labels)):%0A if labels%5Bi%5D%5B'key'%5D == 'offer_hostname':%0A return labels%5Bi%5D%5B'value'%5D%0A return None%0A
c11e74d4210c6de8917dfde6cb33d75f6b1b835a
add migration that solves BigAutoField problem
hordak/migrations/0032_check_account_type_big_int.py
hordak/migrations/0032_check_account_type_big_int.py
Python
0
@@ -0,0 +1,733 @@ +# Generated by Django 4.0.7 on 2022-09-18 10:33%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A (%22hordak%22, %220031_alter_account_currencies%22),%0A %5D%0A%0A operations = %5B%0A migrations.RunSQL(%0A %22%22%22%0A CREATE OR REPLACE FUNCTION check_account_type()%0A RETURNS TRIGGER AS%0A $$%0A BEGIN%0A IF NEW.parent_id::INT::BOOL THEN%0A NEW.type = (SELECT type FROM hordak_account WHERE id = NEW.parent_id);%0A END IF;%0A RETURN NEW;%0A END;%0A $$%0A LANGUAGE plpgsql;%0A %22%22%22,%0A %22DROP FUNCTION check_account_type()%22,%0A ),%0A %5D%0A
d3a11021f8be8e93c5c067b5fcf59bc4f9f92cea
add computation of sts for ISUSM
scripts/dbutil/compute_isusm_sts.py
scripts/dbutil/compute_isusm_sts.py
Python
0.000376
@@ -0,0 +1,1163 @@ +%22%22%22%0A Figure out when the ISUSM data started...%0A%22%22%22%0A%0Aimport psycopg2%0Aimport network%0Aimport sys%0Aimport datetime%0Aimport pytz%0A%0Abasets = datetime.datetime.now()%0Abasets = basets.replace(tzinfo=pytz.timezone(%22America/Chicago%22))%0A%0Aisuag = psycopg2.connect(database='isuag', host='iemdb')%0Aicursor = isuag.cursor()%0Amesosite = psycopg2.connect(database='mesosite', host='iemdb')%0Amcursor = mesosite.cursor()%0A%0Atable = network.Table(%22ISUSM%22)%0A%0Aicursor.execute(%22%22%22SELECT station, min(valid), max(valid) from sm_hourly %0A GROUP by station ORDER by min ASC%22%22%22)%0Afor row in icursor:%0A station = row%5B0%5D%0A if not table.sts.has_key(station):%0A print 'Whoa station: %25s does not exist in metadatabase?' %25 (station,)%0A continue%0A if table.sts%5Bstation%5D%5B'archive_begin'%5D != row%5B1%5D:%0A print 'Updated %25s STS WAS: %25s NOW: %25s' %25 (station, %0A table.sts%5Bstation%5D%5B'archive_begin'%5D, row%5B1%5D)%0A %0A mcursor.execute(%22%22%22UPDATE stations SET archive_begin = %25s %0A WHERE id = %25s and network = %25s%22%22%22 , (row%5B1%5D, station, 'ISUSM') )%0A if mcursor.rowcount == 0:%0A print 'ERROR: No rows updated'%0A %0Amcursor.close()%0Amesosite.commit()%0Amesosite.close()%0A
133a4311fdb3c96edeb927250e549fcaf4080696
add silly module
modules/silly.py
modules/silly.py
Python
0.000001
@@ -0,0 +1,444 @@ +# -*- coding: ISO-8859-15 -*-%0A%0Afrom core.Uusipuu import UusipuuModule%0Aimport random, time%0Afrom core.tdiff import *%0A%0Aclass Module(UusipuuModule):%0A %0A def cmd_noppa(self, user, target, params):%0A self.log('ok noppaa heitetn!!')%0A self.chanmsg('%25s!' %25 random.choice((%0A 'ykknen',%0A 'kakkonen',%0A 'kolmonen',%0A 'nelonen',%0A 'vitonen',%0A 'kutonen')))%0A%0A# vim: set et sw=4:%0A
f340bde6e047d86171385b90a023ac01e8914d0c
Add simple neural network (#6452)
neural_network/simple_neural_network.py
neural_network/simple_neural_network.py
Python
0
@@ -0,0 +1,1596 @@ +%22%22%22%0AForward propagation explanation:%0Ahttps://towardsdatascience.com/forward-propagation-in-neural-networks-simplified-math-and-code-version-bbcfef6f9250%0A%22%22%22%0A%0Aimport math%0Aimport random%0A%0A%0A# Sigmoid%0Adef sigmoid_function(value: float, deriv: bool = False) -%3E float:%0A %22%22%22Return the sigmoid function of a float.%0A%0A %3E%3E%3E sigmoid_function(3.5)%0A 0.9706877692486436%0A %3E%3E%3E sigmoid_function(3.5, True)%0A -8.75%0A %22%22%22%0A if deriv:%0A return value * (1 - value)%0A return 1 / (1 + math.exp(-value))%0A%0A%0A# Initial Value%0AINITIAL_VALUE = 0.02%0A%0A%0Adef forward_propagation(expected: int, number_propagations: int) -%3E float:%0A %22%22%22Return the value found after the forward propagation training.%0A%0A %3E%3E%3E res = forward_propagation(32, 10000000)%0A %3E%3E%3E res %3E 31 and res %3C 33%0A True%0A%0A %3E%3E%3E res = forward_propagation(32, 1000)%0A %3E%3E%3E res %3E 31 and res %3C 33%0A False%0A %22%22%22%0A%0A # Random weight%0A weight = float(2 * (random.randint(1, 100)) - 1)%0A%0A for _ in range(number_propagations):%0A # Forward propagation%0A layer_1 = sigmoid_function(INITIAL_VALUE * weight)%0A # How much did we miss?%0A layer_1_error = (expected / 100) - layer_1%0A # Error delta%0A layer_1_delta = layer_1_error * sigmoid_function(layer_1, True)%0A # Update weight%0A weight += INITIAL_VALUE * layer_1_delta%0A%0A return layer_1 * 100%0A%0A%0Aif __name__ == %22__main__%22:%0A import doctest%0A%0A doctest.testmod()%0A%0A expected = int(input(%22Expected value: %22))%0A number_propagations = int(input(%22Number of propagations: %22))%0A print(forward_propagation(expected, number_propagations))%0A
4b07d7cdd791a03ef4c7ec7e6e4188b625ffb8dc
Add migration
src/clarityv2/portfolio/migrations/0002_auto_20180228_2055.py
src/clarityv2/portfolio/migrations/0002_auto_20180228_2055.py
Python
0.000002
@@ -0,0 +1,471 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.6 on 2018-02-28 18:55%0Afrom __future__ import unicode_literals%0A%0Aimport ckeditor.fields%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('portfolio', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='entry',%0A name='description',%0A field=ckeditor.fields.RichTextField(blank=True),%0A ),%0A %5D%0A
0bc48c7131e0589e7f2980e16bce6c2dfcdbafda
Fix usage message from tag:file to tag=file
python/utils.py
python/utils.py
'''
  This file is part of the PyPhantomJS project.

  Copyright (C) 2011 James Roe <[email protected]>
  Copyright (C) 2011 Ariya Hidayat <[email protected]>

  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program. If not, see <http://www.gnu.org/licenses/>.
'''

import argparse

version_major = 1
version_minor = 1
version_patch = 0
version = '%d.%d.%d' % (version_major, version_minor, version_patch)

license = '''
  PyPhantomJS Version %s

  Copyright (C) 2011 James Roe <[email protected]>

  This program is free software: you can redistribute it and/or modify
  it under the terms of the GNU General Public License as published by
  the Free Software Foundation, either version 3 of the License, or
  (at your option) any later version.

  This program is distributed in the hope that it will be useful,
  but WITHOUT ANY WARRANTY; without even the implied warranty of
  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
  GNU General Public License for more details.

  You should have received a copy of the GNU General Public License
  along with this program. If not, see <http://www.gnu.org/licenses/>.
''' % version


def argParser():
    parser = argparse.ArgumentParser(
        description='Minimalistic headless WebKit-based JavaScript-driven tool',
        usage='%(prog)s [options] script.[js|coffee] [script argument [script argument ...]]',
        formatter_class=argparse.RawTextHelpFormatter
    )

    parser.add_argument('--load-images', default='yes',
        choices=['yes', 'no'],
        help='Load all inlined images (default: %(default)s)'
    )
    parser.add_argument('--load-plugins', default='no',
        choices=['yes', 'no'],
        help='Load all plugins (i.e. Flash, Silverlight, ...)\n(default: %(default)s)'
    )
    parser.add_argument('--proxy', metavar='address:port',
        help='Set the network proxy'
    )
    parser.add_argument('--upload-file', nargs='*',
        metavar='tag:file', help='Upload 1 or more files'
    )
    parser.add_argument('script', metavar='script.[js|coffee]', nargs='*',
        help='The script to execute, and any args to pass to it'
    )
    parser.add_argument('--version',
        action='version', version=license,
        help='show this program\'s version and license'
    )

    return parser
Python
0.000623
@@ -2488,17 +2488,17 @@ var='tag -: += file', h
4065a08ea401e0d95e8d40d9d735edf92edda861
Add unit tests on cache handler
oslo_policy/tests/test_cache_handler.py
oslo_policy/tests/test_cache_handler.py
Python
0.000002
@@ -0,0 +1,2076 @@ +# Copyright (c) 2020 OpenStack Foundation.%0A# All Rights Reserved.%0A%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0A%22%22%22Test the cache handler module%22%22%22%0A%0Aimport os%0A%0Aimport fixtures%0Afrom oslotest import base as test_base%0A%0Afrom oslo_policy import _cache_handler as _ch%0A%0A%0Aclass CacheHandlerTest(test_base.BaseTestCase):%0A%0A def setUp(self):%0A super().setUp()%0A self.tmpdir = self.useFixture(fixtures.TempDir())%0A%0A def test_read_cached_file(self):%0A file_cache = %7B%7D%0A%0A path = os.path.join(self.tmpdir.path, 'tmpfile')%0A with open(path, 'w+') as fp:%0A fp.write('test')%0A%0A reloaded, data = _ch.read_cached_file(file_cache, path)%0A self.assertEqual('test', data)%0A self.assertTrue(reloaded)%0A%0A reloaded, data = _ch.read_cached_file(file_cache, path)%0A self.assertEqual('test', data)%0A self.assertFalse(reloaded)%0A%0A reloaded, data = _ch.read_cached_file(%0A file_cache, path, force_reload=True)%0A self.assertEqual('test', data)%0A self.assertTrue(reloaded)%0A%0A def test_read_cached_file_with_updates(self):%0A file_cache = %7B%7D%0A%0A path = os.path.join(self.tmpdir.path, 'tmpfile')%0A with open(path, 'w+') as fp:%0A fp.write('test')%0A%0A reloaded, data = _ch.read_cached_file(file_cache, path)%0A%0A # update the timestamps%0A times = (os.stat(path).st_atime + 1, os.stat(path).st_mtime + 1)%0A os.utime(path, times)%0A%0A reloaded, data = _ch.read_cached_file(file_cache, path)%0A self.assertTrue(reloaded)%0A
a4012beca6f7ff2514076297cbc4fb9b1b126590
Bump and pypi publish core
depends/docker-registry-core/docker_registry/core/__init__.py
depends/docker-registry-core/docker_registry/core/__init__.py
# -*- coding: utf-8 -*- # Copyright (c) 2014 Docker. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. # ___ _ _ ___ # | \ _ _ ___(_)__| |___ / __|___ _ _ ___ # | |) | '_/ _ \ / _` |___| (__/ _ \ '_/ -_) # |___/|_| \___/_\__,_| \___\___/_| \___| """ Docker Registry Core ~~~~~~~~~~~~~~~~~~~~~ The core of the registry. Defines helper methods and generics to be used by either the registry itself, or drivers packages. :copyright: (c) 2014 by Docker. :license: Apache 2.0, see LICENSE for more details. """ from __future__ import absolute_import import logging __author__ = 'Docker' __copyright__ = 'Copyright 2014 Docker' __credits__ = [] __license__ = 'Apache 2.0' __version__ = '2.0.1' __maintainer__ = 'Docker' __email__ = '[email protected]' __status__ = 'Production' __title__ = 'docker-registry-core' __build__ = 0x000000 __url__ = 'https://github.com/docker/docker-registry' __description__ = 'Docker registry core package' __download__ = 'https://github.com/docker/docker-registry/archive/master.zip' try: NullHandler = logging.NullHandler except AttributeError: class NullHandler(logging.Handler): def emit(self, record): pass logging.getLogger(__name__).addHandler(NullHandler()) # ..II7N.. .. .. # .77I7II?7$$$$ZOO~. # . I7I7I$7$77$7II7ZZZZZO. # .....8$77I?I?I?$$77II77II7Z$ZZZO$ZOO.. # .IIII77$?8I???$?I?I777I7$777$$$$O888O8 . # 7~III777II???+I??7I7Z7I?I??$O8Z$ZZZ7O8D+8. # ?.?I7777Z?+=+$ODDIM7Z8I7$.+788OZ$8$77OO$$. # .DM7$$I$$7I$?78+O$$$++Z7I8NNNNNNNO$+NIIND8. # M$OODOO7INDNNN77?=+~~~+7NNNNNNN877N7$D. # .MNNDDDDODND7O8Z??=.$+?7NNNNM8D88Z$D.. # ..:=7MDNNNNNDDNDNNDDNNNNNNM.. # . . .. .. .88$7NOO77$ZODO8D.. ......... # ..OZOZOZ88DD8D8$D888DDDNDDNNNDO78D8DD7.. # .Z77$$$$$Z$ZZZZZZZZ?I$ZOOOOOOZ$IOOOOOOOO$ZOOZZZ. # 7?7?IIIII?7$7777777777$$$7$$Z$$77$$$ZZZ$OZOZ88D. # .O8MMMII7IID++=+????IIIII7II7II77$$$$OZODMMMNZ. # ..O$$O7$7777$78DDN7DNONO$ZZZO888OZZ$$OD8ZZ8ZOO.. # 7$$77$7$O$$$$ZO8O7O8ZZ?OI+?I$IO$Z$8$O8$78NZNI # ..IZ7$O$7$8$IOO7O78O$$+77II?ZIO$$ZZOO$7Z8O8DI. # .$7$$I88$777I$7OZ7$?IZI7+O+$$$OZOOIIONZODD # .~7OI77II??7O7II??Z?7?N?ZOZZZO8OO8$7O8. # ..ZNMNNMNMDZNN8NOZZZ8N7DNNDZDNO8ODD.. . # .$MNN8DN7$N8M8D$$$8NOOM:MNNNOD88$Z. # .7$II7OZDN..DOZ. =+I77OI.I88D8N8$$O. # .$Z=I7OZZNN.+88. .+7ZDM DD8Z$$I$7$ . # ..7$. .ZOZODN.ZO7. OO .8OOZZ8$II7. # 778...$DZDDD8.ZDZ. Z$ .O8OI7O7II: # ?I?. . ..8O8OD,.$.=. ~?.. .8O.~7II??.. . # ..7IZ.. =?OOZ..8... . .?.. .88?..+II?I... # I87I.. .?.ZOOZ78$. .7.. Z7Z~ .II+I.. # .D87. . ?OOO.O8,.. .7.. .8.....+$+I # ..IO.. ?ZOO..8. .7+. ..O.,. ..?O??.. # ..8+.. .:.OO7.$. . .~I. .$. ~. .7Z7D. .. # .MID. ..$O$.7. . .?. .II. .7.=+ZD$. # ~:I.. .I$OZ8.O. . .7~ . .?$ ?++I.. # ...I. .:$I+N.$.. .?IO. I. .I+I.Z. # ..7O.. .:8D..7.. . .:I?O. .O. =+~.Z.. # .OO7.. .7OO. +$. . :I. .Z. .=: .=.. # .?..Z.IN?7. .Z .7$ OZ=..==.... # . +I$ 7. Z$.. +.$. I= =. # . IZZ... .7,. ..Z... ....+.. . # .=Z.,. +$.. 8I.. +?.. :. # ..$ ,.. ..Z.. .78. .$O+?. I.. # .Z,7. .?IOO.. . I+O7I?,. . # .I8$+.. .IIZ8 . ..ZDDI+7?+IM.. ?.. ... # +8DO.. . ..M:.. .... ...+==~=~~I78$,..I # I7IIO~. 7M . ..+~.. . # ..7?Z$O... . ... ..... # . ..OO... 
# ...$Z. # ?$$.. # .?OO:. # .+OI$ # ..7..$ # .. .7... # ..=. # 7..
Python
0
@@ -1206,17 +1206,17 @@ = '2.0. -1 +2 '%0A__main
b3a7bca64b256dcc09b8ad49a7491e7a3717e74f
disable automatic deployment of images (needs verification)
planetstack/observer/steps/sync_image_deployments.py
planetstack/observer/steps/sync_image_deployments.py
import os
import base64
from collections import defaultdict
from django.db.models import F, Q
from planetstack.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.deployment import Deployment
from core.models.image import Image, ImageDeployments
from util.logger import Logger, logging

logger = Logger(level=logging.INFO)

class SyncImageDeployments(OpenStackSyncStep):
    provides=[ImageDeployments]
    requested_interval=0

    def fetch_pending(self):
        # ensure images are available across all deployments
        image_deployments = ImageDeployments.objects.all()
        image_deploy_lookup = defaultdict(list)
        for image_deployment in image_deployments:
            image_deploy_lookup[image_deployment.image].append(image_deployment.deployment)

        all_deployments = Deployment.objects.all()
        for image in Image.objects.all():
            expected_deployments = all_deployments
            for expected_deployment in expected_deployments:
                if image not in image_deploy_lookup or \
                    expected_deployment not in image_deploy_lookup[image]:
                    id = ImageDeployments(image=image, deployment=expected_deployment)
                    id.save()

        # now we return all images that need to be enacted
        return ImageDeployments.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))

    def sync_record(self, image_deployment):
        logger.info("Working on image %s on deployment %s" % (image_deployment.image.name, image_deployment.deployment.name))
        driver = self.driver.admin_driver(deployment=image_deployment.deployment.name)
        images = driver.shell.glance.get_images()
        glance_image = None
        for image in images:
            if image['name'] == image_deployment.image.name:
                glance_image = image
                break
        if glance_image:
            logger.info("Found image %s on deployment %s" % (image_deployment.image.name, image_deployment.deployment.name))
            image_deployment.glance_image_id = glance_image['id']
        elif image_deployment.image.path:
            image = {
                'name': image_deployment.image.name,
                'is_public': True,
                'disk_format': 'raw',
                'container_format': 'bare',
                'file': image_deployment.image.path,
            }

            logger.info("Creating image %s on deployment %s" % (image_deployment.image.name, image_deployment.deployment.name))
            glance_image = driver.shell.glanceclient.images.create(name=image_deployment.image.name, is_public=True, disk_format='raw', container_format='bare')
            glance_image.update(data=open(image_deployment.image.path, 'rb'))

            # While the images returned by driver.shell.glance.get_images()
            # are dicts, the images returned by driver.shell.glanceclient.images.create
            # are not dicts. We have to use getattr() instead of [] operator.
            if not glance_image or not getattr(glance_image,"id",None):
                raise Exception, "Add image failed at deployment %s" % image_deployment.deployment.name
            image_deployment.glance_image_id = getattr(glance_image, "id")
        image_deployment.save()
Python
0
@@ -495,16 +495,193 @@ (self):%0A + # smbaker: commented out automatic creation of ImageDeployments as%0A # as they will now be configured in GUI. Not sure if this is%0A # sufficient.%0A%0A# @@ -725,32 +725,33 @@ all deployments%0A +# image_de @@ -785,32 +785,33 @@ s.objects.all()%0A +# image_de @@ -842,16 +842,17 @@ t(list)%0A +# @@ -886,32 +886,33 @@ ge_deployments:%0A +# imag @@ -987,17 +987,19 @@ oyment)%0A -%0A +#%0A# @@ -1041,16 +1041,17 @@ s.all()%0A +# @@ -1084,16 +1084,17 @@ .all():%0A +# @@ -1136,16 +1136,17 @@ oyments%0A +# @@ -1198,16 +1198,17 @@ yments:%0A +# @@ -1256,16 +1256,17 @@ up or %5C%0A +# @@ -1330,16 +1330,17 @@ image%5D:%0A +# @@ -1418,16 +1418,17 @@ oyment)%0A +#
3661ca3947763656165f8fc68ea42358ad37285a
Add stub for qiprofile update test.
test/unit/helpers/test_qiprofile.py
test/unit/helpers/test_qiprofile.py
Python
0
@@ -0,0 +1,1208 @@ +import os%0Aimport glob%0Aimport shutil%0Afrom nose.tools import (assert_equal, assert_is_not_none)%0Aimport qixnat%0Afrom ... import (project, ROOT)%0Afrom ...helpers.logging import logger%0Afrom qipipe.helpers import qiprofile%0A%0ACOLLECTION = 'Sarcoma'%0A%22%22%22The test collection.%22%22%22%0A%0ASUBJECT = 'Sarcoma001'%0A%22%22%22The test subjects.%22%22%22%0A%0ASESSION = 'Session01'%0A%22%22%22The test session.%22%22%22%0A%0A%0Aclass TestQIProfile(object):%0A %22%22%22qiprofile update tests.%22%22%22%0A%0A def setUp(self):%0A self._clean()%0A self._seed()%0A%0A def tearDown(self):%0A self._clean()%0A%0A def test_sync_session(self):%0A logger(__name__).debug(%22Testing qiprofile sync on %25s %25s...%22 %25%0A (SUBJECT, SESSION))%0A %0A def _clean(self):%0A %22%22%22Deletes the test XNAT session.%22%22%22%0A with qixnat.connect() as xnat:%0A # Delete the test subject, if it exists.%0A xnat.delete_subjects(project, subject)%0A %0A def _seed(self):%0A %22%22%22Populates the test XNAT session.%22%22%22%0A with qixnat.connect() as xnat:%0A # Delete the test subject, if it exists.%0A xnat.delete_subjects(project(), subject)%0A%0A%0Aif __name__ == %22__main__%22:%0A import nose%0A nose.main(defaultTest=__name__)%0A