column        type           values
commit        stringlengths  40 - 40
subject       stringlengths  1 - 3.25k
old_file      stringlengths  4 - 311
new_file      stringlengths  4 - 311
old_contents  stringlengths  0 - 26.3k
lang          stringclasses  3 values
proba         float64        0 - 1
diff          stringlengths  0 - 7.82k
69939f351cd9c9d555fa1cd091b67314558e862b
Add __future__ import
powerline/segments/tmux.py
powerline/segments/tmux.py
# vim:fileencoding=utf-8:noet
from powerline.bindings.tmux import get_tmux_output


def attached_clients(pl, minimum=1):
    '''Return the number of tmux clients attached to the currently active session

    :param int minimum:
        The minimum number of attached clients that must be present for this
        segment to be visible.
    '''
    session_output = get_tmux_output('list-panes', '-F', '#{session_name}')
    if not session_output:
        return None
    session_name = session_output.rstrip().split('\n')[0]

    attached_clients_output = get_tmux_output('list-clients', '-t', session_name)
    attached_count = len(attached_clients_output.rstrip().split('\n'))

    return None if attached_count < minimum else str(attached_count)
Python
0.999979
@@ -22,16 +22,99 @@ f-8:noet +%0Afrom __future__ import absolute_import, unicode_literals, division, print_function %0A%0Afrom p
ba76ae145c570fce671f0ab115d4a0740a29cde4
add hadoop
conpaas-client/cps/hadoop.py
conpaas-client/cps/hadoop.py
Python
0.000001
@@ -0,0 +1,227 @@ +import sys%0A%0Afrom cps.base import BaseClient%0A%0Aclass Client(BaseClient):%0A%0A def info(self, service_id):%0A service = BaseClient.info(self, service_id)%0A%0A def usage(self, cmdname):%0A BaseClient.usage(self, cmdname)%0A
2476e7202933c197004688d32994d3b24a7ce74f
Add missing fulltoc for Sphinx documentation.
docs/vendor/sphinxcontrib/fulltoc.py
docs/vendor/sphinxcontrib/fulltoc.py
Python
0
@@ -0,0 +1,3288 @@ +# -*- encoding: utf-8 -*-%0A#%0A# Copyright %C2%A9 2012 New Dream Network, LLC (DreamHost)%0A#%0A# Author: Doug Hellmann %[email protected]%3E%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0A%0Afrom sphinx import addnodes%0A%0A%0Adef html_page_context(app, pagename, templatename, context, doctree):%0A %22%22%22Event handler for the html-page-context signal.%0A Modifies the context directly.%0A - Replaces the 'toc' value created by the HTML builder with one%0A that shows all document titles and the local table of contents.%0A - Sets display_toc to True so the table of contents is always%0A displayed, even on empty pages.%0A - Replaces the 'toctree' function with one that uses the entire%0A document structure, ignores the maxdepth argument, and uses%0A only prune and collapse.%0A %22%22%22%0A rendered_toc = get_rendered_toctree(app.builder, pagename)%0A context%5B'toc'%5D = rendered_toc%0A context%5B'display_toc'%5D = True # force toctree to display%0A%0A if %22toctree%22 not in context:%0A # json builder doesn't use toctree func, so nothing to replace%0A return%0A%0A def make_toctree(collapse=True):%0A return get_rendered_toctree(app.builder,%0A pagename,%0A prune=False,%0A collapse=collapse,%0A )%0A%0A context%5B'toctree'%5D = make_toctree%0A%0A%0Adef get_rendered_toctree(builder, docname, prune=False, collapse=True):%0A %22%22%22Build the toctree relative to the named document,%0A with the given parameters, and then return the rendered%0A HTML fragment.%0A %22%22%22%0A fulltoc = build_full_toctree(builder,%0A docname,%0A prune=prune,%0A collapse=collapse,%0A )%0A rendered_toc = builder.render_partial(fulltoc)%5B'fragment'%5D%0A return rendered_toc%0A%0A%0Adef build_full_toctree(builder, docname, prune, collapse):%0A %22%22%22Return a single toctree starting from docname containing all%0A sub-document doctrees.%0A %22%22%22%0A env = builder.env%0A doctree = env.get_doctree(env.config.master_doc)%0A toctrees = %5B%5D%0A for toctreenode in doctree.traverse(addnodes.toctree):%0A toctree = env.resolve_toctree(docname, builder, toctreenode,%0A collapse=collapse,%0A prune=prune,%0A )%0A toctrees.append(toctree)%0A if not toctrees:%0A return None%0A result = toctrees%5B0%5D%0A for toctree in toctrees%5B1:%5D:%0A if toctree:%0A result.extend(toctree.children)%0A env.resolve_references(result, docname, builder)%0A return result%0A%0A%0Adef setup(app):%0A app.connect('html-page-context', html_page_context)%0A
0822547f3fcd79a5332c450d78cd24999e5e81d0
Migrate huts
c2corg_api/scripts/migration/documents/waypoints/huts.py
c2corg_api/scripts/migration/documents/waypoints/huts.py
Python
0.000001
@@ -0,0 +1,3209 @@ +from c2corg_api.scripts.migration.documents.waypoints.waypoint import %5C%0A MigrateWaypoints%0A%0A%0Aclass MigrateHuts(MigrateWaypoints):%0A%0A def get_name(self):%0A return 'huts'%0A%0A def get_count_query(self):%0A return (%0A 'select count(*) from app_huts_archives;'%0A )%0A%0A def get_query(self):%0A return (%0A 'select '%0A ' id, document_archive_id, is_latest_version, elevation, '%0A ' is_protected, redirects_to, '%0A ' ST_Force2D(ST_SetSRID(geom, 3857)) geom, '%0A ' shelter_type, is_staffed, phone, url, staffed_capacity, '%0A ' unstaffed_capacity, has_unstaffed_matress, '%0A ' has_unstaffed_blanket, has_unstaffed_gas, has_unstaffed_wood '%0A 'from app_huts_archives '%0A 'order by id, document_archive_id;'%0A )%0A%0A def get_count_query_locales(self):%0A return (%0A 'select count(*) from app_huts_i18n_archives;'%0A )%0A%0A def get_query_locales(self):%0A return (%0A 'select '%0A ' id, document_i18n_archive_id, is_latest_version, culture, '%0A ' name, description, pedestrian_access, staffed_period '%0A 'from app_huts_i18n_archives '%0A 'order by id, document_i18n_archive_id;'%0A )%0A%0A def get_document(self, document_in, version):%0A waypoint_type = self.convert_type(%0A document_in.shelter_type, MigrateHuts.shelter_types)%0A if waypoint_type is None:%0A waypoint_type = 'hut'%0A%0A return dict(%0A document_id=document_in.id,%0A version=version,%0A waypoint_type=waypoint_type,%0A elevation=document_in.elevation,%0A is_staffed=document_in.is_staffed,%0A phone=document_in.phone,%0A url=document_in.url,%0A capacity_staffed=document_in.staffed_capacity,%0A capacity=document_in.unstaffed_capacity,%0A matress_unstaffed=self.convert_type(%0A document_in.has_unstaffed_matress, MigrateHuts.boolean_types),%0A blanket_unstaffed=self.convert_type(%0A document_in.has_unstaffed_blanket, MigrateHuts.boolean_types),%0A gas_unstaffed=self.convert_type(%0A document_in.has_unstaffed_gas, MigrateHuts.boolean_types),%0A heating_unstaffed=self.convert_type(%0A document_in.has_unstaffed_wood, MigrateHuts.boolean_types)%0A )%0A%0A def get_document_locale(self, document_in, version):%0A # TODO extract summary%0A return dict(%0A document_id=document_in.id,%0A id=document_in.document_i18n_archive_id,%0A version=version,%0A culture=document_in.culture,%0A title=document_in.name,%0A description=document_in.description,%0A access=document_in.pedestrian_access,%0A access_period=document_in.staffed_period%0A )%0A%0A shelter_types = %7B%0A '1': 'hut',%0A '5': 'gite',%0A '2': 'shelter',%0A '3': 'bivouac',%0A '4': 'base_camp',%0A '6': 'camp_site'%0A %7D%0A%0A boolean_types = %7B%0A '1': False,%0A '8': True,%0A '0': None,%0A '10': None # non applicable%0A %7D%0A
c994c1c86df7e6698ccef342b1b2101f03c01587
Add daily stats
tpt/ppatrigger/management/commands/dailystats.py
tpt/ppatrigger/management/commands/dailystats.py
Python
0.000012
@@ -0,0 +1,1989 @@ +import traceback%0Aimport pytz%0Afrom datetime import datetime, timedelta%0Afrom django.core.management.base import BaseCommand%0Afrom ppatrigger.models import Package%0Afrom ppatrigger.models import DailyStats%0Afrom ppatrigger.models import Build%0A%0Aclass Command(BaseCommand):%0A args = ''%0A help = 'Compile daily stats for all projects'%0A%0A def handle(self, *args, **options):%0A%0A packages = Package.objects.all()%0A%0A for package in packages:%0A%0A try:%0A latest_daily = DailyStats.objects.filter(%0A package__exact = package).latest('created_at')%0A%0A latest = latest_daily.created_at%0A except DailyStats.DoesNotExist:%0A # First time running, use package creation%0A latest = package.created_at%0A%0A now = datetime.utcnow().replace(tzinfo = pytz.utc)%0A day = latest%0A%0A while day %3C= now: %0A self.stdout.write(str(day))%0A next_day = day + timedelta(days=1)%0A%0A builds = Build.objects.filter(%0A project__package__exact = package,%0A fetched_at__gte = day,%0A fetched_at__lt = next_day)%0A%0A if len(builds):%0A successful = 0%0A failed = 0%0A errors = 0%0A%0A for build in builds:%0A self.stdout.write(str(build))%0A if build.is_success():%0A successful += 1%0A elif build.is_failure():%0A failed += 1%0A else:%0A errors += 1%0A%0A stats = DailyStats(package = package,%0A created_at = now,%0A successful = successful,%0A failed = failed,%0A errors = errors)%0A%0A stats.save()%0A%0A day = next_day%0A
7c2095c0330d14382db76bef944efae5f8d76faf
Add file with tests from rainbow categorical type example
dynd/tests/test_types_categorical.py
dynd/tests/test_types_categorical.py
Python
0
@@ -0,0 +1,2723 @@ +import sys%0Aimport unittest%0Afrom dynd import nd, ndt%0A%0Aclass TestDType(unittest.TestCase):%0A def test_make_categorical(self):%0A # Create categorical type with 256 values%0A tp = ndt.make_categorical(nd.range(0, 512, 2))%0A self.assertEqual(tp.type_id, 'categorical')%0A self.assertEqual(tp.storage_type, ndt.uint8)%0A self.assertEqual(tp.category_type, ndt.int32)%0A # Create categorical type with 256 %3C x %3C 65536 values%0A tp = ndt.make_categorical(nd.range(40000, dtype=ndt.float32))%0A self.assertEqual(tp.type_id, 'categorical')%0A self.assertEqual(tp.storage_type, ndt.uint16)%0A self.assertEqual(tp.category_type, ndt.float32)%0A # Create categorical type with %3E 65536 values%0A tp = ndt.make_categorical(nd.range(70000, dtype=ndt.int128))%0A self.assertEqual(tp.type_id, 'categorical')%0A self.assertEqual(tp.storage_type, ndt.uint32)%0A self.assertEqual(tp.category_type, ndt.int128)%0A%0A def test_factor_categorical(self):%0A a = nd.array(%5B%222012-05-10T02:29:42%22%5D * 100, %22datetime%22)%0A dt1 = ndt.factor_categorical(a.date)%0A #print (dt1)%0A self.assertEqual(nd.as_py(dt1.categories.ucast(ndt.string)),%0A %5B'2012-05-10'%5D)%0A%0A def test_factor_fixedstring(self):%0A adata = %5B('M', 13), ('F', 17), ('F', 34), ('M', 19),%0A ('M', 13), ('F', 34), ('F', 22)%5D%0A a = nd.array(adata, dtype='%7Bgender: string%5B1%5D, age: int32%7D')%0A catdt = ndt.factor_categorical(a)%0A b = a.ucast(catdt)%0A x = repr(b)%0A self.assertTrue('%5B%22M%22, 13%5D' in x)%0A%0A def test_rainbow_example(self):%0A rainbow_vals = %5B'red', 'orange', 'yellow',%0A 'green', 'blue', 'indigo', 'violet'%5D%0A color_vals = %5B'red', 'red', 'violet', 'blue',%0A 'yellow', 'yellow', 'red', 'indigo'%5D%0A color_vals_int = %5Brainbow_vals.index(x) for x in color_vals%5D%0A # Create the type%0A rainbow = ndt.make_categorical(rainbow_vals)%0A # Make sure it looks the way we expect%0A self.assertEqual(rainbow.type_id, 'categorical')%0A self.assertEqual(rainbow.data_size, 1)%0A self.assertEqual(rainbow.data_alignment, 1)%0A self.assertEqual(rainbow.storage_type, ndt.uint8)%0A self.assertEqual(rainbow.category_type, ndt.string)%0A self.assertEqual(nd.as_py(rainbow.categories), rainbow_vals)%0A # Create an array of the type%0A colors = nd.array(color_vals, dtype=rainbow)%0A # Make sure it is convertible back to strings/pyobject/int%0A self.assertEqual(nd.as_py(colors), color_vals)%0A self.assertEqual(nd.as_py(colors.ints), color_vals_int)%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
9f73e60ba9d3775ef4dda9c815412f28ed80b518
Add new package: lzop (#17098)
var/spack/repos/builtin/packages/lzop/package.py
var/spack/repos/builtin/packages/lzop/package.py
Python
0
@@ -0,0 +1,984 @@ +# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass Lzop(CMakePackage):%0A %22%22%22lzop is a file compressor which is very similar to gzip. lzop uses%0A the LZO data compression library for compression services, and its main%0A advantages over gzip are much higher compression and decompression speed%0A (at the cost of some compression ratio).%22%22%22%0A%0A homepage = %22https://www.lzop.org%22%0A url = %22https://www.lzop.org/download/lzop-1.03.tar.gz%22%0A%0A version('1.04', sha256='7e72b62a8a60aff5200a047eea0773a8fb205caf7acbe1774d95147f305a2f41')%0A version('1.03', sha256='c1425b8c77d49f5a679d5a126c90ea6ad99585a55e335a613cae59e909dbb2c9')%0A version('1.01', sha256='28acd94d933befbc3af986abcfe833173fb7563b66533fdb4ac592f38bb944c7')%0A%0A depends_on('pkgconfig', type='build')%0A depends_on('lzo')%0A
1f5158d7c24e304b1b2ed2c374cd05aa662aa333
Add LruCache object.
statbot/cache.py
statbot/cache.py
Python
0
@@ -0,0 +1,1336 @@ +#%0A# cache.py%0A#%0A# statbot - Store Discord records for later analysis%0A# Copyright (c) 2017 Ammon Smith%0A#%0A# statbot is available free of charge under the terms of the MIT%0A# License. You are free to redistribute and/or modify it under those%0A# terms. It is distributed in the hopes that it will be useful, but%0A# WITHOUT ANY WARRANTY. See the LICENSE file for more details.%0A#%0A%0Afrom collections import MutableMapping, OrderedDict%0A%0A__all__ = %5B%0A 'LruCache',%0A%5D%0A%0Aclass LruCache(MutableMapping):%0A __slots__ = (%0A 'store',%0A 'max_size',%0A )%0A%0A def __init__(self, max_size=None):%0A self.store = OrderedDict()%0A self.max_size = max_size%0A%0A def __getitem__(self, key):%0A obj = self.store.pop(key)%0A self.store%5Bkey%5D = obj%0A return obj%0A%0A def get(self, key, default=None):%0A try:%0A return self%5Bkey%5D%0A except KeyError:%0A return default%0A%0A def __setitem__(self, key, value):%0A self.store.pop(key, None)%0A self.store%5Bkey%5D = value%0A%0A while len(self) %3E self.max_size:%0A self.store.popitem(last=False)%0A%0A def __delitem__(self, key):%0A del self.store%5Bkey%5D%0A%0A def __contains__(self, key):%0A return key in self.store%0A%0A def __iter__(self):%0A return iter(self.store)%0A%0A def __len__(self):%0A return len(self.store)%0A
20b8f5c5d390d8449e4dc18cf98291486aeb7153
[151. Reverse Words in a String][Accepted]committed by Victor
151-Reverse-Words-in-a-String/solution.py
151-Reverse-Words-in-a-String/solution.py
Python
0.999993
@@ -0,0 +1,479 @@ +class Solution(object):%0A def reverseWords(self, s):%0A %22%22%22%0A :type s: str%0A :rtype: str%0A %22%22%22%0A if not s:%0A return %22%22%0A %0A words=s.split()%0A print words%0A i,j=0,len(words)-1%0A while i%3Cj:%0A words%5Bi%5D,words%5Bj%5D=words%5Bj%5D,words%5Bi%5D%0A i+=1%0A j-=1%0A resstr=''%0A for word in words:%0A resstr+=word+' '%0A %0A return resstr.strip()%0A %0A
8d7a4dd6c7a1d7a3ed65ff1ce9ddbde21e1b7cdb
set mac address properly
scripts/opencontrail-kubelet/opencontrail_kubelet/lxc_manager.py
scripts/opencontrail-kubelet/opencontrail_kubelet/lxc_manager.py
import logging
import re
import subprocess
import sys


def shell_command(str):
    logging.debug('Ran shell command: %s' % str)
    cmd = subprocess.check_output(str, shell=True)
    logging.debug('output: %s' % cmd.rstrip())
    return cmd


class LxcManager(object):
    def __init__(self):
        pass

    def _interface_generate_unique_name(self):
        output = shell_command('ip link list')
        ids = {}
        for line in output.split('\n'):
            m = re.match(r'[\d]+: instance([\d]+)', line)
            if m:
                ids[m.group(1)] = True
        for i in range(256):
            if str(i) in ids:
                continue
            return 'instance%d' % i
        return None

    # Find the correct interface for this nsname
    def interface_find_peer_name(self, ifname_instance, nsname):
        ns_ifindex = shell_command("ip netns exec %s ethtool -S %s | grep peer_ifindex | awk '{print $2}'" % (nsname, ifname_instance))

        # Now look through all interfaces in the bridge and find the one whose
        # ifindex is 1 less than ns_ifindex
        bridge_members = [ i[i.find("veth"):] for i in \
            shell_command("brctl show docker0 | grep veth").split("\n") \
        ]
        # Remove the trailing empty string, which comes as a result of split.
        bridge_members.pop()

        bridge_members_ifindex = [ shell_command( \
            "ethtool -S %s | grep peer_ifindex | awk '{print $2}'" % i) \
            for i in bridge_members ]

        try:
            member_index = bridge_members_ifindex.index('%s\n' % \
                (int(ns_ifindex) - 1))
        except:
            logging.info('did not find member %s' % bridge_members[member_index])
            logging.error("Cannot find matching veth interface among brige members")
            raise

        logging.info('found member %s' % bridge_members[member_index])
        return bridge_members[member_index]

    # Remove the interface out of the docker bridge
    def move_interface(self, nsname, pid, ifname_instance, vmi):
        ifname_master = self.interface_find_peer_name(ifname_instance, nsname)
        shell_command('brctl delif docker0 %s' % ifname_master)
        if vmi:
            mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
            shell_command('ip netns exec %s ifconfig eth0 hw ether %s' \
                % (nsname, mac))
        return ifname_master

    def create_interface(self, nsname, ifname_instance, vmi=None):
        ifname_master = self._interface_generate_unique_name()
        shell_command('ip link add %s type veth peer name %s' %
                      (ifname_instance, ifname_master))
        if vmi:
            mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
            shell_command('ifconfig %s hw ether %s' % (ifname_instance,mac))
        shell_command('ip link set %s netns %s' % (ifname_instance, nsname))
        shell_command('ip link set %s up' % ifname_master)
        return ifname_master

    def _interface_list_contains(self, output, iface):
        for line in output.split('\n'):
            m = re.match(r'[\d]+: ' + iface + ':', line)
            if m:
                return True
        return False

    def _get_master_ifname(self, daemon, ifname_instance):
        output = shell_command('ip netns exec ns-%s ethtool -S %s' %
                               (daemon, ifname_instance))
        m = re.search(r'peer_ifindex: (\d+)', output)
        ifindex = m.group(1)
        output = shell_command('ip link list')
        expr = '^' + ifindex + ': (\w+): '
        regex = re.compile(expr, re.MULTILINE)
        m = regex.search(output)
        return m.group(1)

    def interface_update(self, daemon, vmi, ifname_instance):
        """
        1. Make sure that the interface exists in the name space.
        2. Update the mac address.
        """
        output = shell_command('ip netns exec ns-%s ip link list' % daemon)
        if not self._interface_list_contains(output, ifname_instance):
            ifname_master = self.create_interface('ns-%s' % daemon,
                                                  ifname_instance)
        else:
            ifname_master = self._get_master_ifname(daemon, ifname_instance)

        mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
        shell_command('ip netns exec ns-%s ifconfig %s hw ether %s' %
                      (daemon, ifname_instance, mac))
        return ifname_master

    def interface_config(self, daemon, ifname_guest, advertise_default=True,
                         ip_prefix=None):
        """
        Once the interface is operational, configure the IP addresses.
        For a bi-directional interface we use dhclient.
        """
        if advertise_default:
            shell_command('ip netns exec ns-%s dhclient %s' %
                          (daemon, ifname_guest))
        else:
            shell_command('ip netns exec ns-%s ip addr add %s/%d dev %s' %
                          (daemon, ip_prefix[0], ip_prefix[1], ifname_guest))
            shell_command('ip netns exec ns-%s ip link set %s up' %
                          (daemon, ifname_guest))
            # disable reverse path filtering
            shell_command('ip netns exec ns-%s sh -c ' +
                          '"echo 2 >/proc/sys/net/ipv4/conf/%s/rp_filter"' %
                          (daemon, ifname_guest))

    def clear_interfaces(self, nsname):
        shell_command('ip netns exec %s dhclient -r' % nsname)
        output = shell_command('ip netns exec %s ip link list' % nsname)
        for line in output.split('\n'):
            m = re.match(r'^[\d]+: ([\w]+):', line)
            if m:
                ifname = m.group(1)
                if ifname == 'lo':
                    continue
                shell_command('ip netns exec %s ip link delete %s' %
                              (nsname, ifname))

    def namespace_init(self, daemon):
        output = shell_command('ip netns list')
        for line in output.split():
            if line == 'ns-' + daemon:
                return False
        shell_command('ip netns add ns-%s' % daemon)
        return True

    def namespace_delete(self, daemon):
        shell_command('ip netns delete ns-%s' % daemon)
Python
0
@@ -2404,32 +2404,153 @@ (nsname, mac))%0A + shell_command('ip netns exec %25s ip addr flush dev %25s'%0A %25 (nsname, ifname_instance))%0A return i
0eb573afa067e23422c5a5495563a6f4d87a549d
Create soundcloud.py
apis/soundcloud.py
apis/soundcloud.py
Python
0.000009
@@ -0,0 +1,2060 @@ +%22%22%22 Contains functions to fetch info from api.soundcloud.com %22%22%22%0Afrom utilities import web%0A%0A# Soundcloud API key.%0ASOUNDCLOUD_API_KEY = '4ce43a6430270a1eea977ff8357a25a3'%0A%0A%0Adef soundcloud_search(search):%0A %22%22%22%0A Searches soundcloud's API for a given search term.%0A%0A :param search: str the search term to search for.%0A :return: dict%7B'type=soundcloud', 'video_id', 'video_time', 'video_title'%7D or None on no match or error.%0A %22%22%22%0A if search:%0A search_url = 'http://api.soundcloud.com/tracks/?' %5C%0A 'filter=streamable&q=%25s&limit=25&client_id=%25s' %25 (search, SOUNDCLOUD_API_KEY)%0A%0A response = web.http_get(search_url, json=True)%0A if response%5B'json'%5D is not None:%0A try:%0A track_id = response%5B'json'%5D%5B0%5D%5B'id'%5D%0A track_time = response%5B'json'%5D%5B0%5D%5B'duration'%5D%0A track_title = response%5B'json'%5D%5B0%5D%5B'title'%5D.encode('ascii', 'ignore')%0A return %7B%0A 'type': 'soundCloud',%0A 'video_id': track_id,%0A 'video_time': track_time,%0A 'video_title': track_title%0A %7D%0A except (IndexError, KeyError):%0A return None%0A return None%0A%0A%0Adef soundcloud_track_info(track_id):%0A if track_id:%0A info_url = 'http://api.soundcloud.com/tracks/%25s?client_id=%25s' %25 (track_id, SOUNDCLOUD_API_KEY)%0A response = web.http_get(info_url, json=True)%0A%0A if response%5B'json'%5D is not None:%0A try:%0A user_id = response%5B'json'%5D%5B0%5D%5B'user_id'%5D%0A track_time = response%5B'json'%5D%5B0%5D%5B'duration'%5D%0A track_title = response%5B'json'%5D%5B0%5D%5B'title'%5D.encode('ascii', 'ignore')%0A return %7B%0A 'type': 'soundCloud',%0A 'video_id': track_id,%0A 'video_time': track_time,%0A 'video_title': track_title,%0A 'user_id': user_id%0A %7D%0A except (IndexError, KeyError):%0A return None%0A return None%0A
4d01b80a6ef61f50604a8f996fa1f3bf3205b017
check point
scripts/opencontrail-kubelet/opencontrail_kubelet/lxc_manager.py
scripts/opencontrail-kubelet/opencontrail_kubelet/lxc_manager.py
import logging
import re
import subprocess
import sys


def shell_command(str):
    cmd = subprocess.check_output(str, shell=True)
    logging.debug('Ran shell command: %s' % str)
    logging.debug('output: %s' % cmd.rstrip())
    return cmd


class LxcManager(object):
    def __init__(self):
        pass

    def _interface_generate_unique_name(self):
        output = shell_command('ip link list')
        ids = {}
        for line in output.split('\n'):
            m = re.match(r'[\d]+: instance([\d]+)', line)
            if m:
                ids[m.group(1)] = True
        for i in range(256):
            if str(i) in ids:
                continue
            return 'instance%d' % i
        return None

    # Find the correct interface for this nsname
    def interface_find_peer_name(self, ifname_instance, pid):
        ns_ifindex = shell_command("echo ethtool -S %s | sudo nsenter -n -t %s sh | grep peer_ifindex | awk '{print $2}'" % (ifname_instance, pid))

        # Now look through all interfaces in the bridge and find the one whose
        # ifindex is 1 less than ns_ifindex
        bridge_members = [ i[i.find("veth"):] for i in \
            subprocess.check_output("brctl show docker0 | grep veth", \
                                    shell = True).split("\n") \
        ]
        # Remove the trailing empty string, which comes as a result of split.
        bridge_members.pop()

        bridge_members_ifindex = [ subprocess.check_output( \
            "ethtool -S %s | grep peer_ifindex | awk '{print $2}'" % i, \
            shell = True) for i in bridge_members ]

        try:
            member_index = bridge_members_ifindex.index('%s\n' % (ns_ifindex-1))
        except:
            print "Cannot find matching veth interface among brige members"
            raise

        return bridge_members[member_index]

    # Remove the interface out of the docker bridge
    def move_interface(self, nsname, pid, ifname_instance, vmi):
        ifname_master = self.interface_find_peer_name(ifname_instance, pid)
        shell_command('brctl delif docker0 %s' % ifname_master)
        if vmi:
            mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
            shell_command('ip netns exec %s hw ether %s' % (nsname, mac))
        return ifname_master

    def create_interface(self, nsname, ifname_instance, vmi=None):
        ifname_master = self._interface_generate_unique_name()
        shell_command('ip link add %s type veth peer name %s' %
                      (ifname_instance, ifname_master))
        if vmi:
            mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
            shell_command('ifconfig %s hw ether %s' % (ifname_instance,mac))
        shell_command('ip link set %s netns %s' % (ifname_instance, nsname))
        shell_command('ip link set %s up' % ifname_master)
        return ifname_master

    def _interface_list_contains(self, output, iface):
        for line in output.split('\n'):
            m = re.match(r'[\d]+: ' + iface + ':', line)
            if m:
                return True
        return False

    def _get_master_ifname(self, daemon, ifname_instance):
        output = shell_command('ip netns exec ns-%s ethtool -S %s' %
                               (daemon, ifname_instance))
        m = re.search(r'peer_ifindex: (\d+)', output)
        ifindex = m.group(1)
        output = shell_command('ip link list')
        expr = '^' + ifindex + ': (\w+): '
        regex = re.compile(expr, re.MULTILINE)
        m = regex.search(output)
        return m.group(1)

    def interface_update(self, daemon, vmi, ifname_instance):
        """
        1. Make sure that the interface exists in the name space.
        2. Update the mac address.
        """
        output = shell_command('ip netns exec ns-%s ip link list' % daemon)
        if not self._interface_list_contains(output, ifname_instance):
            ifname_master = self.create_interface('ns-%s' % daemon,
                                                  ifname_instance)
        else:
            ifname_master = self._get_master_ifname(daemon, ifname_instance)

        mac = vmi.virtual_machine_interface_mac_addresses.mac_address[0]
        shell_command('ip netns exec ns-%s ifconfig %s hw ether %s' %
                      (daemon, ifname_instance, mac))
        return ifname_master

    def interface_config(self, daemon, ifname_guest, advertise_default=True,
                         ip_prefix=None):
        """
        Once the interface is operational, configure the IP addresses.
        For a bi-directional interface we use dhclient.
        """
        if advertise_default:
            shell_command('ip netns exec ns-%s dhclient %s' %
                          (daemon, ifname_guest))
        else:
            shell_command('ip netns exec ns-%s ip addr add %s/%d dev %s' %
                          (daemon, ip_prefix[0], ip_prefix[1], ifname_guest))
            shell_command('ip netns exec ns-%s ip link set %s up' %
                          (daemon, ifname_guest))
            # disable reverse path filtering
            shell_command('ip netns exec ns-%s sh -c ' +
                          '"echo 2 >/proc/sys/net/ipv4/conf/%s/rp_filter"' %
                          (daemon, ifname_guest))

    def clear_interfaces(self, nsname):
        shell_command('ip netns exec %s dhclient -r' % nsname)
        output = shell_command('ip netns exec %s ip link list' % nsname)
        for line in output.split('\n'):
            m = re.match(r'^[\d]+: ([\w]+):', line)
            if m:
                ifname = m.group(1)
                if ifname == 'lo':
                    continue
                shell_command('ip netns exec %s ip link delete %s' %
                              (nsname, ifname))

    def namespace_init(self, daemon):
        output = shell_command('ip netns list')
        for line in output.split():
            if line == 'ns-' + daemon:
                return False
        shell_command('ip netns add ns-%s' % daemon)
        return True

    def namespace_delete(self, daemon):
        shell_command('ip netns delete ns-%s' % daemon)
Python
0.000001
@@ -1169,38 +1169,28 @@ s -ubprocess.check_output +hell_command (%22brctl @@ -1218,26 +1218,16 @@ eth%22, %5C%0A - @@ -1432,38 +1432,28 @@ ex = %5B s -ubprocess.check_output +hell_command ( %5C%0A
5feef18ca3dda099f33568bf0f2b189fe297a3e0
add test function rosenbrock
pyea/functions/__init__.py
pyea/functions/__init__.py
Python
0.000002
@@ -0,0 +1,1165 @@ +#TODO: Add documentation%0A%0Aimport numpy as np%0A%0A%0Adef func_rosenbrock(pop, a=1, b=100):%0A # http://en.wikipedia.org/wiki/Rosenbrock_function%0A x = pop%5B:, 0%5D%0A y = pop%5B:, 1%5D%0A return (a - x)**2 + b * (y - x**2)**2%0A%0A%0Adef print_func(func, **kwargs):%0A from mpl_toolkits.mplot3d import Axes3D%0A import matplotlib.pyplot as plt%0A from matplotlib import cm%0A%0A if func == 'rosenbrock':%0A # Initial parammeters%0A params = dict(range_=%5B-1.5, 1.5%5D, step_=0.05)%0A # Params in kwargs%0A for param in params.keys():%0A if param in kwargs:%0A params%5Bparam%5D = kwargs%5Bparam%5D%0A # Fill grid%0A x = np.arange(params%5B'range_'%5D%5B0%5D, params%5B'range_'%5D%5B1%5D, params%5B'step_'%5D)%0A y = np.arange(params%5B'range_'%5D%5B0%5D, params%5B'range_'%5D%5B1%5D, params%5B'step_'%5D)%0A x, y = np.meshgrid(x, y)%0A pop = np.vstack((x.flatten(), y.flatten())).transpose()%0A z = func_rosenbrock(pop)%0A z = z.reshape(x.shape)%0A # Plot%0A fig = plt.figure()%0A ax = fig.gca(projection='3d')%0A surf = ax.plot_surface(x, y, z, rstride=1, cstride=1, cmap=cm.jet, linewidth=0, antialiased=False)%0A plt.show()
378bd7f4d647243a1e736f4dc0bfd0742d5f3d0b
Create Combinations.py
Array/Combinations.py
Array/Combinations.py
Python
0.000001
@@ -0,0 +1,684 @@ +%60%60%60%0AGiven two integers n and k, return all possible combinations of k numbers out of 1 ... n.%0A%0AFor example,%0AIf n = 4 and k = 2, a solution is:%0A%0A%5B%0A %5B2,4%5D,%0A %5B3,4%5D,%0A %5B2,3%5D,%0A %5B1,2%5D,%0A %5B1,3%5D,%0A %5B1,4%5D,%0A%5D%0A%60%60%60%0A%0Aclass Solution:%0A # @return a list of lists of integers%0A def combine(self, n, k):%0A def combine_helper(n, k, start, depth, subres):%0A if depth == k: %0A result.append(subres)%0A return%0A for i in xrange(start, n+1):%0A combine_helper(n, k, i+1, depth+1, subres+%5Bi%5D)%0A %0A if n == 0 or k == 0: return %5B%5B%5D%5D %0A result = %5B%5D%0A combine_helper( n, k, 1, 0, %5B%5D)%0A return result %0A
be9a1afc61be483e8c585cef247f62071809c894
add BuildWorktree.set_default_config
python/qibuild/worktree.py
python/qibuild/worktree.py
import os

import qisys.command
import qisys.worktree

from qibuild.dependencies_solver import topological_sort
import qibuild.build
import qibuild.build_config
import qibuild.project


class BuildWorkTreeError(Exception):
    pass


class BuildWorkTree(qisys.worktree.WorkTreeObserver):
    """ """
    def __init__(self, worktree):
        self.worktree = worktree
        self.root = self.worktree.root
        self.build_config = qibuild.build_config.CMakeBuildConfig()
        self.build_projects = self._load_build_projects()
        worktree.register(self)

    @property
    def qibuild_xml(self):
        config_path = os.path.join(self.worktree.dot_qi, "qibuild.xml")
        if not os.path.exists(config_path):
            with open(config_path, "w") as fp:
                fp.write("<qibuild />\n")
        return config_path

    def get_build_project(self, name, raises=True):
        """ Get a build project given its name """
        for build_project in self.build_projects:
            if build_project.name == name:
                return build_project
        if raises:
            raise BuildWorkTreeError("No such qibuild project: %s" % name)

    def get_deps(self, top_project, runtime=False, build_deps_only=False):
        """ Get the depencies of a project """
        to_sort = dict()
        if build_deps_only:
            for project in self.build_projects:
                to_sort[project.name] = project.depends
        elif runtime:
            for project in self.build_projects:
                to_sort[project.name] = project.rdepends
        else:
            for project in self.build_projects:
                to_sort[project.name] = project.rdepends.union(project.depends)
        names = topological_sort(to_sort, [top_project.name])
        deps = list()
        for name in names:
            dep_project = self.get_build_project(name, raises=False)
            if dep_project:
                deps.append(dep_project)
        return deps

    def on_project_added(self, project):
        """ Called when a new project has been registered """
        self.build_projects = self._load_build_projects()

    def on_project_removed(self, project):
        """ Called when a build project has been removed """
        self.build_projects = self._load_build_projects()

    def _load_build_projects(self):
        """ Create BuildProject for every buildable project in the worktree """
        build_projects = list()
        for wt_project in self.worktree.projects:
            if not os.path.exists(wt_project.qiproject_xml):
                continue
            build_project = qibuild.project.BuildProject(self, wt_project)
            build_projects.append(build_project)
        return build_projects

    def configure_build_profile(self, name, flags):
        """ Configure a build profile for the worktree """
        qibuild.profile.configure_build_profile(self.qibuild_xml, name, flags)

    def remove_build_profile(self, name):
        """ Remove a build profile for this worktree """
        qibuild.profile.configure_build_profile(self.qibuild_xml, name)
Python
0.000001
@@ -285,16 +285,72 @@ %0A %22%22%22 + Stores a list of projects that can be built using CMake %0A%0A %22%22 @@ -3230,10 +3230,382 @@ name)%0A%0A -%0A + def set_default_config(self, name):%0A %22%22%22 Set the default toolchain for this worktree %22%22%22%0A local_settings = qibuild.config.LocalSettings()%0A tree = qisys.qixml.read(self.qibuild_xml)%0A local_settings.parse(tree)%0A local_settings.defaults.config = name%0A tree = local_settings.tree()%0A qisys.qixml.write(tree, self.qibuild_xml) %0A
474c5f977ab5b035567f0107c457622c51189ac6
Add new topics migration file
csunplugged/topics/migrations/0086_auto_20171108_0840.py
csunplugged/topics/migrations/0086_auto_20171108_0840.py
Python
0
@@ -0,0 +1,2529 @@ +# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.5 on 2017-11-08 08:40%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('topics', '0085_auto_20171030_0035'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='programmingchallengelanguage',%0A name='name_de',%0A field=models.CharField(max_length=200, null=True),%0A ),%0A migrations.AddField(%0A model_name='programmingchallengelanguage',%0A name='name_en',%0A field=models.CharField(max_length=200, null=True),%0A ),%0A migrations.AddField(%0A model_name='programmingchallengelanguage',%0A name='name_fr',%0A field=models.CharField(max_length=200, null=True),%0A ),%0A migrations.AlterField(%0A model_name='classroomresource',%0A name='description',%0A field=models.CharField(default='', max_length=100),%0A ),%0A migrations.AlterField(%0A model_name='classroomresource',%0A name='description_de',%0A field=models.CharField(default='', max_length=100, null=True),%0A ),%0A migrations.AlterField(%0A model_name='classroomresource',%0A name='description_en',%0A field=models.CharField(default='', max_length=100, null=True),%0A ),%0A migrations.AlterField(%0A model_name='classroomresource',%0A name='description_fr',%0A field=models.CharField(default='', max_length=100, null=True),%0A ),%0A migrations.AlterField(%0A model_name='curriculumarea',%0A name='name',%0A field=models.CharField(default='', max_length=100),%0A ),%0A migrations.AlterField(%0A model_name='curriculumarea',%0A name='name_de',%0A field=models.CharField(default='', max_length=100, null=True),%0A ),%0A migrations.AlterField(%0A model_name='curriculumarea',%0A name='name_en',%0A field=models.CharField(default='', max_length=100, null=True),%0A ),%0A migrations.AlterField(%0A model_name='curriculumarea',%0A name='name_fr',%0A field=models.CharField(default='', max_length=100, null=True),%0A ),%0A migrations.AlterField(%0A model_name='programmingchallengelanguage',%0A name='name',%0A field=models.CharField(max_length=200),%0A ),%0A %5D%0A
afe0f8fc731639cbe28798bb2a554c84ccbd8b2a
Create test_script.py
test_script.py
test_script.py
Python
0.000004
@@ -0,0 +1,20 @@ +print %22hello world%22%0A
a2bc05454ba166e3931fba130e44f49f66a79080
Add virtualshackles crawler
comics/comics/virtualshackles.py
comics/comics/virtualshackles.py
Python
0
@@ -0,0 +1,1278 @@ +import re%0A%0Afrom comics.aggregator.crawler import CrawlerBase, CrawlerResult%0Afrom comics.meta.base import MetaBase%0A%0Aclass Meta(MetaBase):%0A name = 'Virtual Shackles'%0A language = 'en'%0A url = 'http://www.virtualshackles.com/'%0A start_date = '2009-03-27'%0A rights = 'Jeremy Vinar and Mike Fahmie'%0A%0Aclass Crawler(CrawlerBase):%0A history_capable_days = 32%0A schedule = 'We,Fr'%0A%0A def crawl(self, pub_date):%0A feed = self.parse_feed('http://feeds.feedburner.com/VirtualShackles?format=atom')%0A for entry in feed.for_date(pub_date):%0A url = entry.summary.src('img%5Bsrc*=%22virtualshackles.com/img/%22%5D')%0A title = entry.title%0A%0A page_url = entry.raw_entry.feedburner_origlink%0A page_url = re.sub(r'/(%5Cd+/?)', '/-%5Cg%3C1%3E', page_url)%0A%0A page = self.parse_page(page_url)%0A%0A orion = page.text('#orionComments')%0A jack = page.text('#jackComments')%0A%0A if orion and jack:%0A comments = u'orion: %25s%5Cn jack: %25s' %25 (orion, jack)%0A elif orion:%0A comments = u'orion: %25s' %25 (orion)%0A elif jack:%0A comments = u'jack: %25s' %25 (jack)%0A else:%0A comments = None%0A%0A return CrawlerResult(url, title, comments)%0A
9fc7f98812a67606bb707a02c1acbe9dd77d94fa
Update ExamLogFilter.filter_collection
kolibri/logger/api.py
kolibri/logger/api.py
from django.db.models.query import F
from django_filters import ModelChoiceFilter
from django_filters.rest_framework import CharFilter, DjangoFilterBackend, FilterSet
from kolibri.auth.api import KolibriAuthPermissions, KolibriAuthPermissionsFilter
from kolibri.auth.filters import HierarchyRelationsFilter
from kolibri.auth.models import Classroom, Collection, Facility, FacilityUser, LearnerGroup
from kolibri.content.api import OptionalPageNumberPagination
from kolibri.core.exams.models import Exam
from rest_framework import filters, viewsets

from .models import AttemptLog, ContentSessionLog, ContentSummaryLog, ExamAttemptLog, ExamLog, MasteryLog, UserSessionLog
from .permissions import ExamActivePermissions
from .serializers import (
    AttemptLogSerializer, ContentSessionLogSerializer, ContentSummaryLogSerializer, ExamAttemptLogSerializer,
    ExamLogSerializer, MasteryLogSerializer, TotalContentProgressSerializer, UserSessionLogSerializer
)


class BaseLogFilter(FilterSet):
    facility = ModelChoiceFilter(method="filter_facility", queryset=Facility.objects.all())
    classroom = ModelChoiceFilter(method="filter_classroom", queryset=Classroom.objects.all())
    learner_group = ModelChoiceFilter(method="filter_learner_group", queryset=LearnerGroup.objects.all())

    # Only a superuser can filter by facilities
    def filter_facility(self, queryset, name, value):
        return queryset.filter(user__facility=value)

    def filter_classroom(self, queryset, name, value):
        return HierarchyRelationsFilter(queryset).filter_by_hierarchy(
            ancestor_collection=value,
            target_user=F("user"),
        )

    def filter_learner_group(self, queryset, name, value):
        return HierarchyRelationsFilter(queryset).filter_by_hierarchy(
            ancestor_collection=value,
            target_user=F("user"),
        )


class ContentSessionLogFilter(BaseLogFilter):

    class Meta:
        model = ContentSessionLog
        fields = ['user_id', 'content_id']


class ContentSessionLogViewSet(viewsets.ModelViewSet):
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = ContentSessionLog.objects.all()
    serializer_class = ContentSessionLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = ContentSessionLogFilter


class ContentSummaryLogFilter(BaseLogFilter):

    class Meta:
        model = ContentSummaryLog
        fields = ['user_id', 'content_id']


class ContentSummaryLogViewSet(viewsets.ModelViewSet):
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = ContentSummaryLog.objects.all()
    serializer_class = ContentSummaryLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = ContentSummaryLogFilter


class TotalContentProgressViewSet(viewsets.ModelViewSet):
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter,)
    queryset = FacilityUser.objects.all()
    serializer_class = TotalContentProgressSerializer


class UserSessionLogFilter(BaseLogFilter):

    class Meta:
        model = UserSessionLog
        fields = ['user_id']


class UserSessionLogViewSet(viewsets.ModelViewSet):
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = UserSessionLog.objects.all()
    serializer_class = UserSessionLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = UserSessionLogFilter


class MasteryFilter(FilterSet):

    class Meta:
        model = MasteryLog
        fields = ['summarylog']


class MasteryLogViewSet(viewsets.ModelViewSet):
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = MasteryLog.objects.all()
    serializer_class = MasteryLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = MasteryFilter


class AttemptFilter(FilterSet):
    content = CharFilter(method="filter_content")

    def filter_content(self, queryset, name, value):
        return queryset.filter(masterylog__summarylog__content_id=value)

    class Meta:
        model = AttemptLog
        fields = ['masterylog', 'complete', 'user', 'content']


class AttemptLogViewSet(viewsets.ModelViewSet):
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend, filters.OrderingFilter)
    queryset = AttemptLog.objects.all()
    serializer_class = AttemptLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = AttemptFilter
    ordering_fields = ('end_timestamp',)
    ordering = ('end_timestamp',)


class ExamAttemptFilter(FilterSet):
    exam = ModelChoiceFilter(method="filter_exam", queryset=Exam.objects.all())
    user = ModelChoiceFilter(method="filter_user", queryset=FacilityUser.objects.all())

    def filter_exam(self, queryset, name, value):
        return queryset.filter(examlog__exam=value)

    def filter_user(self, queryset, name, value):
        return queryset.filter(examlog__user=value)

    class Meta:
        model = ExamAttemptLog
        fields = ['examlog', 'exam', 'user']


class ExamAttemptLogViewSet(viewsets.ModelViewSet):
    permission_classes = (ExamActivePermissions, KolibriAuthPermissions, )
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend, filters.OrderingFilter)
    queryset = ExamAttemptLog.objects.all()
    serializer_class = ExamAttemptLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = ExamAttemptFilter


class ExamLogFilter(BaseLogFilter):
    collection = ModelChoiceFilter(method="filter_collection", queryset=Collection.objects.all())

    def filter_collection(self, queryset, collection):
        return HierarchyRelationsFilter(queryset).filter_by_hierarchy(
            target_user=F('user'),
            ancestor_collection=collection,
        )

    class Meta:
        model = ExamLog
        fields = ['user', 'exam']


class ExamLogViewSet(viewsets.ModelViewSet):
    permission_classes = (KolibriAuthPermissions,)
    filter_backends = (KolibriAuthPermissionsFilter, DjangoFilterBackend)
    queryset = ExamLog.objects.all()
    serializer_class = ExamLogSerializer
    pagination_class = OptionalPageNumberPagination
    filter_class = ExamLogFilter
Python
0.000001
@@ -5943,16 +5943,22 @@ ueryset, + name, collect
c13014c18496f35c4c94f156a18d442d3859f73b
Add assembly testing module
testing_ops.py
testing_ops.py
Python
0
@@ -0,0 +1,648 @@ +from __future__ import absolute_import, print_function, division%0A%0Afrom firedrake import *%0A%0Amesh = UnitSquareMesh(2, 2, quadrilateral=False)%0An = FacetNormal(mesh)%0A%0Adegree = 1%0AV = FunctionSpace(mesh, %22RT%22, degree)%0AU = FunctionSpace(mesh, %22DG%22, degree - 1)%0AW = V * U%0A%0Au, p = TrialFunctions(W)%0Av, q = TestFunctions(W)%0A%0Aa = (dot(u, v) + div(v)*p + q*div(u))*dx%0A%0Ax = SpatialCoordinate(mesh)%0Af = Function(U).assign(0)%0A%0AL = f*q*dx + 42*dot(v, n)*ds(4)%0A%0Abcs = %5BDirichletBC(W.sub(0), Expression((%220%22, %220%22)), (1, 2))%5D%0AL = assemble(L)%0Ay = Function(W)%0Afor bc in bcs:%0A bc.apply(y)%0A%0Arhs = assemble(L - assemble(action(a, y)))%0A%0Afor bc in bcs:%0A bc.apply(rhs)%0A
71afe426a84789b65953ccd057014d17a11de859
Add a command to extend the generation_high from generation 1 to 2
mzalendo/core/management/commands/core_extend_areas_to_generation_2.py
mzalendo/core/management/commands/core_extend_areas_to_generation_2.py
Python
0.002458
@@ -0,0 +1,858 @@ +# The import of data into Kenyan MapIt had the constituencies in%0A# generation 2, while all the other area types were in generation 1.%0A# This is unfortunate since it makes it appear to later import scripts%0A# that the district type disappeared between generation 1 and 3.%0A#%0A# This script just extends the generation_high to generation 2 for%0A# every area where it was set to generation 2.%0A%0Afrom django.core.management.base import NoArgsCommand%0Afrom mapit.models import Area, Generation, Type, NameType, Country, CodeType%0A%0Aclass Command(NoArgsCommand):%0A help = 'Change all genertion_high=1 to generation_high=2'%0A def handle_noargs(self, **options):%0A g1 = Generation.objects.get(id=1)%0A g2 = Generation.objects.get(id=2)%0A for area in Area.objects.filter(generation_high=g1):%0A area.generation_high = g2%0A area.save()%0A
17fddbb1df78420aaebb811785b8e99769b45fa9
Create keyradius.py
bin/keyradius.py
bin/keyradius.py
Python
0
@@ -0,0 +1,919 @@ +%0Aimport csv%0Afrom numpy import sqrt%0A%0A# Midpoint for Key Bridge%0Ax1 = 38.902543%0Ay1 = -77.069830%0A%0A# Threshold marker %0Ax2 = 38.900122%0Ay2 = -77.071176%0A%0A%0Aradius_squared = (x2 - x1)**2 + (y2 - y1)**2%0Aradius = sqrt(radius_squared)%0A%0Adata_file = open(%22IncidentData_24OCT14.csv%22, %22rU%22)%0Adata = csv.DictReader(data_file)%0A%0Aresults = %5B%5D%0A%0Afor row in data:%0A%09lat = float(row%5B%22Latitude%22%5D)%0A%09lon = float(row%5B%22Longitude%22%5D)%0A%0A%09distance_squared = ((lat) - x1)**2 + ((lon) - y1)**2%0A%09distance = sqrt(distance_squared)%0A%0A%09if distance %3C= radius:%0A%09%09results.append(%7B%22Location%22: row%5B%22Location%22%5D, %22Type%22: row%5B%22Standardized Type%22%5D, %0A%09%09%09%22Start Time%22: row%5B%22Time Opened%22%5D, %22End Time%22: row%5B%22Time Closed%22%5D, %0A%09%09%09%22Latitude%22: row%5B%22Latitude%22%5D, %22Longitude%22: row%5B%22Longitude%22%5D%7D)%0A%0Af = open('radiuskeybridge.csv','wb')%0Aw = csv.DictWriter(f, fieldnames = %5B%22Location%22, %22Type%22, %22Start Time%22, %22End Time%22, %22Latitude%22, %22Longitude%22%5D)%0Aw.writeheader()%0Aw.writerows(results)%09%0A%0A%0A%0A%0A
431fab0520c38a8698c8a1569f31a860012cfd7c
fix reset_password show twice in reset email (#134)
CTFd/auth.py
CTFd/auth.py
from flask import render_template, request, redirect, abort, jsonify, url_for, session, Blueprint
from CTFd.utils import sha512, is_safe_url, authed, mailserver, sendmail, can_register, get_config, verify_email
from CTFd.models import db, Teams

from itsdangerous import TimedSerializer, BadTimeSignature, Signer, BadSignature
from passlib.hash import bcrypt_sha256
from flask import current_app as app

import logging
import time
import re
import os

auth = Blueprint('auth', __name__)


@auth.route('/confirm', methods=['POST', 'GET'])
@auth.route('/confirm/<data>', methods=['GET'])
def confirm_user(data=None):
    if not get_config('verify_emails'):
        return redirect(url_for('challenges.challenges_view'))
    if data and request.method == "GET":  ## User is confirming email account
        try:
            s = Signer(app.config['SECRET_KEY'])
            email = s.unsign(data.decode('base64'))
        except BadSignature:
            return render_template('confirm.html', errors=['Your confirmation link seems wrong'])
        team = Teams.query.filter_by(email=email).first()
        team.verified = True
        db.session.commit()
        db.session.close()
        if authed():
            return redirect(url_for('challenges.challenges_view'))
        return redirect(url_for('auth.login'))
    if not data and request.method == "GET":  ## User has been directed to the confirm page because his account is not verified
        team = Teams.query.filter_by(id=session['id']).first()
        if team.verified:
            return redirect(url_for('views.profile'))
        return render_template('confirm.html', team=team)


@auth.route('/reset_password', methods=['POST', 'GET'])
@auth.route('/reset_password/<data>', methods=['POST', 'GET'])
def reset_password(data=None):
    if data is not None and request.method == "GET":
        return render_template('reset_password.html', mode='set')
    if data is not None and request.method == "POST":
        try:
            s = TimedSerializer(app.config['SECRET_KEY'])
            name = s.loads(data.decode('base64'), max_age=1800)
        except BadTimeSignature:
            return render_template('reset_password.html', errors=['Your link has expired'])
        team = Teams.query.filter_by(name=name).first()
        team.password = bcrypt_sha256.encrypt(request.form['password'].strip())
        db.session.commit()
        db.session.close()
        return redirect(url_for('auth.login'))

    if request.method == 'POST':
        email = request.form['email'].strip()
        team = Teams.query.filter_by(email=email).first()
        if not team:
            return render_template('reset_password.html', errors=['Check your email'])
        s = TimedSerializer(app.config['SECRET_KEY'])
        token = s.dumps(team.name)
        text = """
Did you initiate a password reset?

{0}/reset_password/{1}

""".format(url_for('auth.reset_password', _external=True), token.encode('base64'))

        sendmail(email, text)

        return render_template('reset_password.html', errors=['Check your email'])
    return render_template('reset_password.html')


@auth.route('/register', methods=['POST', 'GET'])
def register():
    if not can_register():
        return redirect(url_for('auth.login'))
    if request.method == 'POST':
        errors = []
        name = request.form['name']
        email = request.form['email']
        password = request.form['password']

        name_len = len(name) == 0
        names = Teams.query.add_columns('name', 'id').filter_by(name=name).first()
        emails = Teams.query.add_columns('email', 'id').filter_by(email=email).first()
        pass_short = len(password) == 0
        pass_long = len(password) > 128
        valid_email = re.match("[^@]+@[^@]+\.[^@]+", request.form['email'])

        if not valid_email:
            errors.append("That email doesn't look right")
        if names:
            errors.append('That team name is already taken')
        if emails:
            errors.append('That email has already been used')
        if pass_short:
            errors.append('Pick a longer password')
        if pass_long:
            errors.append('Pick a shorter password')
        if name_len:
            errors.append('Pick a longer team name')

        if len(errors) > 0:
            return render_template('register.html', errors=errors, name=request.form['name'], email=request.form['email'], password=request.form['password'])
        else:
            with app.app_context():
                team = Teams(name, email.lower(), password)
                db.session.add(team)
                db.session.commit()
                db.session.flush()

                session['username'] = team.name
                session['id'] = team.id
                session['admin'] = team.admin
                session['nonce'] = sha512(os.urandom(10))

                if mailserver() and get_config('verify_emails'):
                    verify_email(team.email)
                else:
                    if mailserver():
                        sendmail(request.form['email'], "You've successfully registered for {}".format(get_config('ctf_name')))

        db.session.close()

        logger = logging.getLogger('regs')
        logger.warn("[{0}] {1} registered with {2}".format(time.strftime("%m/%d/%Y %X"), request.form['name'].encode('utf-8'), request.form['email'].encode('utf-8')))
        return redirect(url_for('challenges.challenges_view'))
    else:
        return render_template('register.html')


@auth.route('/login', methods=['POST', 'GET'])
def login():
    if request.method == 'POST':
        errors = []
        name = request.form['name']
        team = Teams.query.filter_by(name=name).first()
        if team and bcrypt_sha256.verify(request.form['password'], team.password):
            try:
                session.regenerate()  # NO SESSION FIXATION FOR YOU
            except:
                pass  # TODO: Some session objects don't implement regenerate :(
            session['username'] = team.name
            session['id'] = team.id
            session['admin'] = team.admin
            session['nonce'] = sha512(os.urandom(10))
            db.session.close()

            logger = logging.getLogger('logins')
            logger.warn("[{0}] {1} logged in".format(time.strftime("%m/%d/%Y %X"), session['username'].encode('utf-8')))

            if request.args.get('next') and is_safe_url(request.args.get('next')):
                return redirect(request.args.get('next'))
            return redirect(url_for('challenges.challenges_view'))
        else:
            errors.append("That account doesn't seem to exist")
            db.session.close()
            return render_template('login.html', errors=errors)
    else:
        db.session.close()
        return render_template('login.html')


@auth.route('/logout')
def logout():
    if authed():
        session.clear()
    return redirect(url_for('views.static_html'))
Python
0.001005
@@ -2854,31 +2854,16 @@ ? %0A%0A%7B0%7D/ -reset_password/ %7B1%7D%0A%0A%22%22%22
c27e97ea959a9863e57ce12b0afc5fa092562548
Create Character.py
Character.py
Character.py
Python
0
@@ -0,0 +1,1182 @@ +import untangle%0A%0Aclass Character(object):%0A def __init__(self, data):%0A assert isinstance(data, dict)%0A self.__dict__ = data%0A%0A%0Adef get_data(character):%0A '''%0A :param character: String, character name%0A :return: Dictionary, character data%0A '''%0A path = 'tests%5C%5C'%0A filepath = path + character + '.chum5'%0A%0A try:%0A c = untangle.parse(filepath)%0A except IOError:%0A print(%22Error: can't find file or read data%22)%0A%0A data = %7B%0A 'Name': c.character.name.cdata,%0A 'imageURL': str(c.character.notes.cdata),%0A%0A 'Charisma': int(c.character.attributes.attribute%5B4%5D.value.cdata),%0A 'Intelligence': int(c.character.attributes.attribute%5B5%5D.value.cdata),%0A%0A 'Hacking': int(c.character.skills.skill%5B37%5D.rating.cdata),%0A 'Seduction': int(c.character.skills.skill%5B37%5D.rating.cdata)%0A %7D%0A%0A return data%0A%0A## LIMITS WILL NEED TO BE CALCULATED%0A# Inherent Limits Add appropriate attribute(s); calculate as listed below %E2%80%94%0A# Mental %5B(Logic x 2) + Intuition + Willpower%5D / 3 (round up) %E2%80%94%0A# Physical %5B(Strength x 2) + Body + Reaction%5D / 3 (round up) %E2%80%94%0A# Social %5B(Charisma x 2) + Willpower + Essence%5D / 3 (round up)%0A##%0A
983d6b12db4050ff7d252e1717adbfe39add2f49
Add missing file
Allura/allura/lib/widgets/auth_widgets.py
Allura/allura/lib/widgets/auth_widgets.py
Python
0
@@ -0,0 +1,876 @@ +import ew as ew_core%0Aimport ew.jinja2_ew as ew%0Afrom ew.core import validator%0A%0Afrom pylons import request%0Afrom formencode import Invalid%0Afrom webob import exc%0A%0Afrom .forms import ForgeForm%0A%0Afrom allura.lib import plugin%0A%0Aclass LoginForm(ForgeForm):%0A submit_text='Login'%0A style='wide'%0A class fields(ew_core.NameList):%0A username = ew.TextField(label='Username')%0A password = ew.PasswordField(label='Password')%0A class hidden_fields(ew_core.NameList):%0A return_to = ew.HiddenField()%0A%0A @validator%0A def validate(self, value, state=None):%0A try:%0A value%5B'username'%5D = plugin.AuthenticationProvider.get(request).login()%0A except exc.HTTPUnauthorized:%0A msg = 'Invalid login'%0A raise Invalid(%0A msg,%0A dict(username=value%5B'username'%5D),%0A None)%0A return value%0A
c349e8df72c09a98fe6b038c763c41008bef70a2
add migration for reimporting universities
hs_dictionary/migrations/0005_reimport_universities.py
hs_dictionary/migrations/0005_reimport_universities.py
Python
0
@@ -0,0 +1,720 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Aimport csv%0Aimport os%0A%0Afrom django.db import migrations, models%0A%0Afrom hs_dictionary.models import University%0A%0A%0Adef forwards(apps, schema_editor):%0A University.objects.all().delete()%0A with open(os.path.dirname(__file__) + %22/world-universities.csv%22) as f:%0A reader = csv.reader(f)%0A for i, line in enumerate(reader):%0A University.objects.create(%0A name=line%5B1%5D,%0A country_code=line%5B0%5D,%0A url=line%5B2%5D%0A )%0A%0A%0Aclass Migration(migrations.Migration):%0A dependencies = %5B%0A ('hs_dictionary', '0004_merge'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(forwards)%0A %5D%0A%0A
d77e8df3fa913e7a60c1870e49a6b6197d7a9125
Add tests for zerver/views/realm_emoji.py.
zerver/tests/test_realm_emoji.py
zerver/tests/test_realm_emoji.py
Python
0
@@ -0,0 +1,1923 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import absolute_import%0A%0Afrom zerver.lib.actions import get_realm, check_add_realm_emoji%0Afrom zerver.lib.test_helpers import AuthedTestCase%0Aimport ujson%0A%0Aclass RealmEmojiTest(AuthedTestCase):%0A%0A def test_list(self):%0A self.login(%[email protected]%22)%0A realm = get_realm('zulip.com')%0A check_add_realm_emoji(realm, %22my_emoji%22, %22https://example.com/my_emoji%22)%0A result = self.client.get(%22/json/realm/emoji%22)%0A self.assert_json_success(result)%0A self.assertEqual(200, result.status_code)%0A content = ujson.loads(result.content)%0A self.assertEqual(len(content%5B%22emoji%22%5D), 1)%0A%0A def test_upload(self):%0A self.login(%[email protected]%22)%0A data = %7B%22name%22: %22my_emoji%22, %22url%22: %22https://example.com/my_emoji%22%7D%0A result = self.client_put(%22/json/realm/emoji%22, info=data)%0A self.assert_json_success(result)%0A self.assertEqual(200, result.status_code)%0A%0A result = self.client.get(%22/json/realm/emoji%22)%0A content = ujson.loads(result.content)%0A self.assert_json_success(result)%0A self.assertEqual(len(content%5B%22emoji%22%5D), 1)%0A%0A def test_upload_exception(self):%0A self.login(%[email protected]%22)%0A data = %7B%22name%22: %22my_em*/oji%22, %22url%22: %22https://example.com/my_emoji%22%7D%0A result = self.client_put(%22/json/realm/emoji%22, info=data)%0A self.assert_json_error(result, u'Invalid characters in Emoji name')%0A%0A def test_delete(self):%0A self.login(%[email protected]%22)%0A realm = get_realm('zulip.com')%0A check_add_realm_emoji(realm, %22my_emoji%22, %22https://example.com/my_emoji%22)%0A result = self.client_delete(%22/json/realm/emoji/my_emoji%22)%0A self.assert_json_success(result)%0A%0A result = self.client.get(%22/json/realm/emoji%22)%0A content = ujson.loads(result.content)%0A self.assert_json_success(result)%0A self.assertEqual(len(content%5B%22emoji%22%5D), 0)%0A
6cb66978e44d447fd210dd92de194659b5f33fb3
Add debug util for WORKSPACE.
bazel/debug_repository.bzl
bazel/debug_repository.bzl
Python
0.000433
@@ -0,0 +1,830 @@ +%22%22%22Debug util for repository definitions.%22%22%22%0A%0Adef debug_repository(repo, *fields):%0A %22%22%22debug_repository(repo) identifies which version of a repository has been%0A defined in the WORKSPACE by printing some of its fields. Example:%0A%0A # at the bottom of the WORKSPACE file%0A load(%22//bazel:debug_repository.bzl%22, %22debug_repository%22)%0A%0A debug_repository(%22org_golang_x_net%22)%0A%0A If needed, you can override the printed fields by passing additional parameters:%0A%0A debug_repository(%22io_grpc_grpc_java%22, %22patches%22, %22urls%22)%0A %22%22%22%0A%0A if len(fields) == 0:%0A fields = %5B%22branch%22, %22commit%22, %22tag%22, %22url%22, %22urls%22%5D%0A%0A rule = native.existing_rule(repo)%0A if rule == None:%0A print(repo, %22not found%22)%0A return%0A%0A for f in fields:%0A if f in rule and len(rule%5Bf%5D) %3E 0:%0A print(repo, f, rule%5Bf%5D)%0A
316bef330c0770739e95f9c1108e07697655d27e
fix when multi python version bug
cobra/__version__.py
cobra/__version__.py
import sys import platform __title__ = 'cobra' __description__ = 'Code Security Audit' __url__ = 'https://github.com/wufeifei/cobra' __issue_page__ = 'https://github.com/wufeifei/cobra/issues/new' __python_version__ = sys.version.split()[0] __platform__ = platform.platform() __version__ = '2.0.0-alpha' __author__ = 'Feei' __author_email__ = '[email protected]' __license__ = 'MIT License' __copyright__ = 'Copyright (C) 2017 Feei. All Rights Reserved' __introduction__ = """ ,---. | | ,---.|---.,---.,---. | | || || ,---| `---``---``---`` `---^ v{version} GitHub: https://github.com/wufeifei/cobra Cobra is a static code analysis system that automates the detecting vulnerabilities and security issue.""".format(version=__version__) __epilog__ = """Usage: {m} -t {td} {m} -t {td} -r cvi-190001,cvi-190002 {m} -t {td} -f json -o /tmp/report.json {m} -t {tg} -f json -o [email protected] {m} -t {tg} -f json -o http://push.to.com/api sudo {m} -H 127.0.0.1 -P 80 """.format(m='./cobra.py', td='tests/vulnerabilities', tg='https://github.com/ethicalhack3r/DVWA')
Python
0.000001
@@ -787,16 +787,23 @@ sage:%0A +python %7Bm%7D -t %7B @@ -808,16 +808,23 @@ %7Btd%7D%0A +python %7Bm%7D -t %7B @@ -854,16 +854,23 @@ 90002%0A +python %7Bm%7D -t %7B @@ -900,24 +900,31 @@ ort.json %0A +python %7Bm%7D -t %7Btg%7D @@ -950,16 +950,23 @@ i.cn %0A +python %7Bm%7D -t %7B @@ -1011,16 +1011,23 @@ %0A sudo +python %7Bm%7D -H 1 @@ -1059,10 +1059,8 @@ (m=' -./ cobr
1483b352683ecf126e1063c3a6fa2f07dcdb7720
add new module.wq
cno/core/gtt.py
cno/core/gtt.py
Python
0.000001
@@ -0,0 +1,2397 @@ +import numpy as np%0Aimport pylab%0Aimport pandas as pd%0Afrom biokit.rtools import RSession%0Afrom cno.core import CNORBase%0Afrom easydev import TempFile%0A__all__ = %5B'GTTBool'%5D%0A%0A%0Aclass GTTBool(CNORBase):%0A %22%22%22%0A%0A%0A ::%0A%0A from cno import *%0A c = cnorbool.CNORbool(cnodata(%22PKN-ToyMMB.sif%22), cnodata(%22MD-ToyMMB.csv%22), verboseR=False)%0A c.optimise(reltol=0.5)%0A c.optimise(reltol=0.5)%0A g = gtt.GTTBool(c._model, c.data, c.models, c.results.scores)%0A d = g.get_gtt()%0A %22%22%22%0A def __init__(self, model, data, models, scores, verboseR=False):%0A %22%22%22%0A Note that once, grouped, the scores should be identical albeit the%0A model size%0A%0A %5Bscores%5Bi%5D for i in grouped.groups.values()%5B10%5D%5D%0A%0A%0A :param model: a instance of :class:%60CNOGraph%60%0A :param data: an instance of :class:%60XMIDAS%60%0A :param models: an instance of compatible :class:%60Models%60%0A :param scores: the scores of each model.%0A%0A%0A%0A%0A%0A %22%22%22%0A CNORBase.__init__(self, verboseR)%0A self.models = models%0A self.scores = scores%0A self.model = model%0A self.data = data # a MIDAS file%0A%0A%0A def _init(self):%0A fhmodel = TempFile()%0A fhdata = TempFile()%0A self.model.to_sif(fhmodel.name)%0A self.data.to_midas(fhdata.name)%0A self.session.run(%22library(CellNOptR)%22)%0A self.session.run('model=readSIF(%22%25s%22)' %25 fhmodel.name)%0A self.session.run('cnolist=CNOlist(%22%25s%22)' %25 fhdata.name)%0A%0A def _get_sim(self, bs):%0A self.session.bs1 = bs%0A script = %22%22%22%0A png()%0A output = cutAndPlot(cnolist, model, list(bs1), plotPDF=F)%0A dev.off()%0A %22%22%22%0A self.session.run(script)%0A res = self.session.output%5B'simResults'%5D%5B0%5D%0A res = list(res%5B't0'%5D.flatten() ) + list(res%5B't1'%5D.flatten())%0A return res%0A%0A def get_gtt(self):%0A print(%22init R library%22)%0A self._init()%0A N = len(self.models)%0A from easydev import progress_bar%0A b = progress_bar(N)%0A d = %7B%7D%0A for i in range(0, N):%0A res = np.array(self._get_sim(self.models.df.ix%5Bi%5D.values))%0A b.animate(i, N)%0A d%5Bi%5D = res%0A%0A df = pd.DataFrame(d).transpose()%0A grouped = df.groupby(list(df.columns))%0A pylab.hist(%5Blen(this) for this in grouped.groups.values()%5D, 100)%0A return %7B'simulation': d, 'grouped':grouped%7D%0A%0A%0A
e307bf72a8aa21088d491c90efd9a731014e63f1
move states into separate file
stacks/states.py
stacks/states.py
Python
0.000004
@@ -0,0 +1,546 @@ +FAILED_STACK_STATES = %5B%0A 'CREATE_FAILED',%0A 'ROLLBACK_FAILED',%0A 'DELETE_FAILED',%0A 'UPDATE_ROLLBACK_FAILED'%0A%5D%0ACOMPLETE_STACK_STATES = %5B%0A 'CREATE_COMPLETE',%0A 'UPDATE_COMPLETE',%0A%5D%0AROLLBACK_STACK_STATES = %5B%0A 'ROLLBACK_COMPLETE',%0A 'UPDATE_ROLLBACK_COMPLETE',%0A%5D%0AIN_PROGRESS_STACK_STATES = %5B%0A 'CREATE_IN_PROGRESS',%0A 'ROLLBACK_IN_PROGRESS',%0A 'DELETE_IN_PROGRESS',%0A 'UPDATE_IN_PROGRESS',%0A 'UPDATE_COMPLETE_CLEANUP_IN_PROGRESS',%0A 'UPDATE_ROLLBACK_IN_PROGRESS',%0A 'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS',%0A%5D%0A
13dfb4f7d4972edbc7ccc0e4f62ea3db1a5b16f4
Add ctx module
srw/ctx.py
srw/ctx.py
Python
0.000001
@@ -0,0 +1,72 @@ +def wd2jd(wd):%0A jd_ref = 2453005.5%0A return (wd / 86400.) + jd_ref%0A
19acf7ad2a14b71f672d02cb8cb47a4393665bc7
Add benchmarks
bin/serialize.py
bin/serialize.py
Python
0.00001
@@ -0,0 +1,809 @@ +%22%22%22Timing serializtion of deeply nested geometry collections.%0A%0ATo and from JSON using dumps and loads from Python's json module. I'm%0Ahappy to report that writing such GeoJSON geometry collections is more%0Aexpensive than parsing them and, at least for Python, deeply nested%0Ageometry collections aren't an asymmetric attack vector.%0A%22%22%22%0A%0Afrom json import dumps, loads%0Aimport timeit%0A%0A%0Ageom = %7B'type': 'Point', 'coordinates': %5B0.0, 0.0%5D%7D%0Afor i in range(100):%0A geom = %7B'type': 'GeometryCollection', 'geometries': %5Bgeom%5D%7D%0A%0Atext = dumps(geom)%0A%0A# Time dumps.%0Aprint(%22Dumps%22)%0Aprint(%0A timeit.timeit(%0A %22dumps(geom)%22, setup=%22from __main__ import dumps, geom%22, number=10000))%0A%0A# Time loads.%0Aprint(%22Loads%22)%0Aprint(%0A timeit.timeit(%0A %22loads(text)%22, setup=%22from __main__ import loads, text%22, number=10000))%0A
7c9b97a81d4c8e41ce81cc881d30323dfb1f9c72
Add layer normalization
chainer/links/normalization/layer_normalization.py
chainer/links/normalization/layer_normalization.py
Python
0.000001
@@ -0,0 +1,1462 @@ +from chainer import functions%0Afrom chainer import initializers%0Afrom chainer import link%0Afrom chainer import links%0A%0A%0Aclass LayerNormalization(link.Chain):%0A%0A %22%22%22Layer normalization layer on outputs of linear functions.%0A%0A This is a link of %22Layer Normalization%22. This layer%0A normalizes, scales and shifts input units with :link:%60~chainer.links.Scale%60.%0A%0A Args:%0A size (int): Size of input units.%0A%0A See: %60Layer Normalization %3Chttps://arxiv.org/abs/1607.06450%3E%60_%0A %22%22%22%0A%0A def __init__(self, size, eps=1e-6, initial_gamma=None, initial_beta=None):%0A super(LayerNormalization, self).__init__(%0A scale=links.Scale(axis=1, W_shape=(size, ), bias_term=True),%0A )%0A if initial_gamma is None:%0A initial_gamma = initializers.One()%0A initializers.init_weight(self.scale.W.data, initial_gamma)%0A if initial_beta is None:%0A initial_beta = initializers.Zero()%0A initializers.init_weight(self.scale.bias.b.data, initial_beta)%0A self.eps = eps%0A%0A def normalize(self, x):%0A size = x.shape%5B1%5D%0A mean = functions.broadcast_to(%0A (functions.sum(x, axis=1) / size)%5B:, None%5D,%0A x.shape)%0A std = functions.broadcast_to(functions.sqrt(%0A functions.sum(functions.square(x - mean), axis=1) / size)%5B:, None%5D,%0A x.shape) + self.eps%0A return (x - mean) / std%0A%0A def __call__(self, x):%0A return self.scale(self.normalize(x))%0A
b88b97c7d56506804fc9eb93ce7074454fc492f3
Add the migration for designations.
base/apps/people/migrations/0002_auto_20141223_0316.py
base/apps/people/migrations/0002_auto_20141223_0316.py
Python
0.000001
@@ -0,0 +1,1077 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import models, migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('people', '0001_initial'),%0A %5D%0A%0A operations = %5B%0A migrations.CreateModel(%0A name='Designation',%0A fields=%5B%0A ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),%0A ('name', models.CharField(max_length=60)),%0A ('romanized_name', models.CharField(max_length=60)),%0A ('started', models.DateField(db_index=True)),%0A ('ended', models.DateField(db_index=True, null=True, blank=True)),%0A ('group', models.ForeignKey(related_name=b'designations', to='people.Group')),%0A %5D,%0A options=%7B%0A 'get_latest_by': 'started',%0A %7D,%0A bases=(models.Model,),%0A ),%0A migrations.AlterOrderWithRespectTo(%0A name='designation',%0A order_with_respect_to='group',%0A ),%0A %5D%0A
3b23e35e58c9269a7fb9275aafadb276ba2b30d0
Problem 12 Completed
project_euler_12.py
project_euler_12.py
Python
0.999244
@@ -0,0 +1,949 @@ +'''%0AShayne Hodge%0A2/15/2014%0A%0AProject Euler Problem 12%0A%0An -%3E n/2 (n is even)%0An -%3E 3n + 1 (n is odd)%0ALongest sequence under one million?%0A'''%0A%0A#currently a brute force implementation with no niceities%0A# including, apparently, spell check in the comments%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0A%0Adef make_hist(results):%0A plt.hist(results, bins=100)%0A%0Adef next_number(n):%0A next = n/2 if n%252 == 0 else (3*n+1)%0A return next%0A%0Adef check_dict(results):%0A pass%0A # check if results in dictionary%0A # return if true else return empty?%0A%0Aending = 1000000%0Aresults = dict()%0Alengths = np.empty((ending-1,1))%0Afor k in range(1, ending):%0A temp = %5Bk%5D%0A n = k%0A while n != 1:%0A n = next_number(n)%0A temp.append(n)%0A results%5Bk%5D = temp%0A lengths%5Bk-1%5D = len(temp)%0A%0Amax_length = np.max(lengths)%0Amax_length_idx = np.argmax(lengths)%0Aprint 'Max length is '+str(max_length)+' found at '+str(max_length_idx+1)+'.'%0Amake_hist(lengths)
f3dbe9bb2aa627b3485c2ed44f889a1bc5463081
Bump to version 3.1.3
rest_framework/__init__.py
rest_framework/__init__.py
""" ______ _____ _____ _____ __ | ___ \ ___/ ___|_ _| / _| | | | |_/ / |__ \ `--. | | | |_ _ __ __ _ _ __ ___ _____ _____ _ __| |__ | /| __| `--. \ | | | _| '__/ _` | '_ ` _ \ / _ \ \ /\ / / _ \| '__| |/ / | |\ \| |___/\__/ / | | | | | | | (_| | | | | | | __/\ V V / (_) | | | < \_| \_\____/\____/ \_/ |_| |_| \__,_|_| |_| |_|\___| \_/\_/ \___/|_| |_|\_| """ __title__ = 'Django REST framework' __version__ = '3.1.2' __author__ = 'Tom Christie' __license__ = 'BSD 2-Clause' __copyright__ = 'Copyright 2011-2015 Tom Christie' # Version synonym VERSION = __version__ # Header encoding (see RFC5987) HTTP_HEADER_ENCODING = 'iso-8859-1' # Default datetime input and output formats ISO_8601 = 'iso-8601'
Python
0
@@ -488,17 +488,17 @@ = '3.1. -2 +3 '%0A__auth
57714fd6838f48920f7093a24ec4d85abf4278ee
Fix merge issue
src/naarad/naarad_imports.py
src/naarad/naarad_imports.py
# coding=utf-8 """ © 2013 LinkedIn Corp. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. """ from naarad.graphing import matplotlib_naarad from naarad.metrics.jmeter_metric import JmeterMetric from naarad.reporting.report import Report #Custom metrics metric_classes = { #'MyMetric' : MyMetricParserClass 'JMETER' : JmeterMetric } graphing_modules = { 'matplotlib' : matplotlib_naarad } <<<<<<< HEAD reporting_modules = { 'report' : Report } ======= important_sub_metrics_import = { 'GC' : ('GC', 'used'), 'SAR-cpuusage' : ('%sys', '%usr') } >>>>>>> upstream/master
Python
0.000001
@@ -801,20 +801,8 @@ %7D%0A -%3C%3C%3C%3C%3C%3C%3C HEAD %0Arep @@ -848,15 +848,8 @@ t%0A%7D%0A -======= %0Aimp @@ -954,28 +954,4 @@ %7D%0A -%3E%3E%3E%3E%3E%3E%3E upstream/master%0A
6de41e5acfe55b6cb7698f81e3031079a530b1af
test for cli mode
tests/test_cli_mode.py
tests/test_cli_mode.py
Python
0.000001
@@ -0,0 +1,335 @@ +import unittest%0Afrom unittest.mock import Mock%0A%0Afrom rawdisk.ui.cli.cli_mode import CliMode, CliShell%0A%0A%0Aclass CliModeTest(unittest.TestCase):%0A def test_initialize_loads_fs_plugins(self):%0A session = Mock()%0A cli = CliShell(session=session)%0A cli.initialize()%0A session.load_plugins.assert_called_once_with()%0A
f007cacdba7bb3e46a6c4c730dde50dc495d9c64
Add hypothesis test for metasync
tests/test_metasync.py
tests/test_metasync.py
# -*- coding: utf-8 -*- import pytest from vdirsyncer.metasync import MetaSyncConflict, metasync from vdirsyncer.storage.memory import MemoryStorage from . import blow_up def test_irrelevant_status(): a = MemoryStorage() b = MemoryStorage() status = {'foo': 'bar'} metasync(a, b, status, keys=()) assert not status def test_basic(monkeypatch): a = MemoryStorage() b = MemoryStorage() status = {} a.set_meta('foo', 'bar') metasync(a, b, status, keys=['foo']) assert a.get_meta('foo') == b.get_meta('foo') == 'bar' a.set_meta('foo', 'baz') metasync(a, b, status, keys=['foo']) assert a.get_meta('foo') == b.get_meta('foo') == 'baz' monkeypatch.setattr(a, 'set_meta', blow_up) monkeypatch.setattr(b, 'set_meta', blow_up) metasync(a, b, status, keys=['foo']) assert a.get_meta('foo') == b.get_meta('foo') == 'baz' monkeypatch.undo() monkeypatch.undo() b.set_meta('foo', None) metasync(a, b, status, keys=['foo']) assert not a.get_meta('foo') and not b.get_meta('foo') def test_conflict(): a = MemoryStorage() b = MemoryStorage() status = {} a.set_meta('foo', 'bar') b.set_meta('foo', 'baz') with pytest.raises(MetaSyncConflict): metasync(a, b, status, keys=['foo']) assert a.get_meta('foo') == 'bar' assert b.get_meta('foo') == 'baz' assert not status def test_conflict_same_content(): a = MemoryStorage() b = MemoryStorage() status = {} a.set_meta('foo', 'bar') b.set_meta('foo', 'bar') metasync(a, b, status, keys=['foo']) assert a.get_meta('foo') == b.get_meta('foo') == status['foo'] == 'bar' @pytest.mark.parametrize('wins', 'ab') def test_conflict_x_wins(wins): a = MemoryStorage() b = MemoryStorage() status = {} a.set_meta('foo', 'bar') b.set_meta('foo', 'baz') metasync(a, b, status, keys=['foo'], conflict_resolution='a wins' if wins == 'a' else 'b wins') assert a.get_meta('foo') == b.get_meta('foo') == status['foo'] == ( 'bar' if wins == 'a' else 'baz' )
Python
0.000074
@@ -18,16 +18,81 @@ -8 -*-%0A%0A +from hypothesis import given%0Aimport hypothesis.strategies as st%0A%0A import p @@ -2162,8 +2162,974 @@ '%0A )%0A +%0A%0Akeys = st.text(min_size=1).filter(lambda x: x.strip() == x)%0A%0Ametadata = st.dictionaries(%0A keys,%0A st.text()%0A)%0A%0A%0A@given(%0A a=metadata, b=metadata,%0A status=metadata, keys=st.sets(keys),%0A conflict_resolution=st.just('a wins') %7C st.just('b wins')%0A)%0Adef test_fuzzing(a, b, status, keys, conflict_resolution):%0A def _get_storage(m, instance_name):%0A s = MemoryStorage(instance_name=instance_name)%0A s.metadata = m%0A return s%0A%0A a = _get_storage(a, 'A')%0A b = _get_storage(b, 'B')%0A%0A winning_storage = (a if conflict_resolution == 'a wins' else b)%0A expected_values = dict((key, winning_storage.get_meta(key))%0A for key in keys)%0A%0A metasync(a, b, status,%0A keys=keys, conflict_resolution=conflict_resolution)%0A%0A for key in keys:%0A assert a.get_meta(key) == b.get_meta(key) == status.get(key, '')%0A if expected_values%5Bkey%5D:%0A assert status%5Bkey%5D == expected_values%5Bkey%5D%0A
be0cb304047c7a410eac577b8aa2765747991100
add script to summarise output
summary.py
summary.py
Python
0.000002
@@ -0,0 +1,584 @@ +%0Aimport string, sys, glob%0A%0Aidir = sys.argv%5B1%5D%0A%0Afl = glob.glob( '%25s/*.txt' %25 idir )%0A%0Aee = %7B%7D%0Afor f in fl:%0A for l in open(f).readlines():%0A if string.find(l, 'FAILED') != -1:%0A bits = string.split(l, ':' )%0A if len(bits) %3E 3:%0A code = bits%5B0%5D%0A msg = bits%5B3%5D%0A if code not in ee.keys():%0A ee%5Bcode%5D = %5B0,msg%5D%0A ee%5Bcode%5D%5B0%5D += 1%0A if ee%5Bcode%5D%5B1%5D != msg:%0A print 'code %25s occurs with multiple messages: %25s, %25s' %25 (code,ee%5Bcode%5D%5B1%5D,msg)%0A else:%0A print bits%0A%0Akeys = ee.keys()%0Akeys.sort()%0Afor k in keys:%0A print k,ee%5Bk%5D%0A%0A%0A%0A%0A
6d90ccd7d6f03630106f78ec7d75666429e26e45
Add an example workloads module
configs/example/arm/workloads.py
configs/example/arm/workloads.py
Python
0.000003
@@ -0,0 +1,3540 @@ +# Copyright (c) 2020 ARM Limited%0A# All rights reserved.%0A#%0A# The license below extends only to copyright in the software and shall%0A# not be construed as granting a license to any other intellectual%0A# property including but not limited to intellectual property relating%0A# to a hardware implementation of the functionality of the software%0A# licensed hereunder. You may use the software subject to the license%0A# terms below provided that you ensure that this notice is replicated%0A# unmodified and in its entirety in all distributions of the software,%0A# modified or unmodified, in source code or in binary form.%0A#%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions are%0A# met: redistributions of source code must retain the above copyright%0A# notice, this list of conditions and the following disclaimer;%0A# redistributions in binary form must reproduce the above copyright%0A# notice, this list of conditions and the following disclaimer in the%0A# documentation and/or other materials provided with the distribution;%0A# neither the name of the copyright holders nor the names of its%0A# contributors may be used to endorse or promote products derived from%0A# this software without specific prior written permission.%0A#%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS%0A# %22AS IS%22 AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT%0A# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR%0A# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT%0A# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,%0A# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT%0A# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,%0A# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY%0A# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT%0A# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE%0A# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.%0A#%0A%0Afrom __future__ import print_function%0Afrom __future__ import absolute_import%0A%0Aimport m5%0Afrom m5.objects import *%0Afrom m5.options import *%0A%0Afrom common.SysPaths import binary, disk%0A%0Aclass ArmBaremetal(ArmFsWorkload):%0A %22%22%22 Baremetal workload %22%22%22%0A atags_addr = 0%0A%0A def __init__(self, obj, system, **kwargs):%0A super(ArmBaremetal, self).__init__(**kwargs)%0A%0A self.object_file = obj%0A%0Aclass ArmTrustedFirmware(ArmFsWorkload):%0A %22%22%22%0A Arm Trusted Firmware (TFA) workload.%0A%0A It models the firmware design described at:%0A%0A https://trustedfirmware-a.readthedocs.io/en/latest/design/firmware-design.html%0A%0A The Workload is expecting to find a set of firmare images under%0A the M5_PATH/binaries path. Those images are:%0A * bl1.bin (BL1 = Stage 1 Bootloader)%0A * fip.bin (FIP = Firmware Image Package):%0A BL2, BL31, BL33 binaries compiled under a singe package%0A%0A These are the results of the compilation of Arm Trusted Firmware.%0A https://github.com/ARM-software/arm-trusted-firmware%0A%0A %22%22%22%0A atags_addr = 0%0A%0A def __init__(self, obj, system, **kwargs):%0A super(ArmTrustedFirmware, self).__init__(**kwargs)%0A%0A self.extras = %5B binary('bl1.bin'), binary('fip.bin'), %5D%0A self.extras_addrs = %5B%0A system.realview.bootmem.range.start,%0A system.realview.flash0.range.start%0A %5D%0A%0A # Arm Trusted Firmware will provide a PSCI implementation%0A system._have_psci = True%0A
d38148097b96ecf7681d0e6c5f7dbc0de5c4b16b
Create backpackI.py
LintCode/backpackI.py
LintCode/backpackI.py
Python
0.000006
@@ -0,0 +1,1879 @@ +'''%0AGiven n items with size Ai, an integer m denotes the size of a backpack. How full you can fill this backpack?%0A%0AHave you met this question in a real interview? Yes%0AExample%0AIf we have 4 items with size %5B2, 3, 5, 7%5D, the backpack size is 11, we can select %5B2, 3, 5%5D, so that the max size we can fill this backpack is 10. If the backpack size is 12. we can select %5B2, 3, 7%5D so that we can fulfill the backpack.%0A%0AYou function should return the max size we can fill in the given backpack.%0A%0ANote%0AYou can not divide any item into small pieces.%0A%0AChallenge%0AO(n x m) time and O(m) memory.%0A%0AO(n x m) memory is also acceptable if you do not know how to optimize memory.%0A%0A'''%0A%0Aclass Solution_MLE:%0A # @param m: An integer m denotes the size of a backpack%0A # @param A: Given n items with size A%5Bi%5D%0A # @return: The maximum size%0A def backPack(self, m, A):%0A if not A: return 0%0A n = len(A)%0A dp = %5B%5B0 for i in xrange(m+1)%5D for j in xrange(n+1)%5D%0A for i in xrange(1, n+1):%0A for j in xrange(1, m+1):%0A if j %3C A%5Bi-1%5D:%0A dp%5Bi%5D%5Bj%5D = dp%5Bi-1%5D%5Bj%5D%0A else:%0A dp%5Bi%5D%5Bj%5D = max(dp%5Bi-1%5D%5Bj%5D, dp%5Bi-1%5D%5Bj-A%5Bi-1%5D%5D + A%5Bi-1%5D)%0A return dp%5Bn%5D%5Bm%5D%0A%0A%0Aclass Solution:%0A # @param m: An integer m denotes the size of a backpack%0A # @param A: Given n items with size A%5Bi%5D%0A # @return: The maximum size%0A def backPack(self, m, A):%0A if not A: return 0%0A n = len(A)%0A dp = %5B0 for i in xrange(m+1)%5D%0A for i in xrange(1, n+1):%0A for j in xrange(m, 0, -1):%0A if j %3E= A%5Bi-1%5D:%0A dp%5Bj%5D = max(dp%5Bj%5D, dp%5Bj-A%5Bi-1%5D%5D + A%5Bi-1%5D)%0A return dp%5Bm%5D%0A%0A# dp%5Bi%5D%5Bj%5D means we put the first i items for backpack j %0A# function: if j %3E= A%5Bi-1%5D, dp%5Bi%5D%5Bj%5D = max(dp%5Bi-1%5D%5Bj%5D, dp%5Bi-1%5D%5Bj-A%5Bi-1%5D%5D + A%5Bi-1%5D), put the ith item or not %0A# result: dp%5Bn%5D%5Bm%5D%0A %0A
6a65640d1567d3cf2a9dac232e705e4697022987
add migration for hidden col
migrations/versions/388d0cc48e7c_.py
migrations/versions/388d0cc48e7c_.py
Python
0
@@ -0,0 +1,601 @@ +%22%22%22empty message%0A%0ARevision ID: 388d0cc48e7c%0ARevises: 21a633e449ce%0ACreate Date: 2014-11-13 10:49:45.512414%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '388d0cc48e7c'%0Adown_revision = '21a633e449ce'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0Adef upgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.add_column('post', sa.Column('hidden', sa.Integer(), nullable=True))%0A ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A ### commands auto generated by Alembic - please adjust! ###%0A op.drop_column('post', 'hidden')%0A ### end Alembic commands ###%0A
9d0e85a10b1073000d22500938e3d8d65107f062
Update migrations
migrations/versions/70e630638c64_.py
migrations/versions/70e630638c64_.py
Python
0.000001
@@ -0,0 +1,1611 @@ +%22%22%22empty message%0A%0ARevision ID: 70e630638c64%0ARevises: %0ACreate Date: 2017-05-23 13:19:47.177767%0A%0A%22%22%22%0Afrom alembic import op%0Aimport sqlalchemy as sa%0A%0A%0A# revision identifiers, used by Alembic.%0Arevision = '70e630638c64'%0Adown_revision = None%0Abranch_labels = None%0Adepends_on = None%0A%0A%0Adef upgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A op.create_table('stations',%0A sa.Column('id', sa.Integer(), nullable=False),%0A sa.Column('description', sa.String(length=255), nullable=True),%0A sa.Column('latitude', sa.String(length=255), nullable=True),%0A sa.Column('longitude', sa.String(length=255), nullable=True),%0A sa.PrimaryKeyConstraint('id'),%0A sa.UniqueConstraint('description')%0A )%0A op.create_table('users',%0A sa.Column('id', sa.Integer(), nullable=False),%0A sa.Column('name', sa.String(length=55), nullable=True),%0A sa.Column('pwdhash', sa.String(), nullable=True),%0A sa.PrimaryKeyConstraint('id')%0A )%0A op.create_index(op.f('ix_users_name'), 'users', %5B'name'%5D, unique=False)%0A op.create_table('association',%0A sa.Column('user_id', sa.Integer(), nullable=True),%0A sa.Column('station_id', sa.Integer(), nullable=True),%0A sa.ForeignKeyConstraint(%5B'station_id'%5D, %5B'stations.id'%5D, ),%0A sa.ForeignKeyConstraint(%5B'user_id'%5D, %5B'users.id'%5D, )%0A )%0A # ### end Alembic commands ###%0A%0A%0Adef downgrade():%0A # ### commands auto generated by Alembic - please adjust! ###%0A op.drop_table('association')%0A op.drop_index(op.f('ix_users_name'), table_name='users')%0A op.drop_table('users')%0A op.drop_table('stations')%0A # ### end Alembic commands ###%0A
c934c77392b98e7625bc5e03ea6c3c7960cdcd5d
Create syslog_parser.py
syslog_parser.py
syslog_parser.py
Python
0.00062
@@ -0,0 +1,1096 @@ +import re%0Aimport sys%0Aimport argparse%0A%0A__author__ = '[email protected]'%0A__version__ = %22$Revision: 1.6 $%22%0A'''%0ATODO%0A- nothing%0A%0AUSAGE%0Apython syslog_parser.py -i syslog.log -o customerxyz.txt%0A%0A'''%0A# This will allow us to pass command line arguments%0A# FOR HELP - python syslog_test.py --h%0Aparser = argparse.ArgumentParser(description='RPZ Syslog Parser')%0Aparser.add_argument('-i', '--input', help='Input file name', required=True)%0Aparser.add_argument('-o', '--output', help='Output file name', required=True)%0Aargs = parser.parse_args()%0A%0Ainput_file = open(str(args.input))%0An = 0%0Aoutput_file = open(str(args.output), %22w%22)%0Afor line in iter(input_file):%0A m = re.search('(?%3C=%5Cs%5C%5BA%5C%5D%5Csvia%5Cs)(%5CS*)(?=%5C%22%5C%22%5C%22$)', line)%0A if m:%0A n = n + 1%0A print m.group(1)%0A output_file.write(m.group(1))%0A output_file.write(%22%5Cn%22)%0A%0Aprint %22%5B+%5D Found %25s domains in : %25s%22 %25 (n, str(args.input))%0Aprint %22%5B+%5D Please check %25s for the output!%22 %25 str(args.output)%0A%0A# # show values ##%0Aprint (%22Input file: %25s%22 %25 args.input )%0Aprint (%22Output file: %25s%22 %25 args.output )%0A%0Aoutput_file.close()%0Ainput_file.close()%0A
375f63b76fccc89ce0ee9b4246e5fb9a2400d1eb
Add Austrian Versicherungsnummer
stdnum/at/vnr.py
stdnum/at/vnr.py
Python
0.000025
@@ -0,0 +1,2451 @@ +# vnr.py - functions for handling Austrian social security numbers%0A# coding: utf-8%0A#%0A# Copyright (C) 2018 Arthur de Jong%0A#%0A# This library is free software; you can redistribute it and/or%0A# modify it under the terms of the GNU Lesser General Public%0A# License as published by the Free Software Foundation; either%0A# version 2.1 of the License, or (at your option) any later version.%0A#%0A# This library is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU%0A# Lesser General Public License for more details.%0A#%0A# You should have received a copy of the GNU Lesser General Public%0A# License along with this library; if not, write to the Free Software%0A# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA%0A# 02110-1301 USA%0A%0A%22%22%22VNR, SVNR, VSNR (Versicherungsnummer, Austrian social security number).%0A%0AThe Austian Versicherungsnummer is a personal identification number used for%0Asocial security. The number is 10 digits long and consists of a 3 digit%0Aserial, a check digit and 6 digits that usually specify the person's birth%0Adate.%0A%0AMore information:%0A%0A* https://de.wikipedia.org/wiki/Sozialversicherungsnummer#%C3%96sterreich%0A%0A%3E%3E%3E validate('1237 010180')%0A'1237010180'%0A%3E%3E%3E validate('2237 010180')%0ATraceback (most recent call last):%0A ...%0AInvalidChecksum: ...%0A%22%22%22%0A%0Afrom stdnum.exceptions import *%0Afrom stdnum.util import clean%0A%0A%0Adef compact(number):%0A %22%22%22Convert the number to the minimal representation. This strips the%0A number of any valid separators and removes surrounding whitespace.%22%22%22%0A return clean(number, ' ')%0A%0A%0Adef calc_check_digit(number):%0A %22%22%22Calculate the check digit. The fourth digit in the number is%0A ignored.%22%22%22%0A weights = (3, 7, 9, 0, 5, 8, 4, 2, 1, 6)%0A return str(sum(w * int(n) for w, n in zip(weights, number)) %25 11)%0A%0A%0Adef validate(number):%0A %22%22%22Check if the number is a valid VAT number. This checks the length,%0A formatting and check digit.%22%22%22%0A number = compact(number)%0A if not number.isdigit() or number.startswith('0'):%0A raise InvalidFormat()%0A if len(number) != 10:%0A raise InvalidLength()%0A if calc_check_digit(number) != number%5B3%5D:%0A raise InvalidChecksum()%0A return number%0A%0A%0Adef is_valid(number):%0A %22%22%22Check if the number is a valid VAT number.%22%22%22%0A try:%0A return bool(validate(number))%0A except ValidationError:%0A return False%0A
33d3f3f0805fb2e34144eec1870442427c2a12b5
Add initial config management interface for the wheel module
salt/wheel/config.py
salt/wheel/config.py
Python
0
@@ -0,0 +1,610 @@ +'''%0AManage the master configuration file%0A'''%0A%0A# Import python libs%0Aimport os%0A%0A# Import third party libs%0Aimport yaml%0A%0A# Import salt libs%0Aimport salt.config%0A%0A%0Adef values():%0A '''%0A Return the raw values of the config file%0A '''%0A data = salt.config.master_config(__opts__%5B'conf_file'%5D)%0A data.pop('aes')%0A data.pop('token_dir')%0A return data%0A%0A%0Adef apply(key, value):%0A '''%0A Set a single key %0A '''%0A path = __opts__%5B'conf_file'%5D%0A if os.path.isdir(path):%0A path = os.path.join(path, 'master')%0A data = values()%0A data%5Bkey%5D = value%0A yaml.dump(data, default_flow_style=False)%0A
bf6bec04c520241eb6c572b70e2219e989806a2e
sumOfN2 time
sumOfN2.py
sumOfN2.py
Python
0.999478
@@ -0,0 +1,270 @@ +import time%0A%0Adef sumOfN2(n):%0A start = time.time()%0A%0A theSum = 0%0A%0A for i in range(1, n+1):%0A theSum = theSum + 1%0A %0A end = time.time()%0A %0A%0A return theSum, end-start%0A%0Afor i in range(5):%0A print(%22Sum is %25d required %2510.7f seconds%22%25sumOfN2(10000))%0A
9e835d341513d7477b05f19ec2b72499b170db40
Add initial libguestfs module
salt/modules/libguestfs.py
salt/modules/libguestfs.py
Python
0
@@ -0,0 +1,516 @@ +'''%0AInteract with virtual machine images via libguestfs%0A%0A:depends: - libguestfs%0A'''%0A%0A# Import Salt libs%0Aimport salt.utils%0A%0Adef __virtual__():%0A '''%0A Only load if libguestfs python bindings are installed%0A '''%0A if salt.utils.which('guestmount'):%0A return 'guestfs'%0A return False%0A%0A%0Adef seed(location, id_='', config=None):%0A '''%0A Seed a vm image before booting it%0A%0A CLI Example::%0A%0A salt '*' guestfs.seed /tmp/image.qcow2%0A '''%0A if config is None:%0A config = %7B%7D%0A %0A %0A
e2c46d78cc4efe2d8778b0580b37ac95299c4ee1
Implement DefaultConfigMixin
utils/mixin.py
utils/mixin.py
Python
0.000001
@@ -0,0 +1,1276 @@ +from itertools import chain%0A%0Aclass DefaultConfigMixin():%0A%0A @property%0A def _default_config(self):%0A if (hasattr(self.bot_config, 'DEFAULT_CONFIG') and%0A self.name in self.bot_config.DEFAULT_CONFIG):%0A return self.bot_config.DEFAULT_CONFIG%5Bself.name%5D%0A%0A def __init__(self, bot, name=None):%0A super().__init__(bot, name=name)%0A default_config = self._default_config%0A if default_config and not hasattr(self, 'config'):%0A self.configure(default_config)%0A%0A def get_configuration_template(self):%0A default_config = self._default_config%0A if default_config:%0A return default_config%0A elif self.CONFIG_TEMPLATE:%0A return self.CONFIG_TEMPLATE%0A%0A def configure(self, configuration):%0A default_config = self._default_config%0A if configuration and default_config:%0A config = dict(chain(%0A default_config.items(),%0A configuration.items()))%0A elif configuration:%0A config = dict(chain(self.CONFIG_TEMPLATE.items(),%0A configuration.items()))%0A elif default_config:%0A config = default_config%0A else:%0A config = self.CONFIG_TEMPLATE%0A%0A self.config = config%0A
1c631f8a6426a50e2e86d77a9b2729e102c5ad32
add DecomposeComponentsFilter
Lib/ufo2ft/filters/decomposeComponents.py
Lib/ufo2ft/filters/decomposeComponents.py
Python
0
@@ -0,0 +1,1372 @@ +from __future__ import (%0A print_function, division, absolute_import, unicode_literals)%0A%0Afrom fontTools.pens.reverseContourPen import ReverseContourPen%0Afrom fontTools.misc.transform import Transform, Identity%0Afrom fontTools.pens.transformPen import TransformPen%0Afrom ufo2ft.filters import BaseFilter%0A%0A%0Aclass DecomposeComponentsFilter(BaseFilter):%0A%0A def filter(self, glyph, glyphSet=None):%0A if not glyph.components:%0A return False%0A _deepCopyContours(glyphSet, glyph, glyph, Transform())%0A glyph.clearComponents()%0A return True%0A%0A%0Adef _deepCopyContours(glyphSet, parent, component, transformation):%0A %22%22%22Copy contours from component to parent, including nested components.%22%22%22%0A%0A for nested in component.components:%0A _deepCopyContours(%0A glyphSet, parent, glyphSet%5Bnested.baseGlyph%5D,%0A transformation.transform(nested.transformation))%0A%0A if component != parent:%0A if transformation == Identity:%0A pen = parent.getPen()%0A else:%0A pen = TransformPen(parent.getPen(), transformation)%0A # if the transformation has a negative determinant, it will%0A # reverse the contour direction of the component%0A xx, xy, yx, yy = transformation%5B:4%5D%0A if xx*yy - xy*yx %3C 0:%0A pen = ReverseContourPen(pen)%0A%0A component.draw(pen)%0A
f4a53d623177c698bc45fa7404e90a944dd1962c
Fix URL in config_webrx.py
config_webrx.py
config_webrx.py
# -*- coding: utf-8 -*- """ config_webrx: configuration options for OpenWebRX This file is part of OpenWebRX, an open-source SDR receiver software with a web UI. Copyright (c) 2013-2015 by Andras Retzler <[email protected]> This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ In addition, as a special exception, the copyright holders state that config_rtl.py and config_webrx.py are not part of the Corresponding Source defined in GNU AGPL version 3 section 1. (It means that you do not have to redistribute config_rtl.py and config_webrx.py if you make any changes to these two configuration files, and use them for running your web service with OpenWebRX.) """ # ==== Server settings ==== web_port=8073 server_hostname="localhost" # If this contains an incorrect value, the web UI may freeze on load (it can't open websocket) max_clients=20 # ==== Web GUI configuration ==== receiver_name="[Callsign]" receiver_location="Budapest, Hungary" receiver_qra="JN97ML" receiver_asl=200 receiver_ant="Longwire" receiver_device="RTL-SDR" receiver_admin="[email protected]" receiver_gps=(47.000000,19.000000) photo_height=350 photo_title="Panorama of Budapest from Schönherz Zoltán Dormitory" photo_desc=""" You can add your own background photo and receiver information.<br /> Receiver is operated by: <a href="mailto:%[RX_ADMIN]">%[RX_ADMIN]</a><br/> Device: %[RX_DEVICE]<br /> Antenna: %[RX_ANT]<br /> Website: <a href="http://localhost" target="_blank">http://localhost</a> """ # ==== sdr.hu listing ==== # If you want your ham receiver to be listed publicly on sdr.hu, then take the following steps: # 1. Register at: http://sdr.hu/register # 2. You will get an unique key by email. Copy it and paste here: sdrhu_key = "" # 3. Set this setting to True to enable listing: sdrhu_public_listing = False # ==== DSP/RX settings ==== dsp_plugin="csdr" fft_fps=9 fft_size=4096 samp_rate = 250000 center_freq = 145525000 rf_gain = 5 ppm = 0 audio_compression="adpcm" #valid values: "adpcm", "none" fft_compression="adpcm" #valid values: "adpcm", "none" start_rtl_thread=True # ==== I/Q sources (uncomment the appropriate) ==== # >> RTL-SDR via rtl_sdr start_rtl_command="rtl_sdr -s {samp_rate} -f {center_freq} -p {ppm} -g {rf_gain} -".format(rf_gain=rf_gain, center_freq=center_freq, samp_rate=samp_rate, ppm=ppm) format_conversion="csdr convert_u8_f" #start_rtl_command="hackrf_transfer -s {samp_rate} -f {center_freq} -g {rf_gain} -l16 -a0 -r hackrf_pipe & cat hackrf_pipe | nc -vvl 127.0.0.1 8888".format(rf_gain=rf_gain, center_freq=center_freq, samp_rate=samp_rate, ppm=ppm) #format_conversion="csdr convert_i8_f" ## To use a HackRF, first run "mkfifo hackrf_pipe" in the OpenWebRX directory. ## You should also use the csdr git repo from here: ## git clone https://github.com/sgentle/csdr ## git checkout origin/signed_char # >> Sound card SDR (needs ALSA) #I did not have the chance to properly test it. #samp_rate = 96000 #start_rtl_command="arecord -f S16_LE -r {samp_rate} -c2 -".format(samp_rate=samp_rate) #format_conversion="csdr convert_s16_f | csdr gain_ff 30" # >> /dev/urandom test signal source #samp_rate = 2400000 #start_rtl_command="cat /dev/urandom | (pv -qL `python -c 'print int({samp_rate} * 2.2)'` 2>&1)".format(rf_gain=rf_gain, center_freq=center_freq, samp_rate=samp_rate) #format_conversion="csdr convert_u8_f" # >> gr-osmosdr signal source using GNU Radio (follow this guide: https://github.com/simonyiszk/openwebrx/wiki/Using-gr-osmosdr-as-signal-source) start_rtl_command="cat /tmp/osmocom_fifo" format_conversion="" #You can use other SDR hardware as well, by giving your own command that outputs the I/Q samples... shown_center_freq = center_freq #you can change this if you use an upconverter client_audio_buffer_size = 5 #increasing client_audio_buffer_size will: # - also increase the latency # - decrease the chance of audio underruns start_freq = center_freq start_mod = "nfm" #nfm, am, lsb, usb, cw iq_server_port = 4951 #TCP port for ncat to listen on. It will send I/Q data over its connections, for internal use in OpenWebRX. It is only accessible from the localhost by default. #access_log = "~/openwebrx_access.log" #Warning! The settings below are very experimental. csdr_dynamic_bufsize = False # This allows you to change the buffering mode of csdr. csdr_print_bufsizes = False # This prints the buffer sizes used for csdr processes. csdr_through = False # Setting this True will print out how much data is going into the DSP chains.
Python
0.00005
@@ -4179,26 +4179,25 @@ i/Using- -gr-osmosdr +GrOsmoSDR -as-sign
b7e024913d1d1bc87306f4a85b8737a8d5c35ec7
add XML validation utility
sbin/validate_xml.py
sbin/validate_xml.py
Python
0.000001
@@ -0,0 +1,1779 @@ +#!/usr/bin/python%0A# -*- coding: ISO-8859-15 -*-%0A# =================================================================%0A#%0A# $Id$%0A#%0A# Authors: Angelos Tzotsos %[email protected]%3E%0A#%0A# Copyright (c) 2011 Angelos Tzotsos%0A#%0A# Permission is hereby granted, free of charge, to any person%0A# obtaining a copy of this software and associated documentation%0A# files (the %22Software%22), to deal in the Software without%0A# restriction, including without limitation the rights to use,%0A# copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the%0A# Software is furnished to do so, subject to the following%0A# conditions:%0A#%0A# The above copyright notice and this permission notice shall be%0A# included in all copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND,%0A# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES%0A# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND%0A# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT%0A# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,%0A# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING%0A# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR%0A# OTHER DEALINGS IN THE SOFTWARE.%0A#%0A# =================================================================%0A%0Aimport os%0Aimport sys%0A%0Afrom lxml import etree%0A%0Aif len(sys.argv) %3C 3:%0A print 'Usage: %25s %3Cxml%3E %3Cxsd%3E' %25 sys.argv%5B0%5D%0A sys.exit(1)%0A%0Aprint 'Validating %25s against schema %25s' %25 (sys.argv%5B1%5D, sys.argv%5B2%5D)%0A%0Aschema = etree.XMLSchema(etree.parse(sys.argv%5B2%5D))%0Aparser = etree.XMLParser(schema=schema)%0A%0Atry:%0A valid = etree.parse(sys.argv%5B1%5D, parser)%0A print 'Valid XML document'%0Aexcept Exception, err:%0A print 'ERROR: %25s' %25 str(err)%0A
cf3ca764c571a51952cd7c98c9752aedc701c3eb
Drop the status column
migrations/versions/0053_perform_drop_status_column.py
migrations/versions/0053_perform_drop_status_column.py
Python
0.000003
@@ -0,0 +1,620 @@ +%22%22%22empty message%0A%0ARevision ID: 0053_perform_drop_status_column%0ARevises: 0052_drop_jobs_status%0ACreate Date: 2016-08-25 15:56:31.779399%0A%0A%22%22%22%0A%0A# revision identifiers, used by Alembic.%0Arevision = '0053_perform_drop_status_column'%0Adown_revision = '0052_drop_jobs_status'%0A%0Afrom alembic import op%0Aimport sqlalchemy as sa%0Afrom sqlalchemy.dialects import postgresql%0A%0A%0Adef upgrade():%0A op.drop_column('jobs', 'status')%0A%0A%0Adef downgrade():%0A op.add_column('jobs', sa.Column('status', postgresql.ENUM('pending', 'in progress', 'finished', 'sending limits exceeded', name='job_status_types'), autoincrement=False, nullable=True))%0A
653221002d97f4ab646b11c01016763550912036
Update word-ladder.py
Python/word-ladder.py
Python/word-ladder.py
# Time: O(n * d), n is length of string, d is size of dictionary # Space: O(d) # # Given two words (start and end), and a dictionary, find the length of shortest transformation sequence from start to end, such that: # # Only one letter can be changed at a time # Each intermediate word must exist in the dictionary # For example, # # Given: # start = "hit" # end = "cog" # dict = ["hot","dot","dog","lot","log"] # As one shortest transformation is "hit" -> "hot" -> "dot" -> "dog" -> "cog", # return its length 5. # # Note: # Return 0 if there is no such transformation sequence. # All words have the same length. # All words contain only lowercase alphabetic characters. # # BFS class Solution: # @param start, a string # @param end, a string # @param dict, a set of string # @return an integer def ladderLength(self, start, end, word_list): distance, cur, visited = 0, [start], set([start]) while cur: _next = [] for word in cur: if word == end: return distance + 1 for i in xrange(len(word)): for j in 'abcdefghijklmnopqrstuvwxyz': candidate = word[:i] + j + word[i + 1:] if candidate not in visited and candidate in word_list: _next.append(candidate) visited.add(candidate) distance += 1 cur = _next return 0 if __name__ == "__main__": print Solution().ladderLength("hit", "cog", set(["hot", "dot", "dog", "lot", "log"])) print Solution().ladderLength("hit", "cog", set(["hot", "dot", "dog", "lot", "log", "cog"]))
Python
0.000003
@@ -696,233 +696,297 @@ tion -:%0A # @param start, a string%0A # @param end, a string%0A # @param dict, a set of +(object):%0A def ladderLength(self, beginWord, endWord, wordList):%0A %22%22%22%0A :type beginWord: str -ing %0A -# @return an integer%0A def ladderLength(self, start, end, word_list):%0A distance, cur, visited = 0, %5Bstart%5D, set(%5Bstart%5D + :type endWord: str%0A :type wordList: List%5Bstr%5D%0A :rtype: int%0A %22%22%22%0A distance, cur, visited, lookup = 0, %5BbeginWord%5D, set(%5BbeginWord%5D), set(wordList )%0A%0A @@ -1086,16 +1086,20 @@ d == end +Word :%0A @@ -1372,17 +1372,14 @@ in -word_list +lookup :%0A @@ -1530,16 +1530,16 @@ _next%0A%0A - @@ -1548,16 +1548,17 @@ turn 0%0A%0A +%0A if __nam
8affa8de7338f08c2bb77e290fd7509440d6eee6
Add test for issue #169
numba/tests/issues/test_issue_169.py
numba/tests/issues/test_issue_169.py
Python
0
@@ -0,0 +1,316 @@ +# -*- coding: utf-8 -*-%0A%0A%22%22%22%0ATest binding of autojit methods.%0A%22%22%22%0A%0Afrom __future__ import print_function, division, absolute_import%0A%0Afrom numba import *%0A%0Aclass A(object):%0A @autojit%0A def a(self, arg):%0A return self * arg%0A%0A def __mul__(self, other):%0A return 10 * other%0A%0Aassert A().a(10) == 100%0A%0A%0A
5617038f4ec48915411ec5ce4bf5ae2df98e9e0e
Add dodo dockerkill command
dodo_commands/extra/standard_commands/dockerkill.py
dodo_commands/extra/standard_commands/dockerkill.py
Python
0.000002
@@ -0,0 +1,1316 @@ +# noqa%0Afrom dodo_commands.system_commands import DodoCommand%0Afrom plumbum.cmd import docker%0Afrom six.moves import input as raw_input%0A%0A%0Aclass Command(DodoCommand): # noqa%0A help = %22%22%0A%0A def _containers(self):%0A result = %5B%5D%0A for line in docker(%22ps%22, %22--format%22, %22%7B%7B.ID%7D%7D %7B%7B.Names%7D%7D %7B%7B.Image%7D%7D%22).split('%5Cn'):%0A if line:%0A cid, name, image = line.split()%0A result.append(dict(name=name, cid=cid, image=image))%0A return result%0A%0A def handle_imp(self, **kwargs): # noqa%0A while True:%0A containers = self._containers()%0A%0A print(%220 - exit%22)%0A for idx, container in enumerate(containers):%0A print(%22%25d - %25s%22 %25 (idx + 1, container%5B'name'%5D))%0A print(%22999 - all of the above%22)%0A%0A print(%22%5CnSelect a container: %22)%0A raw_choice = int(raw_input())%0A kill_all = raw_choice == 999%0A choice = raw_choice - 1%0A%0A if choice == -1:%0A return%0A elif kill_all:%0A pass%0A else:%0A containers = %5Bcontainers%5Bchoice%5D%5D%0A for container in containers:%0A self.runcmd(%0A %5B'docker', 'kill', container%5B'cid'%5D%5D,%0A )%0A if kill_all:%0A return%0A
37ab6b858b6f115bef7a6500a8c81da161e4c659
Add normal list params to list method for telemetry statistics
openstack/telemetry/v2/statistics.py
openstack/telemetry/v2/statistics.py
# Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from openstack import resource from openstack.telemetry import telemetry_service class Statistics(resource.Resource): id_attribute = 'meter_name' resource_key = 'statistics' base_path = '/meters/%(meter_name)s/statistics' service = telemetry_service.TelemetryService() # Supported Operations allow_list = True # Path Parameter meter_name = resource.prop('meter_name') # Properties aggregate = resource.prop('aggregate') avg = resource.prop('avg') count = resource.prop('count') duration = resource.prop('duration') duration_end = resource.prop('duration_end') duration_start = resource.prop('duration_start') group_by = resource.prop('groupby') max = resource.prop('max') min = resource.prop('min') period = resource.prop('period') period_end = resource.prop('period_end') period_start = resource.prop('period_start') sum = resource.prop('sum') unit = resource.prop('unit') @classmethod def list(cls, session, path_args=None, paginated=False, **params): url = cls._get_url(path_args) for stat in session.get(url, service=cls.service, params=params).body: yield cls.existing(**stat)
Python
0.000007
@@ -1554,16 +1554,41 @@ session, + limit=None, marker=None, path_ar @@ -1595,16 +1595,29 @@ gs=None, +%0A paginat
c350a2ce7a65281c4a8e85c2569f4ae8c5791dc1
Fix deb installl.
package_managers/apt_get/apt_get.bzl
package_managers/apt_get/apt_get.bzl
# Copyright 2017 Google Inc. All rights reserved. # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Rules that create an output script to install packages via apt-get.""" CACHE_DIR = "/tmp/install" ARCHIVE_DIR = "." load( "//package_managers:package_manager_provider.bzl", "package_manager_provider", ) load( "@io_bazel_rules_docker//skylib:filetype.bzl", tar_filetype = "tar", ) def _generate_download_commands(ctx): command_str = """# Fetch Index set -ex apt-get update -y # Make partial dir mkdir -p {cache}/{archive}/partial # Install command apt-get install --no-install-recommends -y -q -o Dir::Cache="{cache}" -o Dir::Cache::archives="{archive}" {packages} --download-only # Tar command to only include all the *.deb files and ignore other directories placed in the cache dir. tar -cpf {output}.tar --directory {cache}/{archive} `cd {cache}/{archive} && ls *.deb`""".format( output=ctx.attr.name, cache=CACHE_DIR, archive=ARCHIVE_DIR, packages=' '.join(ctx.attr.packages)) commands = [] # filter all comments from command_str for cmd in command_str.split('\n'): if cmd and not cmd.startswith('#'): commands.append(cmd) return commands def _generate_install_commands(ctx, tar): command_str = """#!/bin/bash set -ex tar -xvf {output} dpkg -i ./*.deb apt-get install -f""".format(output=tar) return command_str.split('\n') def _impl(ctx): if not ctx.attr.packages and not ctx.attr.tar: fail("Cannot install packages. \nEither a list of packages or a tar " + "with debs should be provided") elif ctx.attr.packages and ctx.attr.tar: fail("Cannot specify both list of packages and a tar with debs") shell_file_contents = [] # Shell file commands shell_file_contents.append('#!/bin/bash') shell_file_contents.append('set -ex') download_commands = _generate_download_commands(ctx) if ctx.attr.packages else [] tar_name = ("{0}.tar".format(ctx.attr.name) if ctx.attr.packages else ctx.file.tar.short_path) install_commands = _generate_install_commands(ctx, tar_name) apt_get = package_manager_provider( download_commands = download_commands, install_commands = install_commands, ) shell_file_contents.append('\n'.join(download_commands)) shell_file_contents.append('\n'.join(install_commands)) ctx.actions.write( output = ctx.outputs.executable, content = '\n'.join(shell_file_contents), ) runfiles = ctx.runfiles(files=[]) if ctx.attr.tar: runfiles = ctx.runfiles(files=ctx.attr.tar.files.to_list()) return struct( files = depset([ctx.outputs.executable]), runfiles = runfiles, providers = [apt_get], ) generate_apt_get = rule( attrs = { "packages": attr.string_list( doc = "list of packages to download", mandatory = False, ), "tar": attr.label( doc = "tar with package debs to install", mandatory = False, allow_files = tar_filetype, single_file = True, ), }, executable = True, implementation = _impl, ) """Fetches and Installs packages via apt-get or bundled debs. This rule fetches and installs packages via apt-get or tar with debs. Args: packages: List of packages to fetch and install. tar: Tar with package deb bundled. """
Python
0
@@ -1817,15 +1817,60 @@ -i -./*.deb + --force-depends ./*.deb%0Adpkg --configure -a command %0Aapt
b10765b5f1ea84eed69c083341d9db9eda49cc38
insert missing import
tabular_predDB/convergence_analysis/automated_convergence_tests.py
tabular_predDB/convergence_analysis/automated_convergence_tests.py
import os import csv import argparse import tempfile # import numpy import itertools # import tabular_predDB.python_utils.data_utils as du import tabular_predDB.python_utils.file_utils as fu import tabular_predDB.python_utils.xnet_utils as xu import tabular_predDB.LocalEngine as LE import tabular_predDB.HadoopEngine as HE import tabular_predDB.cython_code.State as State from collections import namedtuple import time import parse_convergence_results as pc def generate_hadoop_dicts(convergence_run_parameters, args_dict): dict_to_write = dict(convergence_run_parameters) dict_to_write.update(args_dict) yield dict_to_write def write_hadoop_input(input_filename, convergence_run_parameters, n_steps, block_size, SEED): # prep settings dictionary convergence_analyze_args_dict = xu.default_analyze_args_dict convergence_analyze_args_dict['command'] = 'convergence_analyze' convergence_analyze_args_dict['SEED'] = SEED convergence_analyze_args_dict['n_steps'] = n_steps convergence_analyze_args_dict['block_size'] = block_size with open(input_filename, 'a') as out_fh: dict_generator = generate_hadoop_dicts(convergence_run_parameters, convergence_analyze_args_dict) for dict_to_write in dict_generator: xu.write_hadoop_line(out_fh, key=dict_to_write['SEED'], dict_to_write=dict_to_write) if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('--gen_seed', type=int, default=0) parser.add_argument('--n_steps', type=int, default=500) parser.add_argument('--num_chains', type=int, default=50) parser.add_argument('--block_size', type=int, default=20) parser.add_argument('-do_local', action='store_true') parser.add_argument('-do_remote', action='store_true') # args = parser.parse_args() gen_seed = args.gen_seed n_steps = args.n_steps do_local = args.do_local num_chains = args.num_chains do_remote = args.do_remote block_size = args.block_size script_filename = 'hadoop_line_processor.py' # some hadoop processing related settings # FIXME: need to make sure 'dir' argument exists temp_dir = tempfile.mkdtemp(prefix='convergence_analysis_', dir='convergence_analysis') print 'using dir: %s' % temp_dir # table_data_filename = os.path.join(temp_dir, 'table_data.pkl.gz') input_filename = os.path.join(temp_dir, 'hadoop_input') output_filename = os.path.join(temp_dir, 'hadoop_output') output_path = os.path.join(temp_dir, 'output') parsed_out_file = os.path.join(temp_dir, 'parsed_convergence_output.csv') # Hard code the parameter values for now num_rows_list = [200, 400, 1000] num_cols_list = [8, 16, 32] num_clusters_list = [5,10] num_splits_list = [2, 4] max_mean_list = [0.5, 1, 2] # num_rows_list = [200] # num_cols_list = [8] # num_clusters_list = [5] # num_splits_list = [2,4] # max_mean_list = [1] parameter_list = [num_rows_list, num_cols_list, num_clusters_list, num_splits_list] count = -1 # Iterate over the parameter values and write each run as a line in the hadoop_input file take_product_of = [num_rows_list, num_cols_list, num_clusters_list, num_splits_list, max_mean_list] for num_rows, num_cols, num_clusters, num_splits, max_mean in itertools.product(*take_product_of): if numpy.mod(num_rows, num_clusters) == 0 and numpy.mod(num_cols,num_splits)==0: count = count + 1 for chainindx in range(num_chains): convergence_run_parameters = dict(num_rows=num_rows, num_cols=num_cols, num_views=num_splits, num_clusters=num_clusters, max_mean=max_mean, init_seed=chainindx) write_hadoop_input(input_filename, convergence_run_parameters, n_steps, block_size, SEED=count) n_tasks = len(num_rows_list)*len(num_cols_list)*len(num_clusters_list)*len(num_splits_list)*len(max_mean_list)*num_chains # Create a dummy table data file table_data=dict(T=[],M_c=[],X_L=[],X_D=[]) fu.pickle(table_data, table_data_filename) if do_local: xu.run_script_local(input_filename, script_filename, output_filename, table_data_filename) print 'Local Engine for automated convergence runs has not been completely implemented/tested' elif do_remote: hadoop_engine = HE.HadoopEngine(output_path=output_path, input_filename=input_filename, table_data_filename=table_data_filename, ) xu.write_support_files(table_data, hadoop_engine.table_data_filename, dict(command='convergence_analyze'), hadoop_engine.command_dict_filename) hadoop_engine.send_hadoop_command(n_tasks=n_tasks) was_successful = hadoop_engine.get_hadoop_results() if was_successful: hu.copy_hadoop_output(hadoop_engine.output_path, output_filename) pc.parse_to_csv(output_filename,parsed_out_file) else: print 'remote hadoop job NOT successful' else: hadoop_engine = HE.HadoopEngine() # print what the command would be print HE.create_hadoop_cmd_str(hadoop_engine, n_tasks=n_tasks)
Python
0.000003
@@ -132,16 +132,70 @@ s as du%0A +import tabular_predDB.python_utils.hadoop_utils as hu%0A import t
1b217b7990453b4b20c8d255b27825971a32092c
add testing python script for trying out
python/setup.py
python/setup.py
Python
0
@@ -0,0 +1,138 @@ +from sys import argv%0A%0A# main method as entry point %0Adef main(arg_inputs):%0A pass%0A%0Aif __name__ == %22__main__%22:%0A main(argv%5B1%5D)%0A pass%0A
5dba05e3012b2004fd63f29200ba83b36529da41
add caesar cipher python exercise
Text/caesar_cipher.py
Text/caesar_cipher.py
Python
0.999965
@@ -0,0 +1,1639 @@ +#Global constants for menu choices%0A%0ASHIFT_ONE = 1%0ASHIFT_TWO = 2%0Adef caesar(plaintext, shift):%0A alphabet=%5B%22a%22,%22b%22,%22c%22,%22d%22,%22e%22,%22f%22,%22g%22,%22h%22,%22i%22,%22j%22,%22k%22,%22l%22,%0A %22m%22,%22n%22,%22o%22,%22p%22,%22q%22,%22r%22,%22s%22,%22t%22,%22u%22,%22v%22,%22w%22,%22x%22,%22y%22,%22z%22,%22A%22,%22B%22,%0A %22C%22,%22D%22%22E%22,%22F%22,%22G%22,%22H%22,%22I%22,%22J%22,%22K%22,%22L%22,%22M%22,%22N%22,%22O%22,%22P%22,%0A %22Q%22,%22R%22,%22S%22,%22T%22,%22U%22,%22V%22,%22W%22,%22X%22,%22Y%22,%22Z%22%5D%0A%0A #Create our substitution dictionary%0A dic=%7B%7D%0A for i in range(0,len(alphabet)):%0A dic%5Balphabet%5Bi%5D%5D=alphabet%5B(i+shift)%25len(alphabet)%5D%0A%0A #Convert each letter of plaintext to the corresponding%0A #encrypted letter in our dictionary creating the cryptext%0A caesartext=%22%22%0A for l in plaintext:%0A if plaintext.isupper():%0A uppercase = True%0A else:%0A uppercase = False%0A for l in plaintext:%0A if uppercase:%0A l = l.upper()%0A l=dic%5Bl%5D%0A elif l in dic:%0A l=dic%5Bl%5D%0A caesartext+=l%0A%0A return caesartext%0A%0A#Get choice%0A%0A%0Adef main():%0A user = 0%0A user = get_menu_choice ()%0A if user == SHIFT_ONE:%0A plaintext=input(%22Enter your text to be coded: %22)%0A print (%22Plaintext:%22, plaintext )%0A print (%22Caesartext:%22,caesar(plaintext,1))%0A elif user ==SHIFT_TWO:%0A plaintext=input(%22Enter your text to be coded: %22)%0A print (%22Plaintext:%22, plaintext )%0A print (%22Caesartext:%22,caesar(plaintext,2))%0A%0Adef get_menu_choice():%0A user=int(input(%22For one positive shift, enter 1; for two positive shifts enter 2: %22))%0A return user%0A%0A#%22Now is the time for all good men to come to the aid of their country%22%0A#%22The quick brown fox jumps over the lazy dog%22%0A%0Amain()%0A
440fd9e5825e3ce37907b46248210bab6c564ea6
Improve error checking
pylxd/connection.py
pylxd/connection.py
# Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import httplib
import json
import os
import socket
import ssl

from . import utils


class UnixHTTPConnection(httplib.HTTPConnection):

    def __init__(self, path, host='localhost', port=None, strict=None, timeout=None):
        httplib.HTTPConnection.__init__(self, host, port=port, strict=strict, timeout=timeout)
        self.path = path

    def connect(self):
        sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        sock.connect(self.path)
        self.sock = sock


class HTTPSConnection(httplib.HTTPConnection):
    default_port = 8443

    def __init__(self, *args, **kwargs):
        httplib.HTTPConnection.__init__(self, *args, **kwargs)

    def connect(self):
        sock = socket.create_connection((self.host, self.port), self.timeout, self.source_address)
        if self._tunnel_host:
            self.sock = sock
            self._tunnel()

        (cert_file, key_file) = self._get_ssl_certs()
        self.sock = ssl.wrap_socket(sock, certfile=cert_file, keyfile=key_file)

    @staticmethod
    def _get_ssl_certs():
        return (os.path.join(os.environ['HOME'], '.config/lxc/client.crt'),
                os.path.join(os.environ['HOME'], '.config/lxc/client.key'))


class LXDConnection(object):

    def __init__(self, host=None, port=8443):
        if host:
            self.host = host
            self.port = port
            self.unix_socket = None
        else:
            if 'LXD_DIR' in os.environ:
                self.unix_socket = os.path.join(os.enviorn['LXD_DIR'], 'unix.socket')
            else:
                self.unix_socket = '/var/lib/lxd/unix.socket'
            self.host, self.port = None, None

        self.connection = None

    def get_connection(self):
        if self.host:
            return HTTPSConnection(self.host, self.port)
        return UnixHTTPConnection(self.unix_socket)

    def get_object(self, *args, **kwargs):
        self.connection = self.get_connection()
        self.connection.request(*args, **kwargs)
        response = self.connection.getresponse()
        state = response.status
        data = json.loads(response.read())
        if not data:
            msg = "Null Data"
            raise Exception(msg)
        elif state == 200 or (state == 202 and data.get('status_code') == 100):
            return state, data
        else:
            utils.get_lxd_error(state, data)

    def get_status(self, *args, **kwargs):
        status = False
        self.connection = self.get_connection()
        self.connection.request(*args, **kwargs)
        response = self.connection.getresponse()
        state = response.status
        data = json.loads(response.read())
        if not data:
            msg = "Null Data"
            raise Exception(msg)
        elif data.get('error'):
            status = utils.get_lxd_error(state, data)
        elif state == 200 or (state == 202 and data.get('status_code') == 100):
            status = True
        return status

    def get_raw(self, *args, **kwargs):
        self.connection = self.get_connection()
        self.connection.request(*args, **kwargs)
        response = self.connection.getresponse()
        body = response.read()
        if not body:
            msg = "Null Body"
            raise Exception(msg)
        elif response.status == 200:
            return body
        else:
            msg = "Failed to get raw response"
            raise Exception(msg)
Python
0.000001
@@ -3513,32 +3513,86 @@ Exception(msg)%0A + elif state == 404:%0A status = False%0A elif dat
f901632651fa0d177a3ba7fc99504aa874eb48b8
add watcher
watcher.py
watcher.py
Python
0.000001
@@ -0,0 +1,410 @@ +class Watcher:%0A def __init__(self, topic, client):%0A self.value = None%0A self.topic = topic%0A self.client = client%0A%0A def set_value(self, new_value):%0A if self.value != new_value:%0A self.value = new_value%0A self.change()%0A%0A def change(self):%0A print('change: %7B%7D to %7B%7D'.format(self.topic, self.value))%0A self.client.publish(self.topic, self.value)%0A
679e969e1cab73139406bdeb5a2f6d03757a89af
Allow Authentication header in CORS
sentry/utils/http.py
sentry/utils/http.py
""" sentry.utils.http ~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import urllib from urlparse import urlparse from sentry.conf import settings from sentry.plugins.helpers import get_option def safe_urlencode(params, doseq=0): """ UTF-8-safe version of safe_urlencode The stdlib safe_urlencode prior to Python 3.x chokes on UTF-8 values which can't fail down to ascii. """ # Snippet originally from pysolr: https://github.com/toastdriven/pysolr if hasattr(params, "items"): params = params.items() new_params = list() for k, v in params: k = k.encode("utf-8") if isinstance(v, basestring): new_params.append((k, v.encode("utf-8"))) elif isinstance(v, (list, tuple)): new_params.append((k, [i.encode("utf-8") for i in v])) else: new_params.append((k, unicode(v))) return urllib.urlencode(new_params, doseq) def is_same_domain(url1, url2): """ Returns true if the two urls should be treated as if they're from the same domain (trusted). """ url1 = urlparse(url1) url2 = urlparse(url2) return url1.netloc == url2.netloc def apply_access_control_headers(response, project=None): """ Provides the Access-Control headers to enable cross-site HTTP requests. You can find more information about these headers here: https://developer.mozilla.org/En/HTTP_access_control#Simple_requests """ origin = settings.ALLOW_ORIGIN or '' if project and origin is not '*': optval = get_option('sentry:origins', project) if optval: origin = ('%s %s' % (origin, ' '.join(optval))).strip() if origin: response['Access-Control-Allow-Origin'] = origin response['Access-Control-Allow-Headers'] = 'X-Sentry-Auth' response['Access-Control-Allow-Methods'] = 'POST' return response
Python
0.000001
@@ -1908,16 +1908,32 @@ try-Auth +, Authentication '%0A
553825a2bd5db860d2842a8f72b0142e36d61ba0
build the very basic web server->app.py
www/app.py
www/app.py
Python
0.000001
@@ -0,0 +1,586 @@ +#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0A__author__ = 'mookaka'%0A%0Aimport logging%0Aimport asyncio%0Afrom aiohttp import web%0A%0A%0Alogging.basicConfig(level=logging.INFO)%0A%0Adef index(request):%0A return web.Response(body=b'%3Ch1%3EAwesome!%3C/h1%3E')%0A%0Aasync def init(loop):%0A app = web.Application(loop=loop)%0A app.router.add_route('GET', '/', index)%0A svr = await loop.create_server(app.make_handler(), '127.0.0.1', 9000)%0A logging.info('we have run the tiny web server successfully!')%0A return svr%0A%0Aloop = asyncio.get_event_loop()%0Aloop.run_until_complete(init(loop))%0Aloop.run_forever()
e9e845b33891f50834e9b8bfb1796e43e9faac81
Create complete_the_pattern_#9.py
complete_the_pattern_#9.py
complete_the_pattern_#9.py
Python
0.998654
@@ -0,0 +1,513 @@ +#Kunal Gautam%0A#Codewars : @Kunalpod%0A#Problem name: Complete The Pattern #9 - Diamond%0A#Problem level: 6 kyu%0A%0Adef pattern(n):%0A top = '%5Cn'.join(' '*(n-i) + ''.join(str(j%2510) for j in range(1, i+1)) + ''.join(str(j%2510) for j in list(range(1,i))%5B::-1%5D) + ' '*(n-i) for i in range(1, n+1))%0A bottom = '%5Cn'.join(' '*(n-i) + ''.join(str(j%2510) for j in range(1, i+1)) + ''.join(str(j%2510) for j in list(range(1,i))%5B::-1%5D) + ' '*(n-i) for i in list(range(1, n))%5B::-1%5D)%0A return top + '%5Cn' + bottom if bottom else top%0A
37061643f4e416c8926411229a2e4d2737cef2e5
Create sportability2shutterfly.py
sportability2shutterfly.py
sportability2shutterfly.py
Python
0.000619
@@ -0,0 +1,1265 @@ +#!/usr/bin/python%0A#%0A# convert sportability.com player info to shutterfly.com format.%0A#%0Aimport sys%0Aimport csv%0Adef pullContact(list,row,num):%0A if row%5B%22Parent%22+num+%22_FirstName%22%5D != %22%22 and row%5B%22Parent1_LastName%22%5D != %22%22:%0A key=row%5B%22Parent%22+num+%22_FirstName%22%5D+row%5B%22Parent1_LastName%22%5D%0A if key not in list:%0A data = %7B%22FirstName%22:row%5B%22Parent%22+num+%22_FirstName%22%5D,%0A %22LastName%22:row%5B%22Parent%22+num+%22_LastName%22%5D,%0A %22HomePhone%22:row%5B%22Phone%22%5D,%0A %22CellPhone%22:row%5B%22Parent%22+num+%22_Phone%22%5D,%0A %22Email%22:row%5B%22Parent%22+num+%22_Email%22%5D,%0A %22Address%22:row%5B%22Parent%22+num+%22_Address%22%5D,%0A %22City%22:row%5B%22Parent%22+num+%22_City%22%5D,%0A %22State%22:row%5B%22Parent%22+num+%22_State%22%5D,%0A %22Zip%22:row%5B%22Parent%22+num+%22_Zip%22%5D%7D%0A list%5Bkey%5D=data%0A return list%0Adef csv_reader(filename):%0A# with open(filename) as f_obj:%0A# reader = csv.DictReader(f_obj, delimiter=',', quotechar='%7C')%0A reader = csv.DictReader(open (filename))%0A list = %7B%7D%0A for row in reader:%0A list = pullContact(list,row,%221%22)%0A list = pullContact(list,row,%222%22)%0A print list%0Aif __name__ == %22__main__%22:%0A csv_path = %22./playersExtended.csv%22%0A csv_reader(csv_path)%0A
2bc33138e7e110486f98145548b05da65577491c
Fix test set up
src/tests/ggrc/__init__.py
src/tests/ggrc/__init__.py
# Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

import os
import logging
from flask.ext.testing import TestCase as BaseTestCase
from ggrc import db
from ggrc.app import app
from ggrc.models import create_db

if os.environ.get('TRAVIS', False):
  db.engine.execute("DROP DATABASE IF EXISTS ggrcdevtest;")
  db.engine.execute("CREATE DATABASE ggrcdevtest; USE ggrcdevtest;")

create_db(use_migrations=True, quiet=True)

# Hide errors during testing. Errors are still displayed after all tests are
# done. This is for the bad request error messages while testing the api calls.
logging.disable(logging.CRITICAL)


class TestCase(BaseTestCase):

  @classmethod
  def clear_data(cls):
    ignore_tables = (
        "test_model",
        "roles",
        "notification_types",
        "object_types",
        "options"
    )
    tables = set(db.metadata.tables).difference(ignore_tables)
    for _ in range(len(tables)):
      if len(tables) == 0:
        break  # stop the loop once all tables have been deleted
      for table in reversed(db.metadata.sorted_tables):
        if table.name not in ignore_tables:
          try:
            db.engine.execute(table.delete())
            tables.remove(table.name)
          except:
            pass
    db.session.commit()

  def setUp(self):
    # this is a horrible hack because db.metadata.sorted_tables does not sort
    # by dependencies. Events table is before Person table - reversed is bad.
    self.clear_data()
    # if getattr(settings, 'MEMCACHE_MECHANISM', False) is True:
    #   from google.appengine.api import memcache
    #   from google.appengine.ext import testbed
    #   self.testbed = testbed.Testbed()
    #   self.testbed.activate()
    #   self.testbed.init_memcache_stub()

  def tearDown(self):
    db.session.remove()
    # if getattr(settings, 'MEMCACHE_MECHANISM', False) is True:
    #   from google.appengine.api import memcache
    #   from google.appengine.ext import testbed
    #   self.testbed.deactivate()

  def create_app(self):
    app.config["SERVER_NAME"] = "localhost"
    app.testing = True
    app.debug = False
    return app
Python
0
@@ -968,16 +968,39 @@ options%22 +,%0A %22categories%22, %0A )%0A
b08cbbf353da0e84f0f1a160de5e79d5d05c0ea6
add gensim utils
nlpia/gensim_utils.py
nlpia/gensim_utils.py
Python
0
@@ -0,0 +1,3083 @@ +from __future__ import print_function, unicode_literals, division, absolute_import%0Afrom future import standard_library%0Astandard_library.install_aliases() # noqa%0Afrom builtins import object # noqa%0A%0A# from gensim.models import Word2Vec%0Afrom gensim import corpora%0Afrom gensim import utils%0A%0Afrom nlpia.constants import logging%0Alogger = logging.getLogger(__name__)%0A%0A%0Adef tokens2ngrams(tokens, n=2):%0A tokens = list(tokens)%0A ngrams = %5B%5D%0A for i in range(len(tokens) - n + 1):%0A ngrams.append(' '.join(tokens%5Bi:i + n%5D))%0A return ngrams%0A%0A%0Adef passthrough(*args, **kwargs):%0A return args%5B0%5D if len(args) else list(kwargs.values())%5B0%5D%0A%0A%0Adef return_false(*args, **kwargs):%0A return False%0A%0A%0Adef return_true(*args, **kwargs):%0A return True%0A%0A%0Adef noop(*args, **kwargs):%0A pass%0A%0A%0Adef return_none(*args, **kwargs):%0A pass%0A%0A%0Aclass TweetCorpus(corpora.TextCorpus):%0A ignore_matcher = return_none # compiled regular expression for token matches to skip/ignore%0A num_grams = 2%0A case_normalizer = utils.to_unicode%0A tokenizer = str.split%0A mask = None%0A%0A def get_texts(self):%0A %22%22%22 Parse documents from a .txt file assuming 1 document per line, yielding lists of filtered tokens %22%22%22%0A with self.getstream() as text_stream:%0A for i, line in enumerate(text_stream):%0A if self.mask is not None and not self.mask%5Bi%5D:%0A continue%0A ngrams = %5B%5D%0A for ng in tokens2ngrams(self.tokenizer(self.case_normalizer(line))):%0A if self.ignore_matcher(ng):%0A continue%0A ngrams += %5Bng%5D%0A yield ngrams%0A%0A def __len__(self):%0A %22%22%22 Enables %60len(corpus)%60 %22%22%22%0A if 'length' not in self.__dict__:%0A logger.info(%22Computing the number of lines in the corpus size (calculating number of documents)%22)%0A self.length = sum(1 for doc in self.get_stream())%0A return self.length%0A%0A%0Aclass SMSCorpus(corpora.TextCorpus):%0A ignore_matcher = return_none # compiled regular expression for token matches to skip/ignore%0A num_grams = 2%0A case_normalizer = utils.to_unicode%0A tokenizer = str.split%0A mask = None%0A%0A def get_texts(self):%0A %22%22%22 Parse documents from a .txt file assuming 1 document per line, yielding lists of filtered tokens %22%22%22%0A with self.getstream() as text_stream:%0A for i, line in enumerate(text_stream):%0A if self.mask is not None and not self.mask%5Bi%5D:%0A continue%0A ngrams = %5B%5D%0A for ng in tokens2ngrams(self.tokenizer(self.case_normalizer(line))):%0A if self.ignore_matcher(ng):%0A continue%0A ngrams += %5Bng%5D%0A yield ngrams%0A%0A def __len__(self):%0A %22%22%22 Enables %60len(corpus)%60 %22%22%22%0A if 'length' not in self.__dict__:%0A logger.info(%22Computing the number of lines in the corpus size (calculating number of documents)%22)%0A self.length = sum(1 for doc in self.getstream())%0A return self.length%0A
1e9498ab2a947e10e59ebcc8fa9759c35405e870
Enable --verbose and unify log format
vint/__init__.py
vint/__init__.py
import sys
from argparse import ArgumentParser
import pkg_resources

from vint.linting.linter import Linter
from vint.linting.env import build_environment
from vint.linting.config.config_container import ConfigContainer
from vint.linting.config.config_cmdargs_source import ConfigCmdargsSource
from vint.linting.config.config_default_source import ConfigDefaultSource
from vint.linting.config.config_global_source import ConfigGlobalSource
from vint.linting.config.config_project_source import ConfigProjectSource
from vint.linting.policy_set import PolicySet
from vint.linting.formatter.formatter import Formatter
from vint.linting.formatter.json_formatter import JSONFormatter
from vint.linting.formatter.statistic_formatter import StatisticFormatter


def main():
    env = _build_env(sys.argv)
    config_dict = _build_config_dict(env)
    parser = _build_argparser()

    paths_to_lint = env['file_paths']

    if len(paths_to_lint) == 0:
        print('vint error: nothing to lint\n')
        parser.print_help()
        parser.exit(status=1)

    for path_to_lint in paths_to_lint:
        if not path_to_lint.exists() or not path_to_lint.is_file():
            print('vint error: no such file: `{path}`\n'.format(
                path=str(path_to_lint)))
            parser.exit(status=1)

    violations = _lint_all(paths_to_lint, config_dict)

    if len(violations) == 0:
        parser.exit(status=0)

    _print_violations(violations, config_dict)
    parser.exit(status=1)


def _build_config_dict(env):
    config = ConfigContainer(
        ConfigDefaultSource(env),
        ConfigGlobalSource(env),
        ConfigProjectSource(env),
        ConfigCmdargsSource(env),
    )

    return config.get_config_dict()


def _build_argparser():
    parser = ArgumentParser(prog='vint', description='Lint Vim script')

    parser.add_argument('-v', '--version', action='version', version=_get_version())
    parser.add_argument('-V', '--verbose', action='store_true', help='output verbose message')
    parser.add_argument('-e', '--error', action='store_true', help='report only errors')
    parser.add_argument('-w', '--warning', action='store_true', help='report errors and warnings')
    parser.add_argument('-s', '--style-problem', action='store_true', help='report errors, warnings and style problems')
    parser.add_argument('-m', '--max-violations', type=int, help='limit max violations count')
    parser.add_argument('-c', '--color', action='store_true', help='colorize output when possible')
    parser.add_argument('-j', '--json', action='store_true', help='output json style')
    parser.add_argument('-t', '--stat', action='store_true', help='output statistic info')
    parser.add_argument('files', nargs='*', help='file or directory path to lint')

    return parser


def _build_cmdargs(argv):
    """ Build command line arguments dict to use;
    - displaying usages
    - vint.linting.env.build_environment

    This method take an argv parameter to make function pure.
    """
    parser = _build_argparser()
    namespace = parser.parse_args(argv[1:])

    cmdargs = vars(namespace)
    return cmdargs


def _build_env(argv):
    """ Build an environment object.
    This method take an argv parameter to make function pure.
    """
    cmdargs = _build_cmdargs(argv)
    env = build_environment(cmdargs)
    return env


def _build_linter(config_dict):
    policy_set = PolicySet()
    linter = Linter(policy_set, config_dict)
    return linter


def _lint_all(paths_to_lint, config_dict):
    violations = []
    linter = _build_linter(config_dict)

    for file_path in paths_to_lint:
        violations += linter.lint_file(file_path)

    return violations


def _get_formatter(config_dict):
    if 'cmdargs' not in config_dict:
        return Formatter(config_dict)

    cmdargs = config_dict['cmdargs']
    if 'json' in cmdargs and cmdargs['json']:
        return JSONFormatter(config_dict)
    elif 'stat' in cmdargs and cmdargs['stat']:
        return StatisticFormatter(config_dict)
    else:
        return Formatter(config_dict)


def _print_violations(violations, config_dict):
    formatter = _get_formatter(config_dict)
    output = formatter.format_violations(violations)

    print(output)


def _get_version():
    version = pkg_resources.require('vim-vint')[0].version
    return version
Python
0.000001
@@ -60,16 +60,31 @@ esources +%0Aimport logging %0A%0Afrom v @@ -758,24 +758,72 @@ cFormatter%0A%0A +LOG_FORMAT = 'vint %25(levelname)s: %25(message)s'%0A%0A %0Adef main(): @@ -929,16 +929,47 @@ rser()%0A%0A + _init_logger(config_dict)%0A%0A path @@ -1039,35 +1039,31 @@ -print('vint +logging. error -: +(' nothing @@ -1069,14 +1069,13 @@ to -lint%5Cn +check ')%0A @@ -1255,27 +1255,23 @@ -print('vint +logging. error -: +(' no s @@ -1278,16 +1278,29 @@ uch file + or directory : %60%7Bpath @@ -1305,10 +1305,8 @@ th%7D%60 -%5Cn '.fo @@ -1546,24 +1546,24 @@ onfig_dict)%0A - parser.e @@ -1574,24 +1574,231 @@ status=1)%0A%0A%0A +def _init_logger(config_dict):%0A cmdargs = config_dict%5B'cmdargs'%5D%0A%0A log_level = logging.DEBUG if 'verbose' in cmdargs else logging.WARNING%0A%0A logging.basicConfig(format=LOG_FORMAT, level=log_level)%0A%0A%0A def _build_c
da8bb1fd01a34686474f66cdaf3f7376b1c6c901
Set extra forms for the RecordInline to 3.
src/powerdns_manager/admin.py
src/powerdns_manager/admin.py
# -*- coding: utf-8 -*-
#
# This file is part of django-powerdns-manager.
#
# django-powerdns-manager is a web based PowerDNS administration panel.
#
# Development Web Site:
# - http://www.codetrax.org/projects/django-powerdns-manager
# Public Source Code Repository:
# - https://source.codetrax.org/hgroot/django-powerdns-manager
#
# Copyright 2012 George Notaras <gnot [at] g-loaded.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

from django.contrib import admin
from django.db.models.loading import cache
from django.contrib import messages
from django.contrib.admin import SimpleListFilter
from django.utils.translation import ugettext_lazy as _

# Action for
# - set change date
# - set serial (?)
# - set TTL to 300, 3600, 86400
#
#def test_action(modeladmin, request, queryset):
#    messages.add_message(request, messages.INFO, 'The test action was successful.')
#test_action.short_description = "Test Action"

class OwnDomainListFilter(SimpleListFilter):
    title = _('domain')
    parameter_name = 'domain'

    def lookups(self, request, model_admin):
        Domain = cache.get_model('powerdns_manager', 'Domain')
        qs = Domain.objects.filter(created_by=request.user)
        for namespace in qs:
            yield (namespace, namespace)

    def queryset(self, request, queryset):
        the_domain = self.value()
        if the_domain:
            return queryset.filter(domain__name=the_domain, domain__created_by=request.user)
        return queryset

class RecordInline(admin.TabularInline):
    model = cache.get_model('powerdns_manager', 'Record')
    fields = ('name', 'type', 'ttl', 'prio', 'content', 'auth', 'date_modified')
    readonly_fields = ('date_modified', )
    extra = 1

class DomainMetadataInline(admin.TabularInline):
    model = cache.get_model('powerdns_manager', 'DomainMetadata')
    fields = ('kind', 'content', 'date_modified')
    readonly_fields = ('date_modified', )
    extra = 0

class CryptoKeyInline(admin.TabularInline):
    model = cache.get_model('powerdns_manager', 'CryptoKey')
    fields = ('flags', 'active', 'content', 'date_modified')
    readonly_fields = ('date_modified', )
    extra = 0

class DomainAdmin(admin.ModelAdmin):
    #form = DomainModelForm
    #actions = [reload_php_stack, ]

    fieldsets = (
        ('', {
            'fields' : ('name', 'type', 'master'),
            #'description' : 'Main virtual host attributes',
        }),
        ('Info', {
            'classes' : ('collapse',),
            'fields' : ('date_created', 'date_modified', ),
            #'description' : 'Information about the domain.',
        }),
    )
    readonly_fields = ('date_created', 'date_modified', )
    list_display = ('name', 'type', 'master', 'date_created', 'date_modified')
    list_filter = ('type', )
    search_fields = ('name', 'master')
    inlines = [RecordInline, DomainMetadataInline, CryptoKeyInline]

    def queryset(self, request):
        qs = super(DomainAdmin, self).queryset(request)
        if not request.user.is_superuser:
            # Non-superusers see the domains they have created
            qs = qs.filter(created_by=request.user)
        return qs

    def save_model(self, request, obj, form, change):
        if not change:
            obj.created_by = request.user
        obj.save()

admin.site.register(cache.get_model('powerdns_manager', 'Domain'), DomainAdmin)

class TsigKeyAdmin(admin.ModelAdmin):
    fieldsets = (
        ('', {
            'fields' : ('name', 'algorithm', 'secret', ),
        }),
        ('Info', {
            'classes' : ('collapse',),
            'fields' : ('date_created', 'date_modified', ),
        }),
    )
    readonly_fields = ('date_created', 'date_modified')
    list_display = ('name', 'algorithm', 'date_created', 'date_modified')
    list_filter = ('algorithm', )
    search_fields = ('name', )

    def queryset(self, request):
        qs = super(TsigKeyAdmin, self).queryset(request)
        if not request.user.is_superuser:
            # Non-superusers see the records they have created
            qs = qs.filter(created_by=request.user)
        return qs

    def save_model(self, request, obj, form, change):
        if not change:
            obj.created_by = request.user
        obj.save()

admin.site.register(cache.get_model('powerdns_manager', 'TsigKey'), TsigKeyAdmin)

class SuperMasterAdmin(admin.ModelAdmin):
    fields = ('ip', 'nameserver', 'account')
    readonly_fields = ('date_created', 'date_modified')
    list_display = ('ip', 'nameserver', 'account', 'date_created', 'date_modified')
    search_fields = ('nameserver', 'account')

admin.site.register(cache.get_model('powerdns_manager', 'SuperMaster'), SuperMasterAdmin)
Python
0
@@ -2231,17 +2231,17 @@ extra = -1 +3 %0A%0Aclass
01c0518d88d3b1a6919f9841752eb676bed8f68a
Create test.py
scripts/test.py
scripts/test.py
Python
0.000005
@@ -0,0 +1,20 @@ +print %22hello world%22%0A
ffa1a6711a616582c38ffeeef47df9f6ff272fe2
Create DAGUtilities.py
DAGUtilities.py
DAGUtilities.py
Python
0
@@ -0,0 +1,599 @@ +'''%0ACreated on Apr 6, 2016%0A%0A@author: Noah Higa%0A'''%0Aimport sys%0Afrom Job import Job%0A%0Adef newJob(identity):%0A aNewJob = Job(identity, 1, 1, 1, 1, 1, 1, 1, 1)%0A return aNewJob%0A%0Adef printEdges(G):%0A edges = G.edges()%0A for edge in edges:%0A sys.stdout.write('(')%0A sys.stdout.write(edge%5B0%5D.__str__())%0A sys.stdout.write(',')%0A sys.stdout.write(edge%5B1%5D.__str__())%0A sys.stdout.write(') ')%0A print ' '%0A%0Adef printNodes(G):%0A nodes = G.nodes()%0A for node in nodes:%0A sys.stdout.write(node.__str__())%0A sys.stdout.write(%22; %22)%0A %0A print ' '%0A %0A
569be57e96a97885c4f7d92a3ce524aa47413897
Create for_loops.py
_ptopics/for_loops.py
_ptopics/for_loops.py
Python
0.000012
@@ -0,0 +1,518 @@ +---%0Atopic: %22for loops%22%0Adesc: %22for loops in Python, from basic to advanced%22%0A---%0A%0A# Basic for loop over a list%0A%0A%60%60%60python%0Aschools = %5B%22UCSB%22,%22UCLA%22,%22UCI%22,%22Cal Poly%22%5D%0Afor s in schools:%0A print(s,len(s))%0A%60%60%60%0A%0AOutput:%0A%60%60%60%0AUCSB 4%0AUCLA 4%0AUCI 3%0ACal Poly 8%0A%60%60%60%0A%0A# Basic for loop with counter%0A%0A%60%60%60python%0A%3E%3E%3E for i in range(4):%0A... print(i)%0A... %0A0%0A1%0A2%0A3%0A%3E%3E%3E %0A%60%60%60%0A%0A# For loop over a list using %60range(len(thelist))%60%0A%0A%60%60%60python%0Aschools = %5B%22UCSB%22,%22UCLA%22,%22UCI%22,%22Cal Poly%22%5D%0Afor i in range(len(schools)):%0A print(i,schools%5Bi%5D)%0A%60%60%60%0A
58b5ff7daf0c240ddbb4b83d11b361dcf574ae74
Add mkaudiocd.py for making Audio CD for dosbox
mkaudiocd.py
mkaudiocd.py
Python
0
@@ -0,0 +1,1174 @@ +#!/usr/bin/env python3%0Aimport sys, subprocess, os, multiprocessing, magic, tempfile;%0Afiles = sys.argv%5B1:%5D;%0Atempfiles = %5B%5D;%0A%0Amime = magic.open(magic.MIME);%0Amime.load();%0Afor i in range(len(files)):%0A mime_type = mime.file(files%5Bi%5D).split(';')%5B0%5D;%0A if (mime_type in ('audio/mpeg')):%0A tf = tempfile.NamedTemporaryFile(prefix='%2502d-'%25(i+1), suffix='.wav', delete=False);%0A tf.close();%0A tempfiles.append(tf.name);%0A plame = subprocess.Popen(%5B'/usr/bin/lame', '--decode', files%5Bi%5D, tf.name%5D);%0A if (0 == plame.wait()):%0A files%5Bi%5D = tf.name;%0Amime.close();%0A%0AOUTPUT_AUDIO='audiocd.wav';%0Apwav = subprocess.Popen(%5B'/usr/bin/shntool', 'join'%5D + files + %5B'-o', 'wav', '-O', 'always'%5D);%0Apcue = subprocess.Popen(%0A %5B'/usr/bin/shntool', 'cue'%5D+files,%0A stdin=subprocess.PIPE,%0A stdout=subprocess.PIPE,%0A stderr=subprocess.PIPE%0A);%0Astdout, stderr = pcue.communicate();%0Asys.stderr.write(stderr.decode('UTF-8'));%0Awith open('audiocd.cue', 'w') as f:%0A f.write(stdout.decode('UTF-8').replace('joined.wav', OUTPUT_AUDIO).replace('WAVE', 'BINARY'));%0Apwav.wait();%0Aos.rename('joined.wav', OUTPUT_AUDIO);%0A%0Afor f in tempfiles:%0A os.unlink(f);%0A%0A
0cabf3c4dae3599e2d1627ff41707cf36b4d2ddd
Load all modules by default
acoustics/__init__.py
acoustics/__init__.py
""" Acoustics ========= The acoustics module... """ import acoustics.ambisonics import acoustics.utils import acoustics.octave import acoustics.doppler import acoustics.signal import acoustics.directivity import acoustics.building import acoustics.room import acoustics.standards import acoustics.cepstrum from acoustics._signal import Signal
Python
0
@@ -47,16 +47,37 @@ ..%0A%0A%22%22%22%0A +import acoustics.aio%0A import a @@ -117,37 +117,203 @@ ics. -utils%0Aimport acoustics.octave +atmosphere%0Aimport acoustics.bands%0Aimport acoustics.building%0Aimport acoustics.cepstrum%0Aimport acoustics.criterion%0Aimport acoustics.decibel%0Aimport acoustics.descriptors%0Aimport acoustics.directivity %0Aimp @@ -351,22 +351,25 @@ oustics. -signal +generator %0Aimport @@ -382,45 +382,116 @@ ics. -directivity%0Aimport acoustics.building +imaging%0Aimport acoustics.octave%0Aimport acoustics.power%0Aimport acoustics.quantity%0Aimport acoustics.reflection %0Aimp @@ -531,16 +531,13 @@ cs.s -tandards +ignal %0Aimp @@ -550,24 +550,77 @@ oustics. -cepstrum +turbulence%0A#import acoustics.utils%0Aimport acoustics.weighting %0A%0Afrom a
68a37792fd7c5a197758aff738a69c7ce08b4a8b
Add manhole module
txircd/modules/manhole.py
txircd/modules/manhole.py
Python
0.000001
@@ -0,0 +1,435 @@ +from twisted.conch.manhole_tap import makeService%0A%0Aclass Spawner(object):%0A def __init__(self, ircd):%0A self.manhole = makeService(%7B%0A 'namespace': %7B'ircd': ircd%7D,%0A 'passwd': 'manhole.passwd',%0A 'telnetPort': None,%0A 'sshPort': '65432'%0A %7D)%0A %0A def spawn(self):%0A self.manhole.startService()%0A return %7B%7D%0A%0A def cleanup(self):%0A self.manhole.stopService()%0A
513af3716c596bb67c0f6552824b854b3735858c
Add simple tests for password strength and sensitivity to MINIMUM_ZXCVBN_SCORE setting
corehq/apps/domain/tests/test_password_strength.py
corehq/apps/domain/tests/test_password_strength.py
Python
0
@@ -0,0 +1,1160 @@ +from django import forms%0Afrom django.test import SimpleTestCase, override_settings%0A%0Afrom corehq.apps.domain.forms import clean_password%0A%0A%0Aclass PasswordStrengthTest(SimpleTestCase):%0A%0A @override_settings(MINIMUM_ZXCVBN_SCORE=2)%0A def test_score_0_password(self):%0A self.assert_bad_password(PASSWORDS_BY_STRENGTH%5B0%5D)%0A%0A @override_settings(MINIMUM_ZXCVBN_SCORE=2)%0A def test_score_1_password(self):%0A self.assert_bad_password(PASSWORDS_BY_STRENGTH%5B1%5D)%0A%0A @override_settings(MINIMUM_ZXCVBN_SCORE=2)%0A def test_score_2_password(self):%0A self.assert_good_password(PASSWORDS_BY_STRENGTH%5B2%5D)%0A%0A @override_settings(MINIMUM_ZXCVBN_SCORE=3)%0A def test_sensitivity_to_minimum_zxcvbn_score_setting(self):%0A self.assert_bad_password(PASSWORDS_BY_STRENGTH%5B2%5D)%0A%0A def assert_good_password(self, password):%0A self.assertEqual(clean_password(password), password)%0A%0A def assert_bad_password(self, password):%0A with self.assertRaises(forms.ValidationError):%0A clean_password(password)%0A%0A%0APASSWORDS_BY_STRENGTH = %7B%0A 0: 's3cr3t',%0A 1: 'password7',%0A 2: 'aljfzpo',%0A 3: '1234mna823',%0A 4: ')(%5E#:LKNVA%5E',%0A%7D%0A
e4a5761d997bee3eefad80c6b92c4c4a0c3568fd
Create day_14_part_1.py
day_14_part_1.py
day_14_part_1.py
Python
0.000198
@@ -0,0 +1,653 @@ +import hashlib, re%0Asalt = %22yjdafjpo%22%0Akeys = %5B%5D%0Ai = 0%0A%0Awhile len(keys) %3C 64:%0A first_key = hashlib.md5((salt + str(i)).encode(%22utf-8%22)).hexdigest()%0A # for every 3-gram in key%0A m1 = re.search(r'(.)%5C1%7B2,2%7D', first_key)%0A if m1:%0A for j in range(i + 1, i + 1001):%0A second_key = hashlib.md5((salt + str(j)).encode(%22utf-8%22)).hexdigest()%0A m2 = re.search(r'(.)%5C1%7B4,4%7D', second_key)%0A if m2:%0A if m2.group()%5B:3%5D == m1.group():%0A keys.append((i, first_key))%0A print(i, first_key)%0A break%0A i += 1%0Aprint(%2264th key found at index: %22, keys%5B-1%5D%5B0%5D)%0A
a2ffd8ea0b2b1b7ace6ed5b37dd76d9dd9063d25
Add utility function.
yaka/web/util.py
yaka/web/util.py
Python
0
@@ -0,0 +1,84 @@ +%0Adef get_object_or_404(cls, *args):%0A return cls.query.filter(*args).first_or_404()%0A
cdc311adcd05e5292f61bf5718ba68dceb4121c3
install requirements
tornado/setup.py
tornado/setup.py
import subprocess
import sys
import setup_util
import os
from os.path import expanduser

home = expanduser("~")
cwd = "%s/FrameworkBenchmarks/tornado" % home

def start(args):
  setup_util.replace_text(
    cwd + "/server.py", "127.0.0.1", args.database_host)

  subprocess.Popen("python %s/FrameworkBenchmarks/tornado/server.py --port=8000" % home, shell=True, cwd=cwd)
  subprocess.Popen("python %s/FrameworkBenchmarks/tornado/server.py --port=8001" % home, shell=True, cwd=cwd)
  subprocess.Popen("python %s/FrameworkBenchmarks/tornado/server.py --port=8002" % home, shell=True, cwd=cwd)
  subprocess.Popen("python %s/FrameworkBenchmarks/tornado/server.py --port=8003" % home, shell=True, cwd=cwd)
  subprocess.check_call("sudo /usr/local/nginx/sbin/nginx -c " + home + "/FrameworkBenchmarks/php/deploy/nginx.conf", shell=True)
  return 0

def stop():
  try:
    subprocess.call("sudo /usr/local/nginx/sbin/nginx -s stop", shell=True)
  except subprocess.CalledProcessError:
    #TODO: Better handle exception.
    pass

  p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  out, err = p.communicate()
  for line in out.splitlines():
    if 'server.py' in line:
      try:
        pid = int(line.split(None, 2)[1])
        os.kill(pid, 9)
      except OSError:
        pass

  return 0
Python
0
@@ -261,16 +261,81 @@ _host)%0A%0A + subprocess.check_call(%22pip install -r %25s/requirements.txt%22)%0A%0A subp
fd6fe20c74463d88b957d22d0f9f2f0316a489cf
add energy_future_csv
amaascore/csv_upload/assets/energy_future.py
amaascore/csv_upload/assets/energy_future.py
Python
0.999796
@@ -0,0 +1,2003 @@ +import logging.config%0Aimport csv%0A%0Afrom amaascore.tools.csv_tools import csv_stream_to_objects%0Afrom amaascore.assets.energy_future import EnergyFuture%0Afrom amaascore.assets.interface import AssetsInterface%0Afrom amaasutils.logging_utils import DEFAULT_LOGGING%0A%0Aclass EnergyFutureUploader(object):%0A%0A def __init__(self):%0A pass%0A%0A @staticmethod%0A def json_handler(orderedDict, params):%0A Dict = dict(orderedDict)%0A for key, var in params.items():%0A Dict%5Bkey%5D=var%0A asset_id = Dict.pop('asset_id', None)%0A asset_status = 'Active'%0A energy_future = EnergyFuture(asset_id=asset_id, asset_status=asset_status, **dict(Dict))%0A return energy_future%0A%0A @staticmethod%0A def upload(asset_manager_id, client_id, csvpath):%0A %22%22%22convert csv file rows to objects and insert;%0A asset_manager_id and client_id from the UI (login)%22%22%22%0A interface = AssetsInterface()%0A logging.config.dictConfig(DEFAULT_LOGGING)%0A logger = logging.getLogger(__name__)%0A params = %7B'asset_manager_id': asset_manager_id, 'client_id': client_id%7D%0A with open(csvpath) as csvfile:%0A energy_futures = csv_stream_to_objects(stream=csvfile, json_handler=EnergyFutureUploader.json_handler, **params)%0A for energy_future in energy_futures:%0A interface.new(energy_future)%0A logger.info('Creating new equity %25s successfully', energy_future.display_name)%0A%0A @staticmethod%0A def download(asset_manager_id, asset_id_list):%0A %22%22%22retrieve the assets mainly for test purposes%22%22%22%0A interface = AssetsInterface()%0A logging.config.dictConfig(DEFAULT_LOGGING)%0A logger = logging.getLogger(__name__)%0A energy_futures = %5B%5D%0A for asset_id in asset_id_list:%0A energy_futures.append(interface.retrieve(asset_manager_id=asset_manager_id, asset_id=asset_id))%0A interface.deactivate(asset_manager_id=asset_manager_id, asset_id=asset_id)%0A return energy_futures%0A%0A
a68b6c46b7bfe16ecf83cc21398d1746275b03e2
add a convert tool
convert_olddb.py
convert_olddb.py
Python
0.000001
@@ -0,0 +1,577 @@ +import sqlalchemy%0Aimport os%0Afrom photomanager.db.dbutils import get_db_session%0Afrom photomanager.db.models import ImageMeta%0A%0A%0Adef do_convert(db_name):%0A db_session = get_db_session(db_name)%0A%0A image_metas = db_session.query(ImageMeta)%0A for meta in image_metas:%0A filename = meta.filename%0A dirname = os.path.dirname(filename)%0A basename = os.path.basename(filename)%0A print(f%22%7Bfilename%7D split to %7Bdirname%7D and %7Bbasename%7D%22)%0A meta.folder = dirname%0A meta.filename = basename%0A%0A db_session.commit()%0A%0A%0A%0Ado_convert(%22/home/zhangchi/data/Photos/pmindex.db%22)%0A
8f897ea096ca4f7e0ee1a920569d18c8bb4a184d
Create SECURITYHUB_ENABLED.py
python-rdklib/SECURITYHUB_ENABLED/SECURITYHUB_ENABLED.py
python-rdklib/SECURITYHUB_ENABLED/SECURITYHUB_ENABLED.py
Python
0.000002
@@ -0,0 +1,2079 @@ +%22%22%22%0A#####################################%0A## Gherkin ##%0A#####################################%0A%0ARule Name:%0A SECURITYHUB_ENABLED%0A%0ADescription:%0A Checks that AWS Security Hub is enabled for an AWS Account. The rule is NON_COMPLIANT if AWS Security Hub is not enabled.%0A%0ARationale:%0A AWS Security Hub gives you a comprehensive view of your high-priority security alerts, and compliance status across AWS accounts.%0A%0AIndicative Severity:%0A Medium%0A%0ATrigger:%0A Periodic%0A%0AReports on:%0A AWS::::Account%0A%0ARule Parameters:%0A None%0A%0AScenarios:%0A Scenario: 1%0A Given: SecurityHub is enabled for an AWS Account.%0A Then: Return COMPLIANT%0A%0A Scenario: 2%0A Given: SecurityHub is not enabled for an AWS Account.%0A Then: Return NON_COMPLIANT%0A%0A%22%22%22%0Aimport botocore%0Afrom rdklib import Evaluator, Evaluation, ConfigRule, ComplianceType%0A%0AAPPLICABLE_RESOURCES = %5B'AWS::::Account'%5D%0A%0Aclass SECURITYHUB_ENABLED(ConfigRule):%0A%0A # Set this to false to prevent unnecessary API calls%0A delete_old_evaluations_on_scheduled_notification = False%0A%0A def evaluate_periodic(self, event, client_factory, valid_rule_parameters):%0A client = client_factory.build_client('securityhub')%0A evaluations = %5B%5D%0A try:%0A security_hub_enabled = client.describe_hub()%0A # Scenario:1 SecurityHub is enabled for an AWS Account.%0A if security_hub_enabled:%0A evaluations.append(Evaluation(ComplianceType.COMPLIANT, event%5B'accountId'%5D, APPLICABLE_RESOURCES%5B0%5D))%0A except botocore.exceptions.ClientError as error:%0A # Scenario:2 SecurityHub is not enabled for an AWS Account.%0A if error.response%5B'Error'%5D%5B'Code'%5D == 'InvalidAccessException':%0A evaluations.append(Evaluation(ComplianceType.NON_COMPLIANT, event%5B'accountId'%5D, APPLICABLE_RESOURCES%5B0%5D))%0A else:%0A raise error%0A return evaluations%0A%0Adef lambda_handler(event, context):%0A my_rule = SECURITYHUB_ENABLED()%0A evaluator = Evaluator(my_rule, APPLICABLE_RESOURCES)%0A return evaluator.handle(event, context)%0A
446f5785a5db301de68adffe9e114b3ebafe0b6f
add tests for removing failed jobs
frappe/tests/test_background_jobs.py
frappe/tests/test_background_jobs.py
Python
0.000009
@@ -0,0 +1,756 @@ +import unittest%0A%0Afrom rq import Queue%0A%0Aimport frappe%0Afrom frappe.core.page.background_jobs.background_jobs import remove_failed_jobs%0Afrom frappe.utils.background_jobs import get_redis_conn%0A%0A%0Aclass TestBackgroundJobs(unittest.TestCase):%0A%09def test_remove_failed_jobs(self):%0A%09%09frappe.enqueue(method=%22frappe.tests.test_background_jobs.fail_function%22)%0A%0A%09%09conn = get_redis_conn()%0A%09%09queues = Queue.all(conn)%0A%0A%09%09for queue in queues:%0A%09%09%09if queue.name == %22default%22:%0A%09%09%09%09fail_registry = queue.failed_job_registry%0A%09%09%09%09self.assertGreater(fail_registry.count, 0)%0A%0A%09%09remove_failed_jobs()%0A%0A%09%09for queue in queues:%0A%09%09%09if queue.name == %22default%22:%0A%09%09%09%09fail_registry = queue.failed_job_registry%0A%09%09%09%09self.assertEqual(fail_registry.count, 0)%0A%0A%0Adef fail_function():%0A%09return 1 / 0%0A
5ba8c63daee6c0cb8667c916e10fd813d2cc8d88
Add in the cmd module; this is simple and can be expanded, although the basic bases are covered
salt/modules/cmd.py
salt/modules/cmd.py
Python
0
@@ -0,0 +1,1321 @@ +'''%0AA module for shelling out%0A%0AKeep in mind that this module is insecure, in that it can give whomever has%0Aaccess to the master root execution access to all salt minions%0A'''%0A%0Aimport subprocess%0Aimport tempfile%0A%0Adef run(cmd):%0A '''%0A Execute the passed command and return the output%0A '''%0A return subprocess.Popen(cmd,%0A shell=True,%0A stdout=subprocess.PIPE,%0A stderr=subprocess.STDOUT).communicate()%5B0%5D%0A%0Adef run_stdout(cmd):%0A '''%0A Execute a command, and only return the standard out%0A '''%0A return subprocess.Popen(cmd,%0A shell=True,%0A stdout=subprocess.PIPE).communicate()%5B0%5D%0A%0Adef run_stderr(cmd):%0A '''%0A Executa a command and only return the %0A '''%0A return subprocess.Popen(cmd,%0A shell=True,%0A stderr=subprocess.PIPE).communicate()%5B0%5D%0A%0Adef exec_code(lang, code):%0A '''%0A Pass in two strings, the first naming the executable language, aka -%0A python2, python3, ruby, perl, lua, etc. the second string containing%0A the code you wish to execute. The stdout and stderr will be returned%0A '''%0A cfn = tempfile.mkstemp()%0A open(cfn, 'w+').write(code)%0A return subprocess.Popen(lang + ' ' + cfn,%0A shell=True,%0A stdout=subprocess.PIPE,%0A stderr=subprocess.STDOUT).communicate()%5B0%5D%0A
e3efa4483f43deb9d2e8515ef3a797c03626f892
add serialization tests for pandas (#844)
distributed/protocol/tests/test_pandas.py
distributed/protocol/tests/test_pandas.py
Python
0
@@ -0,0 +1,1830 @@ +from __future__ import print_function, division, absolute_import%0A%0Afrom zlib import crc32%0A%0Aimport pandas as pd%0Aimport pandas.util.testing as tm%0Aimport pytest%0A%0Afrom dask.dataframe.utils import assert_eq%0A%0Afrom distributed.protocol import (serialize, deserialize, decompress, dumps,%0A loads, to_serialize)%0Afrom distributed.protocol.utils import BIG_BYTES_SHARD_SIZE%0Afrom distributed.utils import tmpfile%0Afrom distributed.utils_test import slow%0Afrom distributed.protocol.compression import maybe_compress%0A%0Adfs = %5B%0A pd.DataFrame(%7B%7D),%0A pd.DataFrame(%7B'x': %5B1, 2, 3%5D%7D),%0A pd.DataFrame(%7B'x': %5B1., 2., 3.%5D%7D),%0A pd.DataFrame(%7B0: %5B1, 2, 3%5D%7D),%0A pd.DataFrame(%7B'x': %5B1., 2., 3.%5D, 'y': %5B4., 5., 6.%5D%7D),%0A pd.DataFrame(%7B'x': %5B1., 2., 3.%5D%7D, index=pd.Index(%5B4, 5, 6%5D, name='bar')),%0A pd.Series(%5B1., 2., 3.%5D),%0A pd.Series(%5B1., 2., 3.%5D, name='foo'),%0A pd.Series(%5B1., 2., 3.%5D, name='foo',%0A index=%5B4, 5, 6%5D),%0A pd.Series(%5B1., 2., 3.%5D, name='foo',%0A index=pd.Index(%5B4, 5, 6%5D, name='bar')),%0A pd.DataFrame(%7B'x': %5B'a', 'b', 'c'%5D%7D),%0A pd.DataFrame(%7B'x': %5Bb'a', b'b', b'c'%5D%7D),%0A pd.DataFrame(%7B'x': pd.Categorical(%5B'a', 'b', 'a'%5D, ordered=True)%7D),%0A pd.DataFrame(%7B'x': pd.Categorical(%5B'a', 'b', 'a'%5D, ordered=False)%7D),%0A tm.makeCategoricalIndex(),%0A tm.makeCustomDataframe(5, 3),%0A tm.makeDataFrame(),%0A tm.makeDateIndex(),%0A tm.makeMissingDataframe(),%0A tm.makeMixedDataFrame(),%0A tm.makeObjectSeries(),%0A tm.makePeriodFrame(),%0A tm.makeRangeIndex(),%0A tm.makeTimeDataFrame(),%0A tm.makeTimeSeries(),%0A tm.makeUnicodeIndex(),%0A%5D%0A%0A%[email protected]('df', dfs)%0Adef test_dumps_serialize_numpy(df):%0A header, frames = serialize(df)%0A if 'compression' in header:%0A frames = decompress(header, frames)%0A df2 = deserialize(header, frames)%0A%0A assert_eq(df, df2)%0A
9d94f581b803e5050f0bf76436cb97d92184b4fb
add tests for country model
openspending/tests/model/test_country.py
openspending/tests/model/test_country.py
Python
0
@@ -0,0 +1,919 @@ +import json%0Aimport urllib2%0A%0Afrom flask import url_for, current_app%0A%0Afrom openspending.core import db%0Afrom openspending.model.country import Country%0Afrom openspending.tests.base import ControllerTestCase%0A%0Afrom openspending.command.geometry import create as createcountries%0A%0A%0A%0A%0A%0Aclass TestCountryModel(ControllerTestCase):%0A def setUp(self):%0A super(TestCountryModel, self).setUp()%0A createcountries()%0A%0A%0A def tearDown(self):%0A pass%0A%0A def test_all_countries(self):%0A result = Country.get_all_json()%0A assert len(result%5B'data'%5D) == 249%0A assert len(result%5B'data'%5D%5B0%5D%5B'regions'%5D) == 8%0A%0A def test_properties_regions(self):%0A tempobj = Country.by_gid(1)%0A assert len(tempobj.regions.keys()) == 10%0A assert tempobj.label == %22Aruba%22%0A%0A def test_properties_regions(self):%0A tempobj = Country.by_gid(1)%0A assert tempobj.sovereignty == %22Netherlands%22%0A%0A%0A
97d6cce2a5c0c905f0c33c41316c8e65eaed0e08
Update way we synchronize from citybik.es
update-bikestations.py
update-bikestations.py
Python
0
@@ -0,0 +1,1634 @@ +#!/usr/bin/env python%0A%0Afrom multiprocessing.pool import ThreadPool%0Aimport requests%0Aimport json%0A%0Abaseurl = 'http://api.citybik.es/v2/networks/'%0Anetworkids = %5B 'bixi-montreal', 'bixi-toronto', 'capital-bixi', 'hubway',%0A 'capital-bikeshare', 'citi-bike-nyc', 'barclays-cycle-hire' %5D%0A%0Adef process_network(networkid):%0A r = requests.get(baseurl + networkid)%0A network = r.json()%5B'network'%5D%0A%0A # output just the stations that are installed, only the metadata we care%0A # about%0A output_stations = %5B%5D%0A for station in network%5B'stations'%5D:%0A # some networks list %22uninstalled%22 stations. don't want those%0A if not station%5B'extra'%5D.get('installed') or station%5B'extra'%5D%5B'installed'%5D:%0A output_stations.append(%7B'id': station%5B'id'%5D,%0A 'name': station%5B'name'%5D,%0A 'freeBikes': station%5B'free_bikes'%5D,%0A 'emptySlots': station%5B'empty_slots'%5D,%0A 'latitude': station%5B'latitude'%5D,%0A 'longitude': station%5B'longitude'%5D%7D)%0A%0A open('%25s.json' %25 networkid, 'w').write(json.dumps(output_stations))%0A%0A return network%5B'location'%5D%0A%0A%0Apool = ThreadPool()%0Alocations = pool.map(process_network, networkids)%0Awith open('locations.js', 'w') as f:%0A f.write('var networks = %7B')%0A for (i, networkid) in enumerate(networkids):%0A location = locations%5Bi%5D%0A f.write('%22%25s%22: %7B name: %22%25s%22, latitude: %25s, longitude: %25s %7D,' %25 (%0A networkid, location%5B'city'%5D, location%5B'latitude'%5D,%0A location%5B'longitude'%5D))%0A%0A f.write('%7D;')%0A
064c2b53089611e838934c76d8fba19eaad85e75
add cot verify test stub
scriptworker/test/test_cot_verify.py
scriptworker/test/test_cot_verify.py
Python
0
@@ -0,0 +1,473 @@ +#!/usr/bin/env python%0A# coding=utf-8%0A%22%22%22Test scriptworker.cot.verify%0A%22%22%22%0Aimport logging%0Aimport pytest%0Afrom scriptworker.exceptions import CoTError%0Aimport scriptworker.cot.verify as verify%0Afrom . import rw_context%0A%0Aassert rw_context # silence pyflakes%0A%0A# TODO remove once we use%0Aassert CoTError, verify%0Aassert pytest%0A%0Alog = logging.getLogger(__name__)%0A%0A%0A# constants helpers and fixtures %7B%7B%7B1%[email protected]_fixture(scope='function')%0Adef chain_of_trust(rw_context):%0A pass%0A
9376ab25e4b5713b8b354a3a03c37b1e356fa5c2
Create unlock-device.py
unlock-device.py
unlock-device.py
Python
0.000001
@@ -0,0 +1,323 @@ +from com.dtmilano.android.viewclient import ViewClient%0A%0Adevice, serial = ViewClient.connectToDeviceOrExit()%0Aif device.checkConnected():%0A print(%22Device connected - serial: %7B%7D%22.format(serial))%0A print(%22Device is going to be unlocked...%22)%0A device.wake()%0A device.unlock()%0Aelse:%0A print(%22Device is not connected!%22)%0A
47b93bee1ebcf5fcf6ea2ff3ad7eaabb831f692c
Add WATERS_Utils folder
ET_Utils/WATERS_Utils/__init__.py
ET_Utils/WATERS_Utils/__init__.py
Python
0
@@ -0,0 +1 @@ +%0A
d9af619ee1f614d5e1a5cda5d37f5757d91567c9
resource_impala uses resource_sql
blaze/sql.py
blaze/sql.py
from __future__ import absolute_import, division, print_function

from datashape.predicates import isscalar
import sqlalchemy
from sqlalchemy import Table, MetaData
from sqlalchemy.engine import Engine
from toolz import first, keyfilter

from .compute.sql import select
from .data.sql import SQL, dispatch
from .expr import Expr, Projection, Field, UnaryOp, BinOp, Join
from .data.sql import SQL, dispatch
from .compatibility import basestring, _strtypes
from .resource import resource
from .utils import keywords

import sqlalchemy as sa

__all__ = 'SQL',


@dispatch((Field, Projection, Expr, UnaryOp), SQL)
def compute_up(t, ddesc, **kwargs):
    return compute_up(t, ddesc.table, **kwargs)


@dispatch((BinOp, Join), SQL, sa.sql.Selectable)
def compute_up(t, lhs, rhs, **kwargs):
    return compute_up(t, lhs.table, rhs, **kwargs)


@dispatch((BinOp, Join), sa.sql.Selectable, SQL)
def compute_up(t, lhs, rhs, **kwargs):
    return compute_up(t, lhs, rhs.table, **kwargs)


@dispatch((BinOp, Join), SQL, SQL)
def compute_up(t, lhs, rhs, **kwargs):
    return compute_up(t, lhs.table, rhs.table, **kwargs)


def engine_of(x):
    if isinstance(x, Engine):
        return x
    if isinstance(x, SQL):
        return x.engine
    if isinstance(x, MetaData):
        return x.bind
    if isinstance(x, Table):
        return x.metadata.bind
    raise NotImplementedError("Can't deterimine engine of %s" % x)


@dispatch(Expr, sa.sql.ClauseElement)
def post_compute(expr, query, scope=None):
    """ Execute SQLAlchemy query against SQLAlchemy engines

    If the result of compute is a SQLAlchemy query then it is likely that the
    data elements are themselves SQL objects which contain SQLAlchemy
    engines. We find these engines and, if they are all the same, run the
    query against these engines and return the result.
    """
    if not all(isinstance(val, (SQL, Engine, Table)) for val in scope.values()):
        return query

    engines = set(filter(None, map(engine_of, scope.values())))

    if not engines:
        return query

    if len(set(map(str, engines))) != 1:
        raise NotImplementedError("Expected single SQLAlchemy engine")

    engine = first(engines)

    with engine.connect() as conn:
        # Perform query
        result = conn.execute(select(query)).fetchall()

    if isscalar(expr.dshape):
        return result[0][0]
    if isscalar(expr.dshape.measure):
        return [x[0] for x in result]
    return result


@dispatch(SQL)
def drop(s):
    s.table.drop(s.engine)


@dispatch(SQL, basestring)
def create_index(s, column, name=None, unique=False):
    if name is None:
        raise ValueError('SQL indexes must have a name')
    sa.Index(name, getattr(s.table.c, column), unique=unique).create(s.engine)


@dispatch(SQL, list)
def create_index(s, columns, name=None, unique=False):
    if name is None:
        raise ValueError('SQL indexes must have a name')
    args = name,
    args += tuple(getattr(s.table.c, column) for column in columns)
    sa.Index(*args, unique=unique).create(s.engine)


@resource.register('(sqlite|postgresql|mysql|mysql\+pymysql)://.+')
def resource_sql(uri, *args, **kwargs):
    if args and isinstance(args[0], _strtypes):
        table_name, args = args[0], args[1:]
        return SQL(uri, table_name, *args, **kwargs)
    else:
        kwargs = keyfilter(keywords(sqlalchemy.create_engine).__contains__, kwargs)
        return sqlalchemy.create_engine(uri, *args, **kwargs)


@resource.register('impala://.+')
def resource_sql(uri, table_name, *args, **kwargs):
    try:
        import impala.sqlalchemy
    except ImportError:
        raise ImportError("Please install or update `impyla` library")
    return SQL(uri, table_name, *args, **kwargs)


from .compute.pyfunc import broadcast_collect


@dispatch(Expr, (SQL, sa.sql.elements.ClauseElement))
def optimize(expr, _):
    return broadcast_collect(expr)
Python
0.999998
@@ -3522,27 +3522,18 @@ rce_ -sql(uri, table_name +impala(uri , *a @@ -3696,35 +3696,32 @@ return -SQL(uri, table_name +resource_sql(uri , *args,
60f76f01c6961f6aceb3b67643057798aed056c7
Add python script for validating version files on vaadin.com
scripts/ValidateVaadinDownload.py
scripts/ValidateVaadinDownload.py
Python
0.000001
@@ -0,0 +1,2377 @@ +#coding=UTF-8%0A%0Aimport argparse, sys%0Afrom urllib.request import urlopen%0A%0Aparse = argparse.ArgumentParser(description=%22Check vaadin.com version lists%22)%0Aparse.add_argument(%22version%22, help=%22Released Vaadin version number%22)%0A%0Aargs = parse.parse_args()%0Aif hasattr(args, %22echo%22):%0A%09print(args.echo)%0A%09sys.exit(1)%0A%0Aprerelease = None%0A(major, minor, maintenance) = args.version.split(%22.%22, 2)%0A%0Aif %22.%22 in maintenance:%0A%09(maintenance, prerelease) = maintenance.split(%22.%22, 1)%0A%0A# Version without prerelease tag%0Aversion = %22%25s.%25s.%25s%22 %25 (major, minor, maintenance)%0AisPrerelease = prerelease is not None%0A%0Afailed = False%0A%0Avaadin7Latest = %22http://vaadin.com/download/LATEST7%22%0Avaadin7Versions = %22http://vaadin.com/download/VERSIONS_7%22%0Avaadin6Latest = %22http://vaadin.com/download/LATEST%22%0AvaadinPrerelease = %22http://vaadin.com/download/PRERELEASES%22%0A%0Atry:%0A%09latest = urlopen(vaadin7Latest).read().decode().split(%22%5Cn%22)%0A%09releaseRow = %22release/%25s.%25s/%25s%22 %25 (major, minor, version)%0A%0A%09assert (version in latest%5B0%5D) %5E isPrerelease, %22Latest version mismatch. %25s: %25s, was: %25s%22 %25 (%22should not be%22 if isPrerelease else %22should be%22, args.version, latest%5B0%5D)%0A%09assert (releaseRow in latest%5B1%5D) %5E isPrerelease, %22Release row mismatch; %25s: %25s, was %25s%22 %25 (%22should not be%22 if isPrerelease else %22should be%22, releaseRow, latest%5B1%5D)%0Aexcept Exception as e:%0A%09failed = True%0A%09print(%22Latest version was not correctly updated: %25s%22 %25 (e))%0A%0Atry:%0A%09assert %22%25s,%22 %25 (args.version) in urlopen(vaadin7Versions).read().decode().split(%22%5Cn%22), %22Released version not in version list%22%0Aexcept Exception as e:%0A%09if isPrerelease:%0A%09%09print(%22Prerelease version needs to be added manually to versions!%22)%0A%09else:%0A%09%09failed = True%0A%09%09print(e)%0A%0Atry:%0A%09latest = urlopen(vaadin6Latest).read().decode().split(%22%5Cn%22)%0A%09releaseRow = %22release/6.8/6.8.%22%0A%0A%09assert (%226.8.%22 in latest%5B0%5D), %22Latest version mismatch; should be: %25sX, was: %25s%22 %25 (%226.8.%22, latest%5B0%5D)%0A%09assert (releaseRow in latest%5B1%5D), %22Release row mismatch; should be: %25sX, was %25s%22 %25 (releaseRow, latest%5B1%5D)%0Aexcept Exception as e:%0A%09failed = True%0A%09print(%22Latest Vaadin 6 version was updated by release. %25s%22 %25 (e))%0A%0Atry:%0A%09latest = urlopen(vaadinPrerelease).read().decode().split(%22%5Cn%22)%0A%09assert (args.version in latest%5B0%5D) or not isPrerelease, %22%25s: %25s, was: %25s%22 %25 (%22should be%22, args.version, latest%5B0%5D)%0Aexcept Exception as e:%0A%09print(%22Prerelease file was not correctly updated: %25s%22 %25 (e))%0A%0Asys.exit(1 if failed else 0)%0A
665b3372e089fda3dde104b0754efa65a87a9bd2
Test harness for checking wtf the HTTPClientResponseHandler is actually doing with data from the network
Sketches/MPS/Bookmarks/TestHTTPResponseHandler.py
Sketches/MPS/Bookmarks/TestHTTPResponseHandler.py
Python
0
@@ -0,0 +1,560 @@ +#!/usr/bin/python%0A%0Aimport base64%0Afrom Kamaelia.File.ReadFileAdaptor import ReadFileAdaptor %0Afrom Kamaelia.File.Writing import SimpleFileWriter%0Afrom Kamaelia.Chassis.Pipeline import Pipeline%0Afrom TwitterStream import HTTPClientResponseHandler%0Afrom Kamaelia.Util.PureTransformer import PureTransformer%0Afrom Kamaelia.Util.Console import ConsoleEchoer%0A%0APipeline(%0A ReadFileAdaptor(%22tweets.b64.txt%22, readmode=%22line%22),%0A PureTransformer(base64.b64decode),%0A HTTPClientResponseHandler(suppress_header = True),%0A SimpleFileWriter(%22tweets.b64raw.txt%22),%0A).run()%0A
53467bd7d4c9c12b73c66244a91f31f0dbadeeec
Add pagerteam tests file which had been missed despite its existence
hc/front/tests/test_add_pagerteam.py
hc/front/tests/test_add_pagerteam.py
Python
0
@@ -0,0 +1,1041 @@ +from hc.api.models import Channel%0Afrom hc.test import BaseTestCase%0A%0A%0Aclass AddPagerTeamTestCase(BaseTestCase):%0A url = %22/integrations/add_pagerteam/%22%0A%0A def test_instructions_work(self):%0A self.client.login(username=%[email protected]%22, password=%22password%22)%0A r = self.client.get(self.url)%0A self.assertContains(r, %22PagerTeam%22)%0A%0A def test_it_works(self):%0A form = %7B%22value%22: %22http://example.org%22%7D%0A%0A self.client.login(username=%[email protected]%22, password=%22password%22)%0A r = self.client.post(self.url, form)%0A self.assertRedirects(r, %22/integrations/%22)%0A%0A c = Channel.objects.get()%0A self.assertEqual(c.kind, %22pagerteam%22)%0A self.assertEqual(c.value, %22http://example.org%22)%0A self.assertEqual(c.project, self.project)%0A%0A def test_it_rejects_bad_url(self):%0A form = %7B%22value%22: %22not an URL%22%7D%0A%0A self.client.login(username=%[email protected]%22, password=%22password%22)%0A r = self.client.post(self.url, form)%0A self.assertContains(r, %22Enter a valid URL%22)%0A
9f9916d662d1ab130c9685c415c25b19a14733d7
Add example to illustrate different optimization procedures
examples/svm_objectives.py
examples/svm_objectives.py
Python
0
@@ -0,0 +1,1821 @@ +# showing the relation between cutting plane and primal objectives%0A%0Aimport numpy as np%0Aimport matplotlib.pyplot as plt%0A%0Afrom sklearn.datasets import load_digits%0Afrom sklearn.cross_validation import train_test_split%0A%0Afrom pystruct.problems import CrammerSingerSVMProblem%0Afrom pystruct.learners import (StructuredSVM, OneSlackSSVM,%0A SubgradientStructuredSVM)%0A%0A# do a binary digit classification%0Adigits = load_digits()%0AX, y = digits.data, digits.target%0A%0AX /= X.max()%0A%0AX_train, X_test, y_train, y_test = train_test_split(X, y, random_state=0)%0A%0A# we add a constant 1 feature for the bias%0AX_train_bias = np.hstack(%5BX_train, np.ones((X_train.shape%5B0%5D, 1))%5D)%0A%0Apbl = CrammerSingerSVMProblem(n_features=X_train_bias.shape%5B1%5D, n_classes=10)%0An_slack_svm = StructuredSVM(pbl, verbose=0, check_constraints=False, C=20,%0A max_iter=500, batch_size=10)%0Aone_slack_svm = OneSlackSSVM(pbl, verbose=0, check_constraints=False, C=20,%0A max_iter=1000, tol=0.001)%0Asubgradient_svm = SubgradientStructuredSVM(pbl, C=20, learning_rate=0.01,%0A max_iter=300, decay_exponent=0,%0A momentum=0, verbose=0)%0A%0A# n-slack cutting plane ssvm%0An_slack_svm.fit(X_train_bias, y_train)%0A%0A## 1-slack cutting plane ssvm%0Aone_slack_svm.fit(X_train_bias, y_train)%0A%0A# online subgradient ssvm%0Asubgradient_svm.fit(X_train_bias, y_train)%0A%0A#plt.plot(n_slack_svm.objective_curve_, label=%22n-slack lower bound%22)%0Aplt.plot(n_slack_svm.objective_curve_, label=%22n-slack lower bound%22)%0Aplt.plot(one_slack_svm.objective_curve_, label=%22one-slack lower bound%22)%0Aplt.plot(one_slack_svm.primal_objective_curve_, label=%22one-slack primal%22)%0Aplt.plot(subgradient_svm.objective_curve_, label=%22subgradient%22)%0Aplt.legend()%0Aplt.show()%0A
3a2e9a19feab0c882a9821b7ff555bd1e2693190
Test effect of change in Laplacian.
exp/sandbox/DeltaLaplacianExp.py
exp/sandbox/DeltaLaplacianExp.py
Python
0
@@ -0,0 +1,630 @@ +import numpy %0Aimport scipy.sparse %0Afrom apgl.graph import GraphUtils %0Afrom apgl.util.Util import Util %0A%0Anumpy.set_printoptions(suppress=True, precision=3)%0An = 10%0AW1 = scipy.sparse.rand(n, n, 0.5).todense()%0AW1 = W1.T.dot(W1)%0AW2 = W1.copy()%0A%0AW2%5B1, 2%5D = 1 %0AW2%5B2, 1%5D = 1 %0A%0Aprint(%22W1=%22+str(W1))%0Aprint(%22W2=%22+str(W2))%0A%0AL1 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W1))%0AL2 = GraphUtils.normalisedLaplacianSym(scipy.sparse.csr_matrix(W2))%0A%0AdeltaL = L2 - L1 %0A%0A%0Aprint(%22L1=%22+str(L1.todense()))%0Aprint(%22L2=%22+str(L2.todense()))%0Aprint(%22deltaL=%22+str(deltaL.todense()))%0A%0Aprint(%22rank(deltaL)=%22 + str(Util.rank(deltaL.todense())))