Dataset schema (column / dtype / length range or values):

  commit        stringlengths  40 to 40
  subject       stringlengths  1 to 3.25k
  old_file      stringlengths  4 to 311
  new_file      stringlengths  4 to 311
  old_contents  stringlengths  0 to 26.3k
  lang          stringclasses  3 values
  proba         float64        0 to 1
  diff          stringlengths  0 to 7.82k
8a1dff9437a4f013a96369a1fe174c505e8636cb
Add missing migration (fix #130)
puput/migrations/0004_auto_20170912_0928.py
puput/migrations/0004_auto_20170912_0928.py
Python
0
@@ -0,0 +1,521 @@
+# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-09-12 09:28
from __future__ import unicode_literals

from django.db import migrations
import django.db.models.manager


class Migration(migrations.Migration):

    dependencies = [
        ('puput', '0003_add_short_feed_description_to_blog_page'),
    ]

    operations = [
        migrations.AlterModelManagers(
            name='blogpage',
            managers=[
                ('extra', django.db.models.manager.Manager()),
            ],
        ),
    ]
eb5cd2704196e8b2231b524d35433a042618ba95
Update unit tests for /bi/
adagios/bi/tests.py
adagios/bi/tests.py
""" This file demonstrates writing tests using the unittest module. These will pass when you run "manage.py test". Replace this with more appropriate tests for your application. """ from django.test import TestCase from django.test.client import Client from adagios.bi import * class TestGraphs(TestCase): def testPNP4NagiosGraph_get_image_url(self): pnp = PNP4NagiosGraph('apc01.acme.com', 'Ping', 'rta') # pnp.get_image_url() class TestBusinessProcess(TestCase): def test_save_and_load(self): """ This test will test load/save of a business process. The procedure is as follows: * Load a business process * Save it * Make changes * Load it again, and verify changes were saved. """ bp_name = 'test_business_process' b = BusinessProcess(bp_name) b.load() # Append a dot to the bp name and save new_display_name = b.display_name or '' + "." b.display_name = new_display_name b.save() # Load bp again b = BusinessProcess(bp_name) b.load() self.assertEqual(b.display_name, new_display_name) def test_add_process(self): """ Test adding new processes to a current BP """ bp_name = 'test' sub_process_name = 'sub_process' sub_process_display_name = 'This is a subprocess of test' b = BusinessProcess(bp_name) b.add_process(sub_process_name, display_name=sub_process_display_name) for i in b.get_processes(): if i.name == sub_process_name and i.display_name == sub_process_display_name: return else: self.assertTrue( False, 'We tried adding a business process but could not find it afterwards') def test_hostgroup_bp(self): bp_name = 'test' hostgroup_name = 'acme-network' b = BusinessProcess(bp_name) b.add_process(hostgroup_name, 'hostgroup') def test_remove_process(self): """ Test removing a subprocess from a businessprocess """ bp_name = 'test' sub_process_name = 'sub_process' sub_process_display_name = 'This is a subprocess of test' b = BusinessProcess(bp_name) b.add_process(sub_process_name, display_name=sub_process_display_name) self.assertNotEqual([], b.processes) b.remove_process(sub_process_name) self.assertEqual([], b.processes) def test_get_all_processes(self): get_all_processes() def test_macros(self): bp = get_business_process('uniq test case', status_method="use_worst_state") macros_for_empty_process = { 'num_problems': 0, 'num_state_0': 0, 'num_state_1': 0, 'num_state_2': 0, 'num_state_3': 0, 'current_state': 3, 'friendly_state': 'unknown', 'percent_problems': 0, 'percent_state_3': 0, 'percent_state_2': 0, 'percent_state_1': 0, 'percent_state_0': 0 } self.assertEqual(3, bp.get_status()) self.assertEqual(macros_for_empty_process, bp.resolve_all_macros()) bp.add_process("always_ok", status_method="always_ok") bp.add_process("always_major", status_method="always_major") macros_for_nonempty_process = { 'num_problems': 1, 'num_state_0': 1, 'num_state_1': 0, 'num_state_2': 1, 'num_state_3': 0, 'current_state': 2, 'friendly_state': 'major problems', 'percent_problems': 50.0, 'percent_state_3': 0.0, 'percent_state_2': 50.0, 'percent_state_1': 0.0, 'percent_state_0': 50.0 } self.assertEqual(2, bp.get_status()) self.assertEqual(macros_for_nonempty_process, bp.resolve_all_macros()) def testPageLoad(self): c = Client() response = c.get('/bi/') self.assertEqual(response.status_code, 200)
Python
0
@@ -3964,32 +3964,301 @@
PageLoad(self):
+        self.loadPage('/bi')
        self.loadPage('/bi/add')
        self.loadPage('/bi/add/subprocess')
        self.loadPage('/bi/add/graph')

    def loadPage(self, url):
        """ Load one specific page, and assert if return code is not 200 """
        try:
            c = Clie
@@ -4262,16 +4262,20 @@
lient()
+
@@ -4295,14 +4295,11 @@
get( -'/bi/' +url )
@@ -4296,32 +4296,36 @@
et(url)
+            self.assertEqual
@@ -4350,10 +4350,174 @@
ode, 200 +, "Expected status code 200 for page %s" % url)
        except Exception, e:
            self.assertEqual(True, "Unhandled exception while loading %s: %s" % (url, e) )
-
8a3d757be17d395ba14ae7458036a78d10e3e212
Test to find out how to remove a file from a git repo.
holamundo.py
holamundo.py
Python
0
@@ -0,0 +1,102 @@
+#!/usr/bin/env python


def main():
    print("Hola mundo!!")


if __name__ == "__main__":
    main()
17aefac614890b1fe7079a76b803707ae3fbb832
Add perihelion
Project3/analysis/perihelion.py
Project3/analysis/perihelion.py
Python
0.999979
@@ -0,0 +1,2235 @@
+#!/bin/python3
# -*- coding: utf-8 -*-
from runner import Runner
import numpy as np
import matplotlib.pyplot as plt


class Perihelion(Runner):
    def setup(self):
        self['number of years'] = 100
        self['do save results'] = True
        self['do save any results'] = False
        self['use all planets'] = False
        self['save period'] = 1000
        self['use planets'] = ['Sun', 'Mercury']
        self['steps per year'] = 1e7
        self['freeze sun'] = True
        self['use two body approximation'] = False
        self['use relativistic correction'] = True
        self['method'] = 'verlet'
        self.get_planet('Sun')['position'] = [0.0, 0.0, 0.0]
        self.get_planet('Sun')['Velocity'] = [0.0, 0.0, 0.0]
        self.get_planet('Mercury')['velocity'] = [12.44, 0.0, 0.0]
        self.get_planet('Mercury')['position'] = [0.0, 0.3075, 0.0]

    def run(self):
        out, _ = self.run_simulation()
        print(out)
        # self.run_analysis()
        self.getPerihelion()

    def getPerihelion(self):
        timenangle = np.loadtxt("../data/precession.txt");
        print(timenangle.shape)
        print(timenangle[:, 1] - timenangle[0, 1])
        plt.plot(timenangle[:, 0], timenangle[:, 1], 'o')
        plt.xlabel("Time [yr]")
        plt.ylabel("Precession angle")
        plt.show()

    def findPerihelion(self):
        position = self.get_position()
        r = np.linalg.norm(position[:, :, 1] - position[:, :, 0], axis=1)
        # plt.plot(r)
        # plt.show()
        # argrelextrema()
        rfunc = interp1d(np.linspace(0,len(r)-1,len(r)),r, kind='slinear')
        r = rfunc(np.linspace(0,len(r)-1, len(r)))
        larger = np.roll(r, 1) - r > 1e-3
        smaller = np.roll(r, -1) - r > 1e-3
        minima = np.logical_and(larger, smaller)
        above_mean = r < r.mean()
        minima = np.logical_and(minima, above_mean)
        plt.plot(r)
        plt.plot(r*minima, 'o')
        plt.show()

        print(minima.sum())
        x, y = position[minima, 0:2, 1].T
        print(x.shape, y.shape)
        theta = np.arctan2(y, x)
        plt.plot(theta*180/np.pi)
        plt.show()

if __name__ == '__main__':
    with Perihelion() as mercury:
        mercury.run()
7197116b27fe6b2ba694e49d6d2b54698b5e1505
bump version
flask_github.py
flask_github.py
# -*- coding: utf-8 -*-
"""
    GitHub-Flask
    ============

    Authenticate users in your Flask app with GitHub.
"""
import logging
from urllib import urlencode
from urlparse import parse_qs
from functools import wraps

import requests
from flask import redirect, request, json

__version__ = '0.3.3'

logger = logging.getLogger(__name__)


class GitHubError(Exception):
    """Raised if a request fails to the GitHub API."""

    def __str__(self):
        try:
            message = self.response.json()['message']
        except Exception:
            message = None
        return "%s: %s" % (self.response.status_code, message)

    @property
    def response(self):
        """The :class:`~requests.Response` object for the request."""
        return self.args[0]


class GitHub(object):
    """
    Provides decorators for authenticating users with GitHub within a Flask
    application. Helper methods are also provided interacting with GitHub API.
    """
    BASE_URL = 'https://api.github.com/'
    BASE_AUTH_URL = 'https://github.com/login/oauth/'

    def __init__(self, app=None):
        if app is not None:
            self.app = app
            self.init_app(self.app)
        else:
            self.app = None

    def init_app(self, app):
        self.client_id = app.config['GITHUB_CLIENT_ID']
        self.client_secret = app.config['GITHUB_CLIENT_SECRET']
        self.callback_url = app.config['GITHUB_CALLBACK_URL']
        self.base_url = app.config.get('GITHUB_BASE_URL', self.BASE_URL)
        self.session = requests.session()

    def access_token_getter(self, f):
        """
        Registers a function as the access_token getter. Must return the
        access_token used to make requests to GitHub on the user's behalf.
        """
        self.get_access_token = f
        return f

    def get_access_token(self):
        raise NotImplementedError

    def authorize(self, scope=None):
        """
        Redirect to GitHub and request access to a user's data.
        """
        logger.debug("Called authorize()")
        params = {
            'client_id': self.client_id,
            'redirect_uri': self.callback_url,
        }
        if scope is not None:
            params['scope'] = scope

        url = self.BASE_AUTH_URL + 'authorize?' + urlencode(params)
        logger.debug("Redirecting to %s", url)
        return redirect(url)

    def authorized_handler(self, f):
        """
        Decorator for the route that is used as the callback for authorizing
        with GitHub. This callback URL can be set in the settings for the app
        or passed in during authorization.
        """
        @wraps(f)
        def decorated(*args, **kwargs):
            if 'code' in request.args:
                data = self._handle_response()
            else:
                data = self._handle_invalid_response()
            return f(*((data,) + args), **kwargs)
        return decorated

    def _handle_response(self):
        """
        Handles response after the redirect to GitHub. This response
        determines if the user has allowed the this application access. If we
        were then we send a POST request for the access_key used to
        authenticate requests to GitHub.
        """
        logger.debug("Handling response from GitHub")
        params = {
            'code': request.args.get('code'),
            'client_id': self.client_id,
            'client_secret': self.client_secret
        }
        url = self.BASE_AUTH_URL + 'access_token'
        logger.debug("POSTing to %s", url)
        logger.debug(params)
        response = self.session.post(url, data=params)
        data = parse_qs(response.content)
        logger.debug("response.content = %s", data)
        for k, v in data.items():
            if len(v) == 1:
                data[k] = v[0]
        return data.get('access_token', None)

    def _handle_invalid_response(self):
        pass

    def raw_request(self, method, resource, params=None, **kwargs):
        """
        Makes a HTTP request and returns the raw
        :class:`~requests.Response` object.
        """
        if params is None:
            params = {}
        if 'access_token' not in params:
            params['access_token'] = self.get_access_token()
        url = self.BASE_URL + resource
        return self.session.request(
            method, url, params=params, allow_redirects=True, **kwargs)

    def request(self, method, resource, **kwargs):
        """
        Makes a request to the given endpoint. Keyword arguments are passed
        to the :meth:`~requests.request` method. If the content type of the
        response is JSON, it will be decoded automatically and a dictionary
        will be returned. Otherwise the :class:`~requests.Response` object
        is returned.
        """
        response = self.raw_request(method, resource, **kwargs)
        status_code = str(response.status_code)
        if status_code.startswith('4'):
            raise GitHubError(response)
        assert status_code.startswith('2')
        if response.headers['Content-Type'].startswith('application/json'):
            return response.json()
        else:
            return response

    def get(self, resource, **kwargs):
        """Shortcut for ``request('GET', resource)``."""
        return self.request('GET', resource, **kwargs)

    def post(self, resource, data, **kwargs):
        """Shortcut for ``request('POST', resource)``.
        Use this to make POST request since it will also encode ``data`` to
        'application/x-www-form-urlencoded' format."""
        headers = {'Content-Type': 'application/x-www-form-urlencoded'}
        data = json.dumps(data)
        return self.request('POST', resource, headers=headers,
                            data=data, **kwargs)

    def head(self, resource, **kwargs):
        return self.request('HEAD', resource, **kwargs)

    def patch(self, resource, **kwargs):
        return self.request('PATCH', resource, **kwargs)

    def put(self, resource, **kwargs):
        return self.request('PUT', resource, **kwargs)

    def delete(self, resource, **kwargs):
        return self.request('DELETE', resource, **kwargs)
Python
0
@@ -296,17 +296,17 @@
= '0.3. -3 +4 '

logge
41f68e14fe890cac3de391f7bc4cdd5c2e5b9d75
test B07
spec/Order_B07_spec.py
spec/Order_B07_spec.py
Python
0.000001
@@ -0,0 +1,786 @@
+#!/usr/bin/env python
# -*- coding: utf-8 -*-
from primestg.order.orders import Order
from expects import expect, equal

with description('Order B07 Generation'):

    with it('generates expected B07 xml'):
        expected_result = '<Order IdPet="1234" IdReq="B07" Version="3.1.c">\n ' \
                          '<Cnc Id="CIR000000000">\n ' \
                          '<B07 IPftp="10.1.5.206"/>\n ' \
                          '</Cnc>\n</Order>\n'

        generic_values = {
            'id_pet': '1234',
            'id_req': 'B07',
            'cnc': 'CIR000000000',
        }
        payload = {
            'IPftp': '10.1.5.206',
        }
        order = Order('B07')
        order = order.create(generic_values, payload)
        expect(order).to(equal(expected_result))
87a00798e0f3033b3ad2b258c7bfeb7efd8ea388
Use helper provided by model's common module.
aleph/model/role.py
aleph/model/role.py
import logging
from uuid import uuid4

from flask import current_app

from aleph.core import db, url_for, get_config
from aleph.data.validate import validate
from aleph.model.common import SoftDeleteModel, IdModel

log = logging.getLogger(__name__)


membership = db.Table('role_membership',
    db.Column('group_id', db.Integer, db.ForeignKey('role.id')),  # noqa
    db.Column('member_id', db.Integer, db.ForeignKey('role.id'))  # noqa
)


class Role(db.Model, IdModel, SoftDeleteModel):
    """A user, group or other access control subject."""

    _schema = 'role.json#'

    __tablename__ = 'role'

    USER = 'user'
    GROUP = 'group'
    SYSTEM = 'system'
    TYPES = [USER, GROUP, SYSTEM]

    SYSTEM_GUEST = 'guest'
    SYSTEM_USER = 'user'

    foreign_id = db.Column(db.Unicode(2048), nullable=False, unique=True)
    name = db.Column(db.Unicode, nullable=False)
    email = db.Column(db.Unicode, nullable=True)
    api_key = db.Column(db.Unicode, nullable=True)
    is_admin = db.Column(db.Boolean, nullable=False, default=False)
    type = db.Column(db.Enum(*TYPES, name='role_type'), nullable=False)

    permissions = db.relationship("Permission", backref="role")

    def update(self, data):
        validate(data, self._schema)
        self.name = data.get('name', self.name)
        self.email = data.get('email', self.email)

    def clear_roles(self):
        self.roles = []
        db.session.add(self)

    def add_role(self, role):
        self.roles.append(role)
        db.session.add(role)
        db.session.add(self)

    @classmethod
    def notifiable(cls):
        return cls.all_ids().filter(cls.email != None)  # noqa

    @classmethod
    def by_foreign_id(cls, foreign_id):
        if foreign_id is not None:
            return cls.all().filter_by(foreign_id=foreign_id).first()

    @classmethod
    def by_api_key(cls, api_key):
        if api_key is not None:
            return cls.all().filter_by(api_key=api_key).first()

    @classmethod
    def load_or_create(cls, foreign_id, type, name, email=None,
                       is_admin=None):
        role = cls.by_foreign_id(foreign_id)
        if role is None:
            role = cls()
            role.foreign_id = foreign_id
            role.name = name
            role.type = type
            role.is_admin = False

        if role.api_key is None:
            role.api_key = uuid4().hex

        role.email = email
        if is_admin is not None:
            role.is_admin = is_admin

        # see: https://github.com/pudo/aleph/issues/111
        auto_admins = get_config('AUTHZ_ADMINS') or ''
        auto_admins = [a.lower() for a in auto_admins.split(',')]
        if email is not None and email.lower() in auto_admins:
            role.is_admin = True

        db.session.add(role)
        db.session.flush()
        return role

    @classmethod
    def load_id(cls, foreign_id, type=None, name=None):
        """Load a role and return the ID.

        If type is given and no role is found, a new role will be created.
        """
        if not hasattr(current_app, '_authz_roles'):
            current_app._authz_roles = {}
        if foreign_id not in current_app._authz_roles:
            role = cls.by_foreign_id(foreign_id)
            if role is None:
                if type is None:
                    return
                name = name or foreign_id
                role = cls.load_or_create(foreign_id, type, name)
            current_app._authz_roles[foreign_id] = role.id
        return current_app._authz_roles[foreign_id]

    def __repr__(self):
        return '<Role(%r,%r)>' % (self.id, self.foreign_id)

    def __unicode__(self):
        return self.name

    def to_dict(self):
        data = super(Role, self).to_dict()
        data.update({
            'api_url': url_for('roles_api.view', id=self.id),
            'foreign_id': self.foreign_id,
            'is_admin': self.is_admin,
            'email': self.email,
            'name': self.name,
            'type': self.type
        })
        return data


Role.members = db.relationship(Role, secondary=membership,
                               primaryjoin=Role.id == membership.c.group_id,
                               secondaryjoin=Role.id == membership.c.member_id,
                               backref="roles")
Python
0
@@ -12,31 +12,8 @@
ing
-from uuid import uuid4
 from
@@ -182,16 +182,29 @@
IdModel +, make_textid

log =
@@ -2358,19 +2358,21 @@
y = -uu +make_text id -4 () -.hex
45a91a5c32227aabf17b52960d98851cd7608dd1
add qha plot tool (function version)
workflows/tools/plot_quasiparticle_scan.py
workflows/tools/plot_quasiparticle_scan.py
Python
0
@@ -0,0 +1,1078 @@
+from aiida import load_dbenv
load_dbenv()

from aiida.orm import load_node, load_workflow
from aiida.orm import Code, DataFactory

import matplotlib.pyplot as plt

StructureData = DataFactory('structure')
ParameterData = DataFactory('parameter')
ArrayData = DataFactory('array')
KpointsData = DataFactory('array.kpoints')

import numpy as np

#######################
wf = load_workflow(1086)
#######################

thermal_properties = wf.get_result('thermal_properties')

energy = thermal_properties.get_array('electronic_energies')
volumes = thermal_properties.get_array('volumes')
entropy = thermal_properties.get_array('entropy')
cv = thermal_properties.get_array('cv')
temperature = thermal_properties.get_array('temperature')

plt.figure(1)

plt.plot(volumes, energy)

plt.figure(2)

for i, w in enumerate(wf.get_steps()[1].get_sub_workflows()):
    frequencies = [w.get_result('quasiparticle_data').get_dict()['{}'.format(k)]['q_point_0']['4']['frequency'] for k in range(100,800,100)]
    plt.plot(volumes, frequencies, label='{}'.format(temperature[i]))

plt.show()
082e7d63192c2e7eaa4210e0c559b145313ecc3a
Add files via upload
server/src/datasource/parse_indepexpends.py
server/src/datasource/parse_indepexpends.py
Python
0
@@ -0,0 +1,611 @@
+from datasource import fec
from datasource import propublica
import os


FEC_APIKEY = os.getenv('FEC_API_KEY', '')
ProPublica_APIKEY = os.getenv('PP_API_KEY', '')

FecApiObj = fec.FECAPI(FEC_APIKEY)
committees = FecApiObj.get_committees()
PPCampFinObj = propublica.CampaignFinanceAPI(ProPublica_APIKEY)
datafile = open("IndepExpends.json", 'w')
for committee in committees:
    if(2016 in committee['cycles']):
        print(committee['committee_id'])
        indepExpend = PPCampFinObj.get_indep_expends(str(committee['committee_id']))
        datafile.write(str(indepExpend))
datafile.close()
d8ba95ddb1e469600c735316a1aeafa115399b3c
Add an execution module called key to return minion public key finger
salt/modules/key.py
salt/modules/key.py
Python
0.000001
@@ -0,0 +1,366 @@
+'''
Functions to view the minion's public key information
'''

# Import python libs
import os

# Import Salt libs
import salt.utils

def finger():
    '''
    Return the minion's public key fingerprint

    CLI Example::

        salt '*' key.finger
    '''
    return salt.utils.pem_finger(
        os.path.join(__opts__['pki_dir'], 'minion.pub')
    )
ce8465e5f0f085bedcd1a84220316c8eab29a493
Add Tensor Flow
python/src/algorithm/coding/setupdate.py
python/src/algorithm/coding/setupdate.py
Python
0.000019
@@ -0,0 +1,434 @@
+n = int(input())
s = set(map(int, input().split()))

N = int(input())

for i in range(N):
    cmd = input()
    B = set(map(int, input().split()))
    if "symmetric_difference_update" in cmd:
        s.symmetric_difference_update(B)
    elif "intersection_update" in cmd:
        s.intersection_update(B)
    elif "difference_update" in cmd:
        s.difference_update(B)
    elif "update" in cmd:
        s.update(B)

print(sum(s))
b3889f8ff6d66963d4253d6796c3bb20dc9adbb7
Add external driver and parameter file
scripts/my_Param.py
scripts/my_Param.py
Python
0
@@ -0,0 +1,1523 @@
+#=================================================
# Observation
#-------------------------------------------------
sstObsPath = '/clim_obs/obs/ocn/mo/tos/UKMETOFFICE-HadISST-v1-1/130122_HadISST_sst.nc'
tauxObsPath = '/clim_obs/obs/atm/mo/tauu/ERAINT/tauu_ERAINT_198901-200911.nc'

sstNameObs = 'sst'
tauxNameObs = 'tauu'

#=================================================
# Models
#-------------------------------------------------
modpath = '/work/cmip5/historical/atm/mo/VAR/cmip5.MOD.historical.r1i1p1.mo.atm.Amon.VAR.ver-1.latestX.xml'

modnames = ['ACCESS1-0', 'ACCESS1-3',
            'BNU-ESM',
            'CMCC-CESM', 'CMCC-CM', 'CMCC-CMS',
            'CSIRO-Mk3-6-0', 'CanCM4',
            'GISS-E2-H-CC', 'GISS-E2-H', 'GISS-E2-R-CC', 'GISS-E2-R',
            'HadCM3', 'HadGEM2-AO', 'HadGEM2-CC', 'HadGEM2-ES',
            'IPSL-CM5A-LR',
            'MIROC-ESM-CHEM', 'MIROC-ESM', 'MIROC4h', 'MIROC5',
            'MPI-ESM-LR', 'MPI-ESM-MR',
            'inmcm4'
            ]
modnames = ['IPSL-CM5A-LR']

# Variables
sstName = 'ts'
tauxName= 'tauu'

#=================================================
# Output
#-------------------------------------------------
outpathdata = '.'   # e.g. '/user/directory/output/nc'
outpathjsons = '.'  # e.g. '/user/directory/output/json'
outnamejson = 'test.json'

#=================================================
# Output
#-------------------------------------------------
# Metrics
metrics = ['EnsoAmpl', 'EnsoMu']

# Variable name and nino box
ninoBox = 'nino3'
771fc766446e1610a0599102720dc7e0f358e0e6
Add wsgi file
app.wsgi
app.wsgi
Python
0.000001
@@ -0,0 +1,35 @@
+from app import app as application
fc636dbaacb5d2d1ebba1ba7f577ee4ec4deb958
Add synthtool scripts (#3765)
google-cloud-containeranalysis/synth.py
google-cloud-containeranalysis/synth.py
Python
0.000001
@@ -0,0 +1,1329 @@
+# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""This script is used to synthesize generated parts of this library."""

import synthtool as s
import synthtool.gcp as gcp

gapic = gcp.GAPICGenerator()
common_templates = gcp.CommonTemplates()

library = gapic.java_library(
    service='container',
    version='v1beta1',
    config_path='/google/devtools/containeranalysis/artman_containeranalysis_v1beta1.yaml',
    artman_output_name='')

s.copy(library / 'gapic-google-cloud-containeranalysis-v1beta1/src', 'src')
s.copy(library / 'grpc-google-cloud-containeranalysis-v1beta1/src', '../../google-api-grpc/grpc-google-cloud-containeranalysis-v1beta1/src')
s.copy(library / 'proto-google-cloud-containeranalysis-v1beta1/src', '../../google-api-grpc/proto-google-cloud-containeranalysis-v1beta1/src')
1fe3fd59e4000216c4d6694690dc0ba866a66ecb
add bloom_count_intersection.py
scripts/bloom_count_intersection.py
scripts/bloom_count_intersection.py
Python
0.000019
@@ -0,0 +1,1332 @@
+## using bloom filter to count intersection

import khmer
import sys
import screed
from screed.fasta import fasta_iter

filename = sys.argv[1]
K = int(sys.argv[2])  # size of kmer
HT_SIZE= int(sys.argv[3])  # size of hashtable
N_HT = int(sys.argv[4])  # number of hashtables



ht = khmer.new_hashbits(K, HT_SIZE, N_HT)

n_unique = 0
for n, record in enumerate(fasta_iter(open(filename))):
    sequence = record['sequence']
    seq_len = len(sequence)
    for n in range(0,seq_len+1-K):
        kmer = sequence[n:n+K]
        if (not ht.get(kmer)):
            n_unique+=1
        ht.count(kmer)
print filename,'has been consumed.'
print '# of unique kmers:',n_unique
print '# of occupied bin:',ht.n_occupied()

filename2 = sys.argv[5]
ht2 = khmer.new_hashbits(K, HT_SIZE, N_HT)
n_unique = 0
n_overlap = 0
for n, record in enumerate(fasta_iter(open(filename2))):
    sequence = record['sequence']
    seq_len = len(sequence)
    for n in range(0,seq_len+1-K):
        kmer = sequence[n:n+K]
        if (not ht2.get(kmer)):
            n_unique+=1
            if (ht.get(kmer)):
                n_overlap+=1
        ht2.count(kmer)

print filename2,'has been consumed.'
print '# of unique kmers:',n_unique
print '# of occupied bin:',ht2.n_occupied()

print n_overlap,'unique kmers also appears in ',filename2
4722c73643cbf9cbd63f05736a8469afc4c03443
test project: convert IPAddressField fields to GenericIPAddressField
test_django_admin_bootstrapped/test_django_admin_bootstrapped/models.py
test_django_admin_bootstrapped/test_django_admin_bootstrapped/models.py
from django.db import models


class TestMe(models.Model):
    test_m2m = models.ManyToManyField('self', blank=True, help_text="Lorem dolor")
    test_ip = models.IPAddressField(help_text="Lorem dolor")
    test_url = models.URLField(help_text="Lorem dolor")
    test_int = models.IntegerField(help_text="Lorem dolor")
    test_img = models.ImageField(upload_to='dummy', blank=True)
    test_file = models.FileField(upload_to='dummy', blank=True)
    test_date = models.DateField(help_text="Lorem dolor")
    test_char = models.CharField(max_length=50, help_text="Lorem dolor")
    test_bool = models.BooleanField(help_text="Lorem dolor", default=False)
    test_time = models.TimeField(help_text="Lorem dolor")
    test_slug = models.SlugField(help_text="Lorem dolor")
    test_text = models.TextField(help_text="Lorem dolor")
    test_email = models.EmailField(help_text="Lorem dolor")
    test_float = models.FloatField(help_text="Lorem dolor")
    test_bigint = models.BigIntegerField(help_text="Lorem dolor")
    test_positive_integer = models.PositiveIntegerField(help_text="Lorem dolor")
    test_decimal = models.DecimalField(max_digits=5, decimal_places=2, help_text="Lorem dolor")
    test_comma_separated_int = models.CommaSeparatedIntegerField(max_length=100, help_text="Lorem dolor")
    test_small_int = models.SmallIntegerField(help_text="Lorem dolor")
    test_nullbool = models.NullBooleanField(help_text="Lorem dolor")
    test_filepath = models.FilePathField(blank=True, help_text="Lorem dolor")
    test_positive_small_int = models.PositiveSmallIntegerField(help_text="Lorem dolor")

    def get_absolute_url(self):
        return ''

    class Meta:
        verbose_name = u'Test me'
        verbose_name_plural = u'Lot of Test me'


class TestMeProxyForFieldsets(TestMe):
    class Meta:
        proxy = True
        verbose_name = u'Test me fieldsets'
        verbose_name_plural = u'Lot of Test me fieldsets'


class TestThat(models.Model):
    that = models.ForeignKey(TestMe, help_text="Lorem dolor")
    test_ip = models.IPAddressField(help_text="Lorem dolor")
    test_url = models.URLField(help_text="Lorem dolor")
    test_int = models.IntegerField(help_text="Lorem dolor")
    test_date = models.DateField(help_text="Lorem dolor")
    test_bool = models.BooleanField(help_text="Lorem dolor", default=True)

    class Meta:
        verbose_name = u'Test that'
        verbose_name_plural = u'Lot of Test that'


class TestSortable(models.Model):
    that = models.ForeignKey(TestMe)
    position = models.PositiveSmallIntegerField("Position")
    test_char = models.CharField(max_length=5)

    class Meta:
        ordering = ('position', )
Python
0.000003
@@ -148,32 +148,39 @@
est_ip = models. +Generic IPAddressField(h
@@ -2048,16 +2048,23 @@
models. +Generic IPAddres
63f91c2459cb98cf0cfb1e60d298944212d9d639
add missing file in symm
symm/addons.py
symm/addons.py
Python
0.000001
@@ -0,0 +1,1362 @@
+#
# Author: Qiming Sun <[email protected]>
#

import numpy
import pyscf.lib.logger

def label_orb_symm(mol, irrep_name, symm_orb, mo):
    nmo = mo.shape[1]
    s = mol.intor_symmetric('cint1e_ovlp_sph')
    mo_s = numpy.dot(mo.T, s)
    orbsym = [None] * nmo
    for i,ir in enumerate(irrep_name):
        moso = numpy.dot(mo_s, symm_orb[i])
        for j in range(nmo):
            if not numpy.allclose(moso[j], 0, atol=1e-6):
                if orbsym[j] is None:
                    orbsym[j] = ir
                else:
                    raise ValueError('orbital %d not symmetrized' % j)
    pyscf.lib.logger.debug(mol, 'irreps of each MO %s', str(orbsym))
    return orbsym

def symmetrize_orb(mol, irrep_name, symm_orb, mo):
    s = mol.intor_symmetric('cint1e_ovlp_sph')
    mo_s = numpy.dot(mo.T, s)
    mo1 = 0
    for csym in symm_orb:
        ovlpso = reduce(numpy.dot, (csym.T, s, csym))
        sc = numpy.linalg.solve(ovlpso, numpy.dot(mo_s, csym).T)
        mo1 = mo1 + numpy.dot(csym, sc)
    return mo1

if __name__ == "__main__":
    from pyscf import gto
    from pyscf import scf
    mol = gto.Mole()
    mol.build(
        atom = [['H', (0,0,0)], ['H', (0,0,1)]],
        basis = {'H': 'cc-pvdz'},
        symmetry = 1
    )
    mf = scf.RHF(mol)
    mf.scf()

    print label_orb_symm(mol, mol.irrep_name, mol.symm_orb, mf.mo_coeff)
4eaa92fc9b08af21193a71bb996d2b9644bcea09
Allow predicting matches 3 mins in the past
helpers/match_time_prediction_helper.py
helpers/match_time_prediction_helper.py
import datetime
import time
import pytz

import numpy as np

from helpers.match_manipulator import MatchManipulator


class MatchTimePredictionHelper(object):

    EPOCH = datetime.datetime.fromtimestamp(0)

    @classmethod
    def as_local(cls, time, timezone):
        return pytz.utc.localize(time).astimezone(timezone)

    @classmethod
    def as_utc(cls, time):
        if time.utcoffset():
            return (time - time.utcoffset()).replace(tzinfo=None)
        return time

    @classmethod
    def timestamp(cls, d):
        return time.mktime(d.timetuple())

    @classmethod
    def compute_average_cycle_time(cls, played_matches, next_unplayed, timezone):
        """
        Compute the average cycle time of the given matches, but only for the current day
        :param played_matches: The matches for this event that have been played
        :param next_unplayed: The next match to be played
        :param timezone: The timezone object, for computing local times
        :return: The average cycle time, in seconds, or None if not enough info
        """

        cycles = []

        # Sort matches by when they were actually played
        # This should account for out of order replays messing with the computations
        played_matches.sort(key=lambda x: x.actual_time)

        # Next match start time (in local time)
        next_match_start = cls.as_local(next_unplayed.time, timezone)

        # Find the first played match of the same day as the next match to be played
        start_of_day = None
        for i in range(0, len(played_matches)):
            scheduled_time = cls.as_local(played_matches[i].time, timezone)
            if scheduled_time.day == next_match_start.day:
                start_of_day = i
                break

        if start_of_day is None:
            return None

        # Compute cycle times for matches on this day
        for i in range(start_of_day + 1, len(played_matches)):
            cycle = cls.timestamp(played_matches[i].actual_time) - cls.timestamp(played_matches[i - 1].actual_time)

            # Discard (with 0 weight) outlier cycles that take too long (>150% of the schedule)
            # We want to bias our average to be low, so we don't "overshoot" our predictions
            # So we simply discard outliers instead of letting them skew the average
            # Additionally, discard matches with breaks (like lunch) in between. We find those
            # when we see a scheduled time between matches larger than 15 minutes
            scheduled_cycle = cls.timestamp(played_matches[i].time) - cls.timestamp(played_matches[i - 1].time)
            if scheduled_cycle < 15 * 60 and cycle <= scheduled_cycle * 1.5:
                # Bias the times towards the schedule
                cycle = (0.7 * cycle) + (0.3 * scheduled_cycle)
                cycles.append(cycle)

        return np.percentile(cycles, 30) if cycles else None

    @classmethod
    def predict_future_matches(cls, played_matches, unplayed_matches, timezone, is_live):
        """
        Add match time predictions for future matches
        """
        last_match = played_matches[-1] if played_matches else None
        next_match = unplayed_matches[0] if unplayed_matches else None

        if not next_match:
            # Nothing to predict
            return

        last_match_day = cls.as_local(last_match.time, timezone).day if last_match else None
        average_cycle_time = cls.compute_average_cycle_time(played_matches, next_match, timezone)
        last = last_match

        # Only predict up to 10 matches in the future on the same day
        for i in range(0, min(10, len(unplayed_matches))):
            match = unplayed_matches[i]
            scheduled_time = cls.as_local(match.time, timezone)
            if scheduled_time.day != last_match_day and last_match_day is not None:
                # Stop, once we exhaust all unplayed matches on this day
                break

            # For the first iteration, base the predictions off the newest known actual start time
            # Otherwise, use the predicted start time of the previously processed match
            last_predicted = None
            if last_match:
                last_predicted = cls.as_local(last_match.actual_time if i == 0 else last.predicted_time, timezone)
            if last_predicted and average_cycle_time:
                predicted = last_predicted + datetime.timedelta(seconds=average_cycle_time)
            else:
                predicted = match.time

            # Never predict a match to happen more than 2 minutes ahead of schedule or in the past
            # However, if the event is not live (we're running the job manually for a single event),
            # then allow predicted times to be in the past.
            now = datetime.datetime.now(timezone) if is_live else cls.as_local(cls.EPOCH, timezone)
            earliest_possible = cls.as_local(match.time + datetime.timedelta(minutes=-2), timezone)
            match.predicted_time = max(cls.as_utc(predicted), cls.as_utc(earliest_possible), cls.as_utc(now))
            last = match

        MatchManipulator.createOrUpdate(unplayed_matches)
Python
0.000001
@@ -198,16 +198,90 @@
stamp(0)
+    MAX_IN_PAST = datetime.timedelta(minutes=-3)  # One match length, ish

    @c
@@ -4895,16 +4895,34 @@
imezone) + + cls.MAX_IN_PAST if is_l
5339da4272baa6d423a69fd94adb15ede6c7ce26
Allow for missing /etc/adagios/adagios.conf
adagios/settings.py
adagios/settings.py
# Django settings for adagios project.

DEBUG = True
TEMPLATE_DEBUG = DEBUG

# Hack to allow relative template paths
import os
from glob import glob
djangopath = os.path.dirname(__file__)

ADMINS = (
    # ('Your Name', '[email protected]'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.',  # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '',      # Or path to database file if using sqlite3.
        'USER': '',      # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',      # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '',      # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'Atlantic/Reykjavik'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = "%s/media/" % (djangopath)

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = 'media/'

# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
#ADMIN_MEDIA_PREFIX = '/media/'

# Make this unique, and don't share it with anybody.
SECRET_KEY = 'kq_4#kcvpb3oen80nsu&xb1+4)ep33u1l37x37y9_k-^aic5s6'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'adagios.urls'

TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    "%s/templates" % (djangopath),
)

INSTALLED_APPS = [
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
    # 'adagios.okconfig_',
    'adagios.objectbrowser',
    'adagios.rest',
    'adagios.misc',
]
#INSTALLED_APPS.append( 'adagios.okconfig_plugin' )

TEMPLATE_CONTEXT_PROCESSORS = ('adagios.context_processors.on_page_load',
                               "django.contrib.auth.context_processors.auth",
                               "django.core.context_processors.debug",
                               "django.core.context_processors.i18n",
                               "django.core.context_processors.media",
                               "django.core.context_processors.static",
                               "django.contrib.messages.context_processors.messages")

# Adagios specific configuration options. These are just the defaults,
# Anything put in /etc/adagios.d/adagios.conf will overwrite this.
nagios_config="/etc/nagios/nagios.cfg"
nagios_url="/nagios"
nagios_init_script = "/etc/init.d/nagios"
nagios_binary = "/usr/bin/nagios"
enable_githandler=False
enable_loghandler = False
warn_if_selinux_is_active = True
include=""
plugins = {}

# Load config files from /etc/adagios/
adagios_configfile = "/etc/adagios/adagios.conf"
execfile(adagios_configfile)

# if config has any default include, lets include that as well
configfiles = glob(include)
for configfile in configfiles:
    execfile(configfile)

for k,v in plugins.items():
    INSTALLED_APPS.append( v )
Python
0.000001
@@ -141,16 +141,42 @@
ort glob
+from warnings import warn

django
@@ -1256,16 +1256,18 @@
e zone.
+# TIME_ZON
@@ -4606,16 +4606,25 @@
s.conf"
+try:
    execfile
@@ -4644,16 +4644,329 @@
igfile)
+except IOError, e:
    # Only raise on errors other than file not found (missing config is OK)
    if e.errno != 2:
        raise Exception('Unable to open %s: %s' % (adagios_configfile, e.strerror))
    # Warn on missing configs
    else:
        warn('Unable to open %s: %s' % (adagios_configfile, e.strerror))


# if c
005872ea37dfdd4b8ab8b16e3c5b0083fb86cdb9
Add settings file
scripts/settings.py
scripts/settings.py
Python
0.000001
@@ -0,0 +1,2097 @@
+#!/usr/bin/env python


#===============================================================================
# GLOBAL CONSTANTS
#===============================================================================

# --- Set up GPIO referencing----
broadcom_ref = True

if broadcom_ref:
    PIN_11 = 17
    PIN_12 = 18
    PIN_13 = 27
    PIN_15 = 22
    PIN_37 = 26
    PIN_38 = 20
    PIN_40 = 21
else:
    PIN_11 = 11
    PIN_12 = 12
    PIN_13 = 13
    PIN_15 = 15
    PIN_37 = 37
    PIN_38 = 38
    PIN_40 = 40


# --- System set up ---
UPDATE_RATE = 300  # seconds
W1_DEVICE_PATH = '/sys/bus/w1/devices/'
DEBOUNCE_MICROS = 0.250  # seconds
SYS_FOLDER = '/home/pi/weather'
DATA_FOLDER = '/data/'
TICK_DATA = 'tick_count'

# --- RRDTool set up ---
RRDTOOL_RRD_FILE = 'weather_data.rrd'
RRDTOOL_HEARTBEAT = 2  # multiplier

# XML filename: Consolidation type, Resolution (minutes), Recording Period (days)
RRDTOOL_RRA = {'wd_last_1d.xml': ('LAST', 5, 1.17),
               'wd_avg_2d.xml': ('AVERAGE', 30, 2),
               'wd_avg_1w.xml': ('AVERAGE', 120, 7),
               'wd_avg_1m.xml': ('AVERAGE', 240, 31),
               'wd_avg_3m.xml': ('AVERAGE', 720, 93),
               'wd_avg_1y.xml': ('AVERAGE', 1440, 365),
               'wd_min_1y.xml': ('MIN', 1440, 365),
               'wd_max_1y.xml': ('MAX', 1440, 365)}

SENSOR_SET= {'inside_temp': (True, PIN_37, '*C', -50, 100, 'GAUGE'),
             'inside_hum': (True, PIN_37, '%', -1, 101, 'GAUGE'),
             'door_open': (True, PIN_40, '', -1, 2, 'GAUGE'),
             'precip_rate': (True, PIN_38, 'mm', -5, 50, 'GAUGE'),
             'precip_acc': (True, PIN_38, 'mm', -5, 500, 'GAUGE'),
             'outside_temp': (True, '28-0414705bceff',
                              '*C', -50, 50, 'GAUGE'),
             'sw_status': (True, '', '', -1, 2, 'GAUGE'),
             'sw_power': (True, '', 'W', -9999, 9999, 'GAUGE')}
746dd90a17d756f5601ddcbbd6c2de6fed9c75d5
add splitter script
scripts/splitter.py
scripts/splitter.py
Python
0.000001
@@ -0,0 +1,612 @@
+import sys
import os
import json
import pdb


content = ""
for line in sys.stdin:
    content += line

data = json.loads(content)

print('ok')


for item in data:
    filename = "items_data/{0}.json".format(item['_key'])
    print("creating ".format(filename))
    if not os.path.exists(os.path.dirname(filename)):
        try:
            os.makedirs(os.path.dirname(filename))
        except OSError as exc:  # Guard against race condition
            if exc.errno != errno.EEXIST:
                raise

    with open(filename, 'w') as file_:
        file_.write(json.dumps(item, indent=4))


print(len(data))
7a49dfb41888b6afed4ff3dca3987f641e497056
Add PageHandler
handler/page.py
handler/page.py
Python
0
@@ -0,0 +1,452 @@
+#!/usr/bin/python
# -*- coding:utf-8 -*-
# Powered By KK Studio

from BaseHandler import BaseHandler

# 404 Page
class Page404Handler(BaseHandler):
    def get(self):
        self.render('page/404.html', title="404")

# 500 Page
class Page500Handler(BaseHandler):
    def get(self):
        self.render('page/500.html', title="500")

# Blank Page
class BlankHandler(BaseHandler):
    def get(self):
        self.render('page/blank.html', title="Blank")
d3248cebcb1ef161dfc706d99b4d361205fc9fbe
Add wsgi file
t10server.wsgi
t10server.wsgi
Python
0.000001
@@ -0,0 +1,45 @@
+from teeminus10_api import app as application
2866c8fbb3549ffd2405c5b13338a3fdf87a6c5d
add checks
dog/checks.py
dog/checks.py
Python
0.000001
@@ -0,0 +1,155 @@
+from discord.ext import commands

owner_id = '97104885337575424'

def is_owner():
    return commands.check(lambda ctx: ctx.message.author.id == owner_id)
482859488865fe9b1e05a923e7aafeb7e090f049
Create volumeBars.py
python/volumeBars.py
python/volumeBars.py
Python
0.000001
@@ -0,0 +1,434 @@
+#!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import time

rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
nextFrame = ledMatrix.CreateFrameCanvas()

while True:
	nextFrame.SetPixel(randint(0, width), randint(0, height), randint(0, 255), randint(0, 255), randint(0, 255))
	nextFrame = ledMatrix.swapOnVSync(nextFrame)
15839dd4b37761e49599f6b278f6bd6e6d18b1e5
Add initial rpc implementation example
examples/mailbox/rpc.py
examples/mailbox/rpc.py
Python
0
@@ -0,0 +1,1139 @@
+import sys
sys.path.append('.')  # NOQA

from xwing.mailbox import spawn, run, stop


class Server(object):

    def hello_world(self):
        return 'Hello World!'

    def run(self):
        async def rpc_server(mailbox, server):
            while True:
                function, pid = await mailbox.recv()
                print('Got call from: ', pid)

                result = getattr(server, function)()
                await mailbox.send(pid, result)

        spawn(rpc_server, self, name='rpc_server')


class Client(object):

    def __init__(self, server_pid):
        self.server_pid = server_pid

    def call(self, function):
        async def dispatch(mailbox, function):
            await mailbox.send(self.server_pid, function, mailbox.pid)
            result = await mailbox.recv()
            print(result)

        spawn(dispatch, function)


if __name__ == '__main__':
    # python examples/mailbox/rpc.py
    server = Server()
    server.run()

    client = Client('[email protected]')
    client.call('hello_world')

    try:
        run()
    except KeyboardInterrupt:
        print('Stopping...')
        stop()
f09bddb89681fdd03ac190a1caa4847b3da7a61f
add script for reinserting unparsed sentences into the parser output
src/corex/save_unparsables.py
src/corex/save_unparsables.py
Python
0.000003
@@ -0,0 +1,1259 @@
+#!/usr/bin/python

# This script takes the input file (one-sentence-per-line) for the
# the Berkeleyparser (topological fields model) and compares it to the
# parser's output file. Sentences missing in the parser output (unparsables)
# in the output are inserted from the parsers input file, one-sentence-per-line)
#

import sys
import codecs
import re

original = codecs.open(sys.argv[1], 'r', 'utf-8')
parsed = codecs.open(sys.argv[2], 'r', 'utf-8')
pos_and_token = re.compile('\(([^ ]+ (?:[^ )]+|\)))\)')



# This takes a line of the Berkeley topological parser's
# output, returns a string of tokens separated by whitespace

def get_tokens(line):
    pt = pos_and_token.findall(line)
    if len(pt) > 0:
    	pt = [i.split(" ") for i in pt]
    	t = [i[1] for i in pt]
	s = " ".join(t)
    else:
	s = ''
    return(s)




for oline in original:
	oline = oline.strip()
	pline = parsed.readline().strip()
	pline_tokens = get_tokens(pline)
	if oline == pline_tokens:
		print(pline.encode('utf-8'))
	else:
		print(oline.encode('utf-8'))
		if not pline_tokens =="":
			for ooline in original:
				ooline = ooline.strip()
				if not ooline == pline_tokens:
					print(ooline.encode('utf-8'))
				else:
					print(pline.encode('utf-8'))
					break
7d52d1efaf5bb07bfbb66e78f7c51e92b6c531dd
Use BytesIO. Closes #17
ajaximage/image.py
ajaximage/image.py
import os
from PIL import Image, ImageOps
try:
    from StringIO import StringIO
except ImportError:
    from io import StringIO

from django.core.files.base import ContentFile
from django.core.files.uploadedfile import SimpleUploadedFile


def resize(file_, max_width=0, max_height=0, crop=0):
    max_width = int(max_width)
    max_height = int(max_height)
    crop = int(crop)

    if(max_width is 0 and max_height is 0):
        return file_

    max_width = 9999 if max_width is 0 else max_width
    max_height = 9999 if max_height is 0 else max_height

    size = (max_width, max_height)

    image = Image.open(file_)

    if(image.mode == 'RGBA'):
        image.load()
        background = Image.new('RGB', image.size, (255, 255, 255))
        background.paste(image, mask=image.split()[3])
        image = background

    temp = StringIO()

    if(crop is 1):
        image = ImageOps.fit(image, size, Image.ANTIALIAS)
    else:
        image.thumbnail(size, Image.ANTIALIAS)

    image.save(temp, 'jpeg')
    temp.seek(0)

    return SimpleUploadedFile(file_.name, temp.read(), content_type='image/jpeg')
Python
0
@@ -73,16 +73,22 @@
StringIO + as IO
except
@@ -119,22 +119,27 @@
import -String +BytesIO as IO

from
@@ -841,22 +841,16 @@
temp = -String IO()
5f9c6e49597abe07a74cd2e7370216bd0fc57cd4
add topology
scripts/topology.py
scripts/topology.py
Python
0.000018
@@ -0,0 +1,1592 @@
+#!/usr/bin/python

from mininet.net import Mininet
from mininet.node import Controller, OVSSwitch
from mininet.cli import CLI
from mininet.log import setLogLevel
import sys

def multiControllerNet( number ):
    "Create a network from semi-scratch with multiple controllers."

    net = Mininet( controller=Controller, switch=OVSSwitch, build=False )

    print "*** Creating (reference) controllers"
    c0 = net.addController( 'c0' , port=(7700))

    s_count = int(number)
    h_count = s_count * 2

#    sys.exit("END");

    hosts = [0] * h_count
    switches = [0] * s_count

    for i in range(h_count):
        hosts[i] = net.addHost('h' + str(i))

    for i in range(s_count):
        switches[i] = net.addSwitch('s' + str(i))

    print "*** Creating links between hosts and #switch"
    for i in range(s_count):
        net.addLink( switches[i],hosts[i * 2] )
        net.addLink( switches[i],hosts[i * 2 + 1] )

    print "*** Creating links between switches"
    for i in range(s_count-1):
        net.addLink( switches[i],switches[i+1] )


    print "*** Starting network"
    net.build()

    c0.start()

    for i in range(s_count):
        switches[i].start( [c0] )



    print "*** Testing network"
#    net.pingAll()

    print "*** Running CLI"
    CLI( net )

    print "*** Stopping network"
    net.stop()

if __name__ == '__main__':
    setLogLevel( 'info' )  # for CLI output

    if len(sys.argv) < 3:
        print "Usage: sudo ./topo1.py -s [switch number]\n"
        sys.exit(1)
    elif sys.argv[1] == "-s":
        multiControllerNet(sys.argv[2])
4ff6b846311a0f7bd6cfcf2e661a7c53061406fe
Add command to print vault info
glaciercmd/command_vault_info.py
glaciercmd/command_vault_info.py
Python
0.000001
@@ -0,0 +1,779 @@
+import boto

class CommandVaultInfo(object):

    def execute(self, args, config):
        glacier_connection = boto.connect_glacier(aws_access_key_id=config.get('configuration', 'aws_key'), aws_secret_access_key=config.get('configuration', 'aws_secret'))

        try:
            vault = glacier_connection.get_vault(args[2])
            print "Vault info:\n\tname={}\n\tarn={}\n\tcreation_date={}\n\tlast_inventory_date={}\n\tsize={}\n\tnumber_of_archives={}".format(vault.name, vault.arn, vault.creation_date, vault.last_inventory_date, vault.size, vault.number_of_archives)
        except:
            print "Vaule named '{}' does not exist.".format(args[2])

    def accept(self, args):
        return len(args) >= 3 and args[0] == 'vault' and args[1] == 'info'

def command_init():
    return CommandVaultInfo()
d3f152ffa1d6109ded2cf85c5f8312ee4a26ec92
version devel
src/robotide/version.py
src/robotide/version.py
# Automatically generated by `pavement.py`.
VERSION = '1.4'
Python
0.000001
@@ -52,11 +52,22 @@
= ' -1.4 +devel-20150627 '
adcbdc06f0c476bc4c24e8c69d06cffbb6726a9f
Add migration
ovp_organizations/migrations/0023_auto_20170712_1704.py
ovp_organizations/migrations/0023_auto_20170712_1704.py
Python
0.000002
@@ -0,0 +1,611 @@
+# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-07-12 17:04
from __future__ import unicode_literals

from django.db import migrations, models
import django.db.models.deletion


class Migration(migrations.Migration):

    dependencies = [
        ('ovp_organizations', '0022_auto_20170613_1424'),
    ]

    operations = [
        migrations.AlterField(
            model_name='organization',
            name='address',
            field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='ovp_core.SimpleAddress', verbose_name='address'),
        ),
    ]
582ebd448508625ed2c9f362aaafc3fc46e60df0
Add unit tests for security_scan
functest/tests/unit/features/test_security_scan.py
functest/tests/unit/features/test_security_scan.py
Python
0
@@ -0,0 +1,1289 @@
+#!/usr/bin/env python

# Copyright (c) 2017 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0

# pylint: disable=missing-docstring

import logging
import unittest

from functest.opnfv_tests.features import security_scan
from functest.utils import constants


class SecurityScanTesting(unittest.TestCase):

    logging.disable(logging.CRITICAL)

    def setUp(self):
        self.sscan = security_scan.SecurityScan()

    def test_init(self):
        self.assertEqual(self.sscan.project_name, "securityscanning")
        self.assertEqual(self.sscan.case_name, "security_scan")
        self.assertEqual(
            self.sscan.repo,
            constants.CONST.__getattribute__("dir_repo_securityscan"))
        self.assertEqual(
            self.sscan.cmd, (
                '. {0}/stackrc && cd {1} && '
                'python security_scan.py --config config.ini && '
                'cd -'.format(
                    constants.CONST.__getattribute__("dir_functest_conf"),
                    self.sscan.repo)))


if __name__ == "__main__":
    unittest.main(verbosity=2)
25f5ff62e1652e3293d12e3e73e44e7d7c21463c
upgrade incs
bin/upgrade_fortran_inc.py
bin/upgrade_fortran_inc.py
Python
0.000001
@@ -0,0 +1,1537 @@
+#!/usr/bin/env python3
# -*- coding: utf8 -*-

# py f:\dev\progs\bin\clean_fortran.py ricks.f90

# f:\f90ppr\moware\f90ppr.exe < tmp.f90 > out.txt

import sys, os, subprocess, shutil
sys.path.append(r'C:\msys64\mingw64\bin')

f90ppr_exe = r"F:\f90ppr\moware\f90ppr"

def main(fname):

    # tmpname = 'tmp.f90'
    if not os.path.isfile(fname):
        raise Exception(f'{fname} not found!')

    base, ext = os.path.splitext(fname)

    outname = base+'.ppr'+ext

    outfile = open(outname,'wb')
    cmd = [ f90ppr_exe ]
    p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=outfile)
    # maximum line length (2-132)
    p.stdin.write(b'$define FPPR_MAX_LINE 132\n')
    # keywords case: FPPR_LEAVE, FPPR_UPPER, FPPR_LOWER
    p.stdin.write(b'$define FPPR_KWD_CASE FPPR_LOWER\n')
    # variables case: FPPR_LEAVE, FPPR_UPPER, FPPR_LOWER
    p.stdin.write(b'$define FPPR_USR_CASE FPPR_LEAVE\n')
    # indentation (0-60)
    p.stdin.write(b'$define FPPR_STP_INDENT 4\n')
    # input format: 0=free format
    p.stdin.write(b'$define FPPR_FXD_IN 1\n')
    # output format: 0=free format
    p.stdin.write(b'$define FPPR_FXD_OUT 0\n')
    with open(fname,'rb') as infile:
        for l in infile.readlines():
            p.stdin.write(l)
    p.stdin.close()
    retcode = p.wait()
    print(f'retcode={retcode}')
    outfile.close()

    # overwrite file
    shutil.copy(outname, fname)
    # remove temporary
    if os.path.isfile(outname):
        os.remove(outname)


if __name__=="__main__":

    f = sys.argv[1]
    main(f)
0fe4a3c3a1d31230c9b5c931ff1e33584f1ccd4e
Create maximum-length-of-pair-chain.py
Python/maximum-length-of-pair-chain.py
Python/maximum-length-of-pair-chain.py
Python
0.998344
@@ -0,0 +1,971 @@
+# Time:  O(nlogn)
# Space: O(1)

# You are given n pairs of numbers.
# In every pair, the first number is always smaller than the second number.
#
# Now, we define a pair (c, d) can follow another pair (a, b)
# if and only if b < c. Chain of pairs can be formed in this fashion.
#
# Given a set of pairs, find the length longest chain which can be formed.
# You needn't use up all the given pairs. You can select pairs in any order.
#
# Example 1:
# Input: [[1,2], [2,3], [3,4]]
# Output: 2
# Explanation: The longest chain is [1,2] -> [3,4]
# Note:
# The number of given pairs will be in the range [1, 1000].

class Solution(object):
    def findLongestChain(self, pairs):
        """
        :type pairs: List[List[int]]
        :rtype: int
        """
        pairs.sort(key=lambda x: x[1])
        cnt, i = 0, 0
        for j in xrange(len(pairs)):
            if j == 0 or pairs[i][1] < pairs[j][0]:
                cnt += 1
                i = j
        return cnt
4f586f16eaf3e06d347bf9976a02005c70cd7e13
Create installTests.py
installTests.py
installTests.py
Python
0.000001
@@ -0,0 +1,168 @@
+import unittest
import install


class TestOperationWrapperMethods(unittest.TestCase):

    def setUp(self):
        # TODO: Write Tests
        self.test_dataset = ""
95182581beebbd181b20b23ee02657cb18347dd6
update spec: update read_spectrum.py: add read_spectrum for elodie
bopy/spec/read_spectrum.py
bopy/spec/read_spectrum.py
Python
0
@@ -0,0 +1,1353 @@
+# -*- coding: utf-8 -*-
"""

Author
------
Bo Zhang

Email
-----
[email protected]

Created on
----------
- Tue Mar  8 15:26:00 2016    read_spectrum

Modifications
-------------
-

Aims
----
- read various kinds of spectra

"""

import os
import numpy as np
from astropy.io import fits
from .spec import Spec


def reconstruct_wcs_coord_from_fits_header(hdr, dim=1):
    """ reconstruct wcs coordinates (e.g., wavelenght array) """
    # assert dim is not larger than limit
    assert dim <= hdr['NAXIS']

    # get keywords
    crval = hdr['CRVAL%d' % dim]
    cdelt = hdr['CDELT%d' % dim]
    crpix = hdr['CRPIX%d' % dim]
    naxis = hdr['NAXIS%d' % dim]

    # reconstruct wcs coordinates
    coord = np.arange(1 - crpix, naxis + 1 - crpix) * cdelt + crval
    return coord


def read_spectrum_elodie_r42000(fp):
    """ read spectrum from ELODIE library (R42000) """
    # assert the file exists
    assert os.path.exists(fp)

    # read fits
    hl = fits.open(fp)

    # reconstruct wave array
    wave = reconstruct_wcs_coord_from_fits_header(hl[0].header, dim=1)
    # flux
    flux = hl[0].data
    # flux err
    flux_err = hl[2].data
    # flux ivar
    flux_ivar = 1 / flux_err ** 2.

    # reconstruct spec
    sp = Spec(data=[wave, flux, flux_ivar, flux_err],
              names=['wave', 'flux', 'flux_ivar', 'flux_err'])
    return sp
39bb6cd51ce5351bfd93adac7b083a52b25590f8
Create 6kyu_vending_machine.py
Solutions/6kyu/6kyu_vending_machine.py
Solutions/6kyu/6kyu_vending_machine.py
Python
0.000057
@@ -0,0 +1,1198 @@ +class VendingMachine():%0A%0A def __init__(self, items, money):%0A self.items = dict(enumerate(items))%0A self.money = money%0A%0A def vend(self, selection, item_money):%0A try:%0A n,v = %5B(n,self.items%5Bn%5D) for n in self.items %0A if (self.items%5Bn%5D%5B'code'%5D).lower() == selection.lower()%5D%5B0%5D %0A except:%0A return %22Invalid selection! : Money in vending machine = %7B:.2f%7D%22.format(self.money)%0A %0A if item_money %3C v%5B'price'%5D:%0A %0A return %22Not enough money!%22%0A %0A if v%5B'quantity'%5D %3C= 0:%0A %0A return %22%7B%7D: Out of stock!%22.format(v%5B'name'%5D)%0A %0A if item_money %3E v%5B'price'%5D:%0A %0A change = item_money - v%5B'price'%5D%0A v%5B'quantity'%5D = v%5B'quantity'%5D - 1%0A self.money += (-change + item_money)%0A self.items%5Bn%5D = v%0A %0A return %22Vending %7B%7D with %7B:.2f%7D change.%22.format(v%5B'name'%5D, change)%0A %0A else:%0A %0A v%5B'quantity'%5D = v%5B'quantity'%5D - 1%0A self.money += item_money%0A self.items%5Bn%5D = v%0A %0A return %22Vending %7B%7D%22.format(v%5B'name'%5D)%0A
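A minimal usage sketch of the class above; the item-dict shape ('name', 'code', 'price', 'quantity') is inferred from vend(), not documented by the commit:

items = [{'name': 'Crisps', 'code': 'A1', 'price': 0.75, 'quantity': 2}]
vm = VendingMachine(items, 10.0)
print(vm.vend('a1', 1.00))  # Vending Crisps with 0.25 change.
print(vm.vend('zz', 1.00))  # Invalid selection! : Money in vending machine = 10.75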
9d88196f37757f26dde89bdb5430d76cc4b96fd3
Fix for Python3.4 build, requiring a __hash__ to work.
sknn/dataset.py
sknn/dataset.py
# -*- coding: utf-8 -*- from __future__ import (absolute_import, unicode_literals, print_function) from pylearn2.space import Space, CompositeSpace, VectorSpace from pylearn2.utils import safe_zip from pylearn2.datasets.dataset import Dataset from pylearn2.utils.iteration import (FiniteDatasetIterator, resolve_iterator_class) import functools import theano class FastVectorSpace(VectorSpace): """ More efficient version of the VectorSpace input that doesn't do any validation. This is used to speed up training times by default; when your data needs debugging, specify the ``debug=True`` flag in your MLP. """ @functools.wraps(VectorSpace._validate) def _validate(self, is_numeric, batch): pass def __eq__(self, other): return (type(other) in (FastVectorSpace, VectorSpace) and self.dim == other.dim and self.sparse == other.sparse and self.dtype == other.dtype) class SparseDesignMatrix(Dataset): """ SparseDesignMatrix is a type of Dataset used in training by PyLearn2 that takes a numpy/scipy sparse matrix and calls ``.todense()`` as the batches are passed out of the iterator. This is used internally by :class:`sknn.mlp.MultiLayerPerceptron` and transparently based on the data that's passed to the function ``fit()``. """ def __init__(self, X, y): self.X = X self.y = y self.data_n_rows = self.X.shape[0] self.num_examples = self.data_n_rows self.fancy = False self.stochastic = False X_space = VectorSpace(dim=self.X.shape[1]) X_source = 'features' dim = self.y.shape[-1] if self.y.ndim > 1 else 1 y_space = VectorSpace(dim=dim) y_source = 'targets' space = CompositeSpace((X_space, y_space)) source = (X_source, y_source) self.data_specs = (space, source) self.X_space = X_space def get_num_examples(self): return self.num_examples def get_data_specs(self): """ Returns the data_specs specifying how the data is internally stored. This is the format the data returned by `self.get_data()` will be. """ return self.data_specs def get_data(self): """ Returns ------- data : numpy matrix or 2-tuple of matrices Returns all the data, as it is internally stored. The definition and format of these data are described in `self.get_data_specs()`. """ return (self.X, self.y) @functools.wraps(Dataset.iterator) def iterator(self, mode=None, batch_size=None, num_batches=None, rng=None, data_specs=None, return_tuple=False): """ Method inherited from `pylearn2.datasets.dataset.Dataset`. """ self.mode = mode self.batch_size = batch_size self._return_tuple = return_tuple # TODO: If there is a view_converter, we have to use it to convert # the stored data for "features" into one that the iterator can return. space, source = data_specs or (self.X_space, 'features') assert isinstance(space, CompositeSpace),\ "Unexpected input space for the data." sub_spaces = space.components sub_sources = source conv_fn = lambda x: x.todense().astype(theano.config.floatX) convert = [] for sp, src in safe_zip(sub_spaces, sub_sources): convert.append(conv_fn if src in ('features', 'targets') else None) assert mode is not None,\ "Iteration mode not provided for %s" % str(self) mode = resolve_iterator_class(mode) subset_iterator = mode(self.X.shape[0], batch_size, num_batches, rng) return FiniteDatasetIterator(self, subset_iterator, data_specs=data_specs, return_tuple=return_tuple, convert=convert)
Python
0
@@ -730,42 +730,254 @@ -pass%0A%0A def __eq__(self, other): +%22%22%22%0A Short-circuit the entire validation if the user has specified it's not necessary.%0A %22%22%22%0A pass%0A%0A def __eq__(self, other):%0A %22%22%22%0A Equality should work between Fast and slow VectorSpace instances.%0A %22%22%22 %0A @@ -1161,16 +1161,184 @@ dtype)%0A%0A + def __hash__(self):%0A %22%22%22%0A Override necessary for Python 3.x.%0A %22%22%22%0A return hash((type(VectorSpace), self.dim, self.sparse, self.dtype))%0A%0A %0Aclass S
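Background for the fix, as a standalone illustration (not from the sknn codebase): Python 3 implicitly sets __hash__ to None on any class that defines __eq__, so the subclass must restore it explicitly:

class WithEq(object):
    def __eq__(self, other):
        return isinstance(other, WithEq)

try:
    hash(WithEq())      # fine on Python 2
except TypeError as e:
    print(e)            # Python 3: unhashable type: 'WithEq'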
5e96dd2846660f14e1d7b691ba928da63b699f19
Add support for Spotify
services/spotify.py
services/spotify.py
Python
0
@@ -0,0 +1,1055 @@ +from oauthlib.common import add_params_to_uri%0Aimport foauth.providers%0A%0A%0Aclass Spotify(foauth.providers.OAuth2):%0A # General info about the provider%0A provider_url = 'https://spotify.com/'%0A docs_url = 'https://developer.spotify.com/web-api/endpoint-reference/'%0A category = 'Music'%0A%0A # URLs to interact with the API%0A authorize_url = 'https://accounts.spotify.com/authorize'%0A access_token_url = 'https://accounts.spotify.com/api/token'%0A api_domain = 'api.spotify.com'%0A%0A available_permissions = %5B%0A (None, 'Read your publicly available information'),%0A ('playlist-modify', 'Manage your public playlists'),%0A ('playlist-modify-private', 'Manage all your playlists (even private)'),%0A ('playlist-read-private', 'Access your private playlists'),%0A ('user-read-private', 'Access your name, image and subscription details'),%0A ('user-read-email', 'Get your real email address'),%0A %5D%0A%0A def get_user_id(self, key):%0A r = self.api(key, self.api_domain, u'/v1/me')%0A return r.json()%5Bu'id'%5D%0A
062c4bc134f77f9279d18774b954a06566f99c5a
Add logger
src/acquisition/covidcast/logger.py
src/acquisition/covidcast/logger.py
Python
0.00002
@@ -0,0 +1,3157 @@ +%22%22%22Structured logger utility for creating JSON logs in Delphi pipelines.%22%22%22%0Aimport logging%0Aimport sys%0Aimport threading%0Aimport structlog%0A%0A%0Adef handle_exceptions(logger):%0A %22%22%22Handle exceptions using the provided logger.%22%22%22%0A def exception_handler(etype, value, traceback):%0A logger.exception(%22Top-level exception occurred%22,%0A exc_info=(etype, value, traceback))%0A%0A def multithread_exception_handler(args):%0A exception_handler(args.exc_type, args.exc_value, args.exc_traceback)%0A%0A sys.excepthook = exception_handler%0A threading.excepthook = multithread_exception_handler%0A%0A%0Adef get_structured_logger(name=__name__,%0A filename=None,%0A log_exceptions=True):%0A %22%22%22Create a new structlog logger.%0A%0A Use the logger returned from this in indicator code using the standard%0A wrapper calls, e.g.:%0A%0A logger = get_structured_logger(__name__)%0A logger.warning(%22Error%22, type=%22Signal too low%22).%0A%0A The output will be rendered as JSON which can easily be consumed by logs%0A processors.%0A%0A See the structlog documentation for details.%0A%0A Parameters%0A ---------%0A name: Name to use for logger (included in log lines), __name__ from caller%0A is a good choice.%0A filename: An (optional) file to write log output.%0A %22%22%22%0A # Configure the underlying logging configuration%0A handlers = %5Blogging.StreamHandler()%5D%0A if filename:%0A handlers.append(logging.FileHandler(filename))%0A%0A logging.basicConfig(%0A format=%22%25(message)s%22,%0A level=logging.INFO,%0A handlers=handlers%0A )%0A%0A # Configure structlog. This uses many of the standard suggestions from%0A # the structlog documentation.%0A structlog.configure(%0A processors=%5B%0A # Filter out log levels we are not tracking.%0A structlog.stdlib.filter_by_level,%0A # Include logger name in output.%0A structlog.stdlib.add_logger_name,%0A # Include log level in output.%0A structlog.stdlib.add_log_level,%0A # Allow formatting into arguments e.g., logger.info(%22Hello, %25s%22,%0A # name)%0A structlog.stdlib.PositionalArgumentsFormatter(),%0A # Add timestamps.%0A structlog.processors.TimeStamper(fmt=%22iso%22),%0A # Match support for exception logging in the standard logger.%0A structlog.processors.StackInfoRenderer(),%0A structlog.processors.format_exc_info,%0A # Decode unicode characters%0A structlog.processors.UnicodeDecoder(),%0A # Render as JSON%0A structlog.processors.JSONRenderer()%0A %5D,%0A # Use a dict class for keeping track of data.%0A context_class=dict,%0A # Use a standard logger for the actual log call.%0A logger_factory=structlog.stdlib.LoggerFactory(),%0A # Use a standard wrapper class for utilities like log.warning()%0A wrapper_class=structlog.stdlib.BoundLogger,%0A # Cache the logger%0A cache_logger_on_first_use=True,%0A )%0A%0A logger = structlog.get_logger(name)%0A%0A if log_exceptions:%0A handle_exceptions(logger)%0A%0A return logger%0A
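Hypothetical usage of the helper above (the event name and fields are made up); each call comes out as a single JSON object:

logger = get_structured_logger(__name__, filename='pipeline.log')
logger.info('fetch_complete', rows=1234, source='usafacts')
# roughly: {"rows": 1234, "source": "usafacts", "event": "fetch_complete",
#           "logger": "__main__", "level": "info", "timestamp": "..."}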
934f4ccfc4e34c5486c3d5a57b429742eb9b5915
add algorithms.ml to make format for machine learning
algorithms/ml.py
algorithms/ml.py
Python
0.000004
@@ -0,0 +1,1262 @@ +#!/usr/bin/env python%0A# -*- coding: UTF-8 -*-%0A%0A%22%22%22%0AMachine learning algorithms.%0A%22%22%22%0A%0Aimport sys%0A%0Afrom optparse import OptionParser%0A%0Afrom jcvi.apps.base import ActionDispatcher, debug%0Adebug()%0A%0A%0Adef main():%0A%0A actions = (%0A ('libsvm', 'convert csv file to LIBSVM format'),%0A )%0A p = ActionDispatcher(actions)%0A p.dispatch(globals())%0A%0A%0Adef libsvm(args):%0A %22%22%22%0A %25prog libsvm csvfile prefix.ids%0A%0A Convert csv file to LIBSVM format. %60prefix.ids%60 contains the prefix mapping.%0A Ga -1%0A Gr 1%0A%0A So the feature in the first column of csvfile get scanned with the prefix%0A and mapped to different classes. Formatting spec:%0A%0A http://svmlight.joachims.org/%0A %22%22%22%0A from jcvi.formats.base import DictFile%0A%0A p = OptionParser(libsvm.__doc__)%0A opts, args = p.parse_args(args)%0A%0A if len(args) != 2:%0A sys.exit(not p.print_help())%0A%0A csvfile, prefixids = args%0A d = DictFile(prefixids)%0A fp = open(csvfile)%0A fp.next()%0A for row in fp:%0A atoms = row.split()%0A klass = atoms%5B0%5D%0A kp = klass.split(%22_%22)%5B0%5D%0A klass = d.get(kp, %220%22)%0A feats = %5B%22%7B0%7D:%7B1%7D%22.format(i + 1, x) for i, x in enumerate(atoms%5B1:%5D)%5D%0A print %22 %22.join(%5Bklass%5D + feats)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
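Worked example of the conversion the libsvm action performs, with made-up values: a prefix file mapping Ga to -1 sends a row whose first field starts with Ga_ to class -1, and each remaining column k becomes k:value:

klass = {'Ga': '-1', 'Gr': '1'}.get('Ga_scaffold1'.split('_')[0], '0')
feats = ['{0}:{1}'.format(i + 1, x) for i, x in enumerate(['0.12', '0.98'])]
print(' '.join([klass] + feats))    # -1 1:0.12 2:0.98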
2ca07d4a8893196bbf304bcdac16688505e6123a
Add a management command to register webhooks
shopify/webhooks/management/commands/webhookregister.py
shopify/webhooks/management/commands/webhookregister.py
Python
0.000001
@@ -0,0 +1,250 @@ +from django.core.management.base import NoArgsCommand%0A%0Afrom webhooks.models import Webhook%0A%0A%0Aclass Command(NoArgsCommand):%0A help = 'Register all created Shopify webhooks'%0A%0A def handle_noargs(self, **options):%0A Webhook.objects.register()%0A
4aafeac9c238ffb8dc448c87f18abfd7f1f0c9d7
store data dir info
gemini/anno_info.py
gemini/anno_info.py
Python
0
@@ -0,0 +1,205 @@ +#!/usr/bin/env python%0A%22%22%22%0AStore the path for GEMINI data-dir%0A%22%22%22%0A%0Afrom gemini.config import read_gemini_config%0A%0Aconfig = read_gemini_config()%0Aanno_dirname = config%5B%22annotation_dir%22%5D%0A%0Aprint anno_dirname%0A%0A%0A%0A
9d6a053441505fae600915e24a263de798843fbb
Add test_weapon class
test_weapon.py
test_weapon.py
Python
0.000001
@@ -0,0 +1,1094 @@ +import unittest%0Aimport weapon%0A%0A%0Aclass TestWeapon(unittest.TestCase):%0A def setUp(self):%0A self.w = weapon.Weapon('bow', 30, 1.0, 1)%0A self.w2 = weapon.Weapon('bow', 30, 2.0, 1)%0A%0A def test_weapon_init(self):%0A self.assertEqual('bow', self.w.type)%0A self.assertEqual(30, self.w.damage)%0A self.assertEqual(1.0, self.w.critical_strike_percent)%0A self.assertEqual(1, self.w.tier)%0A%0A def test_weapon_init2(self):%0A self.assertEqual('bow', self.w2.type)%0A self.assertEqual(30, self.w2.damage)%0A self.assertEqual(0.0, self.w2.critical_strike_percent)%0A self.assertEqual(1, self.w2.tier)%0A%0A def test_weapon_init_with_incorrect_argument(self):%0A self.assertEqual(0.0, self.w2.critical_strike_percent)%0A%0A def test_weapon_to_string(self):%0A self.assertEqual('bow%5Cn30 damage%5Cn100%25 critical strike percent',%0A str(self.w))%0A%0A def test_critical_hit(self):%0A self.assertTrue(self.w.critical_hit())%0A self.assertFalse(self.w2.critical_hit())%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
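The commit adds only the tests; one weapon.py that would satisfy them (a sketch inferred from the assertions, not the project's actual class):

from random import random

class Weapon(object):
    def __init__(self, type, damage, critical_strike_percent, tier):
        self.type = type
        self.damage = damage
        # the second test expects out-of-range percents to fall back to 0.0
        if 0.0 <= critical_strike_percent <= 1.0:
            self.critical_strike_percent = critical_strike_percent
        else:
            self.critical_strike_percent = 0.0
        self.tier = tier

    def __str__(self):
        return '{0}\n{1} damage\n{2}% critical strike percent'.format(
            self.type, self.damage, int(self.critical_strike_percent * 100))

    def critical_hit(self):
        return random() < self.critical_strike_percent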
7a4b4a116a10f389f6d14321547fa1966b262c0d
Add Hacker News
sources/misc.py
sources/misc.py
Python
0
@@ -0,0 +1,2737 @@ +# -*- coding: utf-8 -*-%0A%0A# Copyright (c) 2020 Clarence Ho (clarenceho at gmail dot com)%0A#%0A# Permission is hereby granted, free of charge, to any person obtaining a copy%0A# of this software and associated documentation files (the %22Software%22), to deal%0A# in the Software without restriction, including without limitation the rights%0A# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A# copies of the Software, and to permit persons to whom the Software is%0A# furnished to do so, subject to the following conditions:%0A#%0A# The above copyright notice and this permission notice shall be included in%0A# all copies or substantial portions of the Software.%0A#%0A# THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0A# SOFTWARE.%0A%0Aimport re%0Aimport datetime%0Afrom lxml import html%0Afrom lxml import etree%0Aimport traceback%0A%0Afrom logger import logger%0Afrom fetcher import read_http_page%0A%0Afrom .base import BaseSource%0Afrom .base import RSSBase%0Afrom .base import RDFBase%0A%0Aclass HackerNews(BaseSource):%0A def get_id(self):%0A return 'hackernews'%0A%0A def get_desc(self):%0A return 'Hacker News'%0A%0A def get_articles(self):%0A # Although the source is in RSS, the daily items are consolidated as CDATA.%0A # Parse and break them down instead of using RSSBase%0A rss_url = 'http://www.daemonology.net/hn-daily/index.rss'%0A resultList = %5B%5D%0A try:%0A doc = html.document_fromstring(read_http_page(rss_url))%0A for item in doc.xpath('//rss/channel/item'):%0A title = item.xpath('title')%5B0%5D.text if len(item.xpath('title')) %3E 0 else 'Daily Hacker News'%0A resultList.append(self.create_section(title))%0A%0A description = item.xpath('description')%5B0%5D if len(item.xpath('description')) %3E 0 else None%0A if description is not None:%0A for article in description.xpath('ul/li/span%5B@class=%22storylink%22%5D/a'):%0A if article.text and article.get('href'):%0A resultList.append(self.create_article(article.text.strip(), article.get('href')))%0A%0A except Exception as e:%0A logger.exception('Problem processing Hacker News: ' + str(e))%0A logger.exception(traceback.format_exception(etype=type(e), value=e, tb=e.__traceback__))%0A%0A return resultList%0A
cd48829eb08df62d8222128b33a7c00b9ca2ed8a
Add files via upload
interpro_go_extraction_direct.py
interpro_go_extraction_direct.py
Python
0
@@ -0,0 +1,1856 @@ +#!/usr/bin/env python2%0A%0A# Daniel Elsner%0A%0A# 26.09.2016%0A%0A# Take the GO ID directly from Interproscan, without the need of previous cutting and grepping.%0A%0A# Input: The interproscan-output.tsv file%0A%0Aimport sys%0A%0Awith open(sys.argv%5B1%5D, 'r') as readfile:%0A    id_list_content = list(readfile)%0A    %0Aoutdict=%7B%7D%0A%0A# make a dict, this prevents duplicate entries and makes access easy%0A    %0Afor i in range(len(id_list_content)):%0A    %0A    if %22GO%22 in id_list_content%5Bi%5D:%0A    # only if there is a GO entry, otherwise there is nothing to split%0A        %0A        inputs = id_list_content%5Bi%5D.split('%5Ct')%0A        p, j = inputs%5B0%5D, inputs%5B13%5D%0A        #from the entry line, get the Gene Name and the GO IDs%0A        %0A        outdict%5Bp%5D = set()%0A        # create a set, this spares us from checking for duplicates and just keeps everything once%0A        %0A    else:%0A        pass%0A        #if there is no GO entry, pass the line%0A        %0A        %0Afor i in range(len(id_list_content)):%0A    %0A    if %22GO%22 in id_list_content%5Bi%5D:%0A    %0A    # only if there is a GO entry, otherwise there is nothing to split%0A        %0A        inputs = id_list_content%5Bi%5D.split('%5Ct')%0A        p, j = inputs%5B0%5D, inputs%5B13%5D%0A        #from the entry line, get the Gene Name and the GO IDs%0A        %0A        if '%7C' in str(j):%0A            for n in str(j).split('%7C'):%0A                outdict%5Bp%5D.add(n.strip())%0A                %0A                # individual GOs are separated by %22%7C%22, for each of them add them to the set, automatically checking if it is already there.%0A                %0A        else:%0A            outdict%5Bp%5D.add(str(j.strip()))%0A            # create a set, this spares us from checking for duplicates and just keeps everything once%0A    else:%0A        pass%0A        #if there is no GO entry, pass the line%0A%0Afor i in range(len(outdict)):%0A    print str(outdict.keys()%5Bi%5D) + %22%5Ct%22 + ', '.join(outdict.values()%5Bi%5D)%0A
ed578177781ff1d4aeb0b7abb7d5f11fc5a7c626
Create copy of WeakList and set it to raise exception instead of removing item from list
grammpy/WeakList.py
grammpy/WeakList.py
Python
0
@@ -0,0 +1,2606 @@ +#!/usr/bin/env python%0A%22%22%22%0A:Author Patrik Valkovic%0A:Created 31.08.2017 12:11%0A:Licence GNUv3%0APart of grammpy%0A%0A%0AOriginal implementation: https://github.com/apieum/weakreflist%0A%22%22%22%0A%0Aimport weakref%0Afrom .exceptions import TreeDeletedException%0A%0A__all__ = %5B%22WeakList%22%5D%0A%0A%0Adef is_slice(index):%0A return isinstance(index, slice)%0A%0A%0Aclass WeakList(list):%0A def __init__(self, items=list()):%0A list.__init__(self, self._refs(items))%0A%0A def value(self, item):%0A if isinstance(item, weakref.ReferenceType):%0A if item() is None:%0A raise TreeDeletedException()%0A return item()%0A return item%0A%0A def ref(self, item):%0A try:%0A item = weakref.ref(item)%0A finally:%0A return item%0A%0A def __contains__(self, item):%0A return list.__contains__(self, self.ref(item))%0A%0A def __getitem__(self, index):%0A items = list.__getitem__(self, index)%0A return type(self)(self._values(items)) if is_slice(index) else self.value(items)%0A%0A def __setitem__(self, index, item):%0A items = self._refs(item) if is_slice(index) else self.ref(item)%0A return list.__setitem__(self, index, items)%0A%0A def __iter__(self):%0A return iter(self%5Bindex%5D for index in range(len(self)))%0A%0A def __reversed__(self):%0A reversed_self = type(self)(self)%0A reversed_self.reverse()%0A return reversed_self%0A%0A def append(self, item):%0A list.append(self, self.ref(item))%0A%0A def remove(self, item):%0A return list.remove(self, self.ref(item))%0A%0A def remove_all(self, item):%0A item = self.ref(item)%0A while list.__contains__(self, item):%0A list.remove(self, item)%0A%0A def index(self, item, start=None, stop=None):%0A return list.index(self, self.ref(item), start=start, stop=stop)%0A%0A def count(self, item):%0A return list.count(self, self.ref(item))%0A%0A def pop(self, index=-1):%0A return self.value(list.pop(self, self.ref(index)))%0A%0A def insert(self, index, item):%0A return list.insert(self, index, self.ref(item))%0A%0A def extend(self, items):%0A return list.extend(self, self._refs(items))%0A%0A def __iadd__(self, other):%0A return list.__iadd__(self, self._refs(other))%0A%0A def _refs(self, items):%0A return map(self.ref, items)%0A%0A def _values(self, items):%0A return map(self.value, items)%0A%0A def _sort_key(self, key=None):%0A return self.value if key is None else lambda item: key(self.value(item))%0A%0A def sort(self, *, key=None, reverse=False):%0A return list.sort(self, key=self._sort_key(key), reverse=reverse)%0A
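A sketch of the behaviour the change targets (on CPython, where deleting the last strong reference frees the object immediately): a dead slot now raises instead of silently disappearing:

class Node(object):
    pass

n = Node()
wl = WeakList([n])
print(wl[0] is n)   # True while the node is alive
del n               # drop the only strong reference
wl[0]               # now raises TreeDeletedException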
ee859881af0633d4d2d88015c907cfa856516dbe
Create TwoSum II for Lint
lintcode/000-000-Two-Sum-II/TwoSumII.py
lintcode/000-000-Two-Sum-II/TwoSumII.py
Python
0
@@ -0,0 +1,425 @@ +class Solution:%0A # @param nums, an array of integer%0A # @param target, an integer%0A # @return an integer%0A def twoSum2(self, nums, target):%0A # Write your code here%0A nums.sort()%0A i, j = 0, len(nums) - 1%0A res = 0%0A while i %3C j:%0A if nums%5Bi%5D + nums%5Bj%5D %3C= target:%0A i += 1%0A else:%0A res += j - i%0A j -= 1%0A return res%0A
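Despite the name, twoSum2 counts the pairs whose sum exceeds the target. A hand-checked example:

print(Solution().twoSum2([1, 1, 2, 45, 46, 46], 47))
# 5 -- the pairs over 47 are (2,46) twice, (45,46) twice and (46,46)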
52e8a378d8a31989c9d93ef83eabbe6df339f915
Add data migration to add category components for VPC.
src/waldur_mastermind/marketplace/migrations/0083_offering_component.py
src/waldur_mastermind/marketplace/migrations/0083_offering_component.py
Python
0
@@ -0,0 +1,1643 @@ +# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0Afrom waldur_mastermind.marketplace_openstack import STORAGE_TYPE, RAM_TYPE, CORES_TYPE, PACKAGE_TYPE%0A%0A%0Adef create_category_components(apps, schema_editor):%0A CATEGORY_TITLE = 'Private clouds'%0A%0A Category = apps.get_model('marketplace', 'Category')%0A CategoryComponent = apps.get_model('marketplace', 'CategoryComponent')%0A OfferingComponent = apps.get_model('marketplace', 'OfferingComponent')%0A%0A try:%0A vpc_category = Category.objects.get(title=CATEGORY_TITLE)%0A except Category.DoesNotExist:%0A return%0A%0A storage_gb_cc, _ = CategoryComponent.objects.get_or_create(%0A category=vpc_category,%0A type=STORAGE_TYPE,%0A name='Storage',%0A measured_unit='GB'%0A )%0A%0A ram_gb_cc, _ = CategoryComponent.objects.get_or_create(%0A category=vpc_category,%0A type=RAM_TYPE,%0A name='RAM',%0A measured_unit='GB'%0A )%0A%0A cores_cc, _ = CategoryComponent.objects.get_or_create(%0A category=vpc_category,%0A type=CORES_TYPE,%0A name='Cores',%0A measured_unit='cores'%0A )%0A%0A components = OfferingComponent.objects.filter(offering__type=PACKAGE_TYPE, parent=None)%0A%0A components.filter(type=STORAGE_TYPE).update(parent=storage_gb_cc)%0A components.filter(type=RAM_TYPE).update(parent=ram_gb_cc)%0A components.filter(type=CORES_TYPE).update(parent=cores_cc)%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('marketplace', '0082_orderitem_activated'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(create_category_components),%0A %5D%0A
df0772b3ae02ff0180f18410cf4350b493db9cb4
Create fold_stereonet_fisher_mean.py
fold_stereonet_fisher_mean.py
fold_stereonet_fisher_mean.py
Python
0.003638
@@ -0,0 +1,1067 @@ +#Definition of inputs and outputs%0A#==================================%0A##%5BMes scripts GEOL%5D=group%0A##entree=vector%0A##dip_dir=field entree%0A##dip=field entree%0A%0A#Algorithm body%0A#==================================%0Afrom qgis.core import *%0Afrom apsg import *%0A%0A%0Alayer = processing.getObject(entree)%0Adipdir = layer.fieldNameIndex(dip_dir)%0Adip = layer.fieldNameIndex(dip)%0A%0A%0Aif layer.selectedFeatureCount():%0A print %22ok%22, layer.selectedFeatureCount()%0A g= Group(%5BVec3(Fol(elem.attributes()%5Bdipdir%5D,elem.attributes()%5Bdip%5D)) for elem in layer.selectedFeatures()%5D,name='plis')%0Aelse:%0A g= Group(%5BVec3(Fol(elem.attributes()%5Bdipdir%5D,elem.attributes()%5Bdip%5D)) for elem in layer.getFeatures()%5D,name='plis')%0A%0A%0A%0A# mean vector%0Aresultat= %22mean vector: %22 + str(int(round(g.R.aslin.dd%5B1%5D))) + %22 - %22 + str(int(round(g.R.aslin.dd%5B0%5D)))%0As = StereoNet()%0As.line(g.aslin, 'b.',markersize=18)%0As.line(g.R.aslin,'g%5E',markersize=18)%0As.cone(g.R.aslin, g.fisher_stats%5B'a95'%5D, 'r') %0As.cone(g.R.aslin, g.fisher_stats%5B'csd'%5D, 'k')%0Aa = s.ax%0Aa.set_title(resultat, y=1.06, size=14, color='g')%0As.show()%0A%0A%0A%0A
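A standalone sketch of the same apsg pattern outside QGIS, with made-up orientations (dip direction, dip):

from apsg import Fol, Vec3, Group

g = Group([Vec3(Fol(dd, d)) for dd, d in [(120, 30), (140, 35), (135, 28)]],
          name='plis')
print(g.R.aslin.dd)             # resultant (mean) vector, as used above
print(g.fisher_stats['a95'])    # 95% confidence cone radius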
8ce4b91c9f1eca911809bc2e8c315ea24eac10ae
Add scheduler example
schedule.py
schedule.py
Python
0.000001
@@ -0,0 +1,353 @@ +import schedule%0Aimport time%0A%0Adef job():%0A print(%22I'm working...%22)%0A%0Aschedule.every(10).minutes.do(job)%0Aschedule.every().hour.do(job)%0Aschedule.every().day.at(%2210:30%22).do(job)%0Aschedule.every(5).to(10).minutes.do(job)%0Aschedule.every().monday.do(job)%0Aschedule.every().wednesday.at(%2213:15%22).do(job)%0A%0Awhile True:%0A schedule.run_pending()%0A time.sleep(1)%0A
9ab752bc96c1ad8d6e718cbf87f247aba4ab76a9
Create MiddleCharacter.py
Edabit/MiddleCharacter.py
Edabit/MiddleCharacter.py
Python
0.000001
@@ -0,0 +1,488 @@ +#!/usr/bin/env python3%0A'''%0ACreate a function that takes a string and returns the middle character(s). With conditions.%0A'''%0Adef get_middle(word):%0A%09if len(word) %3C= 2:%0A%09%09return word%0A%09elif len(word) %25 2 == 0:%0A%09%09return word%5B(len(word) // 2) - 1%5D + word%5B(len(word) // 2)%5D%0A%09else:%0A%09%09return word%5B(len(word) // 2)%5D%0A %0A #Alternative Solutions%0Adef get_middle(word):%0A return word%5B(len(word)-1)//2:(len(word)+2)//2%5D%0A%0Adef get_middle(word):%0A while len(word) %3E 2:%0A word = word%5B1:-1%5D%0A return word%0A
0cff0d69f0d2f52f950be37f95c8f261a9741ae7
Create KAKAO_DATA_PREPARE_NEW.py
KAKAO_DATA_PREPARE_NEW.py
KAKAO_DATA_PREPARE_NEW.py
Python
0
@@ -0,0 +1,1842 @@ +import h5py%0Afrom scipy.spatial import distance%0Aimport scipy.misc%0Aimport numpy as np %0A%0Apath = './Desktop/COVER_SONG/chroma_data_training/CP_1000ms_training_s2113_d2113_170106223452.h5'%0A%0Af1 = h5py.File(path)%0AdatasetNames=%5Bn for n in f1.keys()%5D%0A%0AX = f1%5B'X'%5D%0AidxDis_train = f1%5B'idxDis_train'%5D%0AidxDis_validate = f1%5B'idxDis_validate'%5D%0AidxSim_train = f1%5B'idxSim_train'%5D%0AidxSim_validate = f1%5B'idxSim_validate'%5D%0A%0A%0Adef oti(cover1,cover2,chroma_dim): %0A cover1_mean = np.sum(cover1,axis=0)/np.max(np.sum(cover1,axis=0)) %0A cover2_mean = np.sum(cover2,axis=0)/np.max(np.sum(cover2,axis=0))%0A dist_store = np.zeros(chroma_dim)%0A for i in range(0,chroma_dim):%0A cover2_mean_shifted = np.roll(cover2_mean, i) %0A dist = np.dot(cover1_mean,cover2_mean_shifted) %0A dist_store%5Bi%5D = dist %0A oti = np.argmax(dist_store)%0A cover2_shifted = np.roll(cover2, oti, axis=1)%0A return cover1, cover2_shifted%0A%0A%0A%0Adef simple_matrix(X,Y):%0A%09XX = oti(X,Y,12)%5B0%5D%0A%09YY = oti(X,Y,12)%5B1%5D%0A%09M = %5B%5B0 for col in range(180)%5D for row in range(180)%5D%0A%09for i in range(180):%0A%09%09for j in range(180):%0A%09%09%09M%5Bi%5D%5Bj%5D = distance.euclidean(XX%5Bi,:%5D,YY%5Bj,:%5D)%0A%09return np.asarray(M)%0A%0A%0A%0A%0A# np.shape(idxSim_train)%5B0%5D%0Afor i in range(np.shape(idxSim_train)%5B0%5D):%0A%09a=%5BidxSim_train%5Bi%5D%5B0%5D, idxSim_train%5Bi%5D%5B1%5D%5D%0A%09scipy.misc.imsave('./Desktop/KAKAO_ALL_PAIR_TRAIN/'+'%7B:0=4%7D'.format((int)(min(a)))+'_'+'%7B:0=4%7D'.format((int)(max(a)))+'_S.jpg',simple_matrix(X%5Bmin(a)-1%5D,X%5Bmax(a)-1%5D))%0A%09%09%0A%09print((str)(i)+'th complete')%0A%0A# np.shape(idxDis_train)%5B0%5D%0Afor i in range(np.shape(idxDis_train)%5B0%5D):%0A%09a=%5BidxDis_train%5Bi%5D%5B0%5D, idxDis_train%5Bi%5D%5B1%5D%5D%0A%09scipy.misc.imsave('./Desktop/KAKAO_ALL_PAIR_TRAIN/'+'%7B:0=4%7D'.format((int)(min(a)))+'_'+'%7B:0=4%7D'.format((int)(max(a)))+'_D.jpg',simple_matrix(X%5Bmin(a)-1%5D,X%5Bmax(a)-1%5D))%0A%09%09%0A%09print((str)(i)+'th complete')%0A%0A%0A# 1175 x 1175 pair (180 by 180 matrix) complete%0A%0A%0A
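A deterministic toy check of the oti() helper above: a chromagram with all of its energy in bin 0, transposed up 3 semitones, is rolled back into alignment:

import numpy as np

X = np.zeros((180, 12)); X[:, 0] = 1.0   # all energy in chroma bin 0
Y = np.roll(X, 3, axis=1)                # same piece, transposed +3
_, Y_aligned = oti(X, Y, 12)
print(np.allclose(X, Y_aligned))         # True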
3de9ab07b67bd37e418cba16318aa813326793bb
Create createREFgenomesForPhasing.py
createREFgenomesForPhasing.py
createREFgenomesForPhasing.py
Python
0
@@ -0,0 +1 @@ +%0A
c87779ed6e0163503c01efd3a3913b547954d73d
Create convcsv.py
convcsv.py
convcsv.py
Python
0.000412
@@ -0,0 +1,1669 @@ +#!/usr/bin/python%0A#%0A# convert spreadsheet data, removing multiple spaces%0A#%0Aimport os, sys, getopt, shutil, glob, re, traceback, json, csv%0Adef handle_exception():%0A    traceback.print_exc()%0A    os._exit(1)%0Adef addRow(lst,row):%0A    key = row%5B9%5D%0A    if key in lst:%0A        setlst = lst%5Bkey%5D%0A        setlst.append(row)%0A    else:%0A        setlst=%5Brow%5D%0A        lst%5Bkey%5D=setlst%0A    return lst%0Adef getRow(filename):%0A    try:%0A        lst = %7B%7D%0A        with open(filename,%22rb%22) as csvfile:%0A            rdr = csv.reader(csvfile, delimiter=',', quotechar='%22')%0A            for row in rdr:%0A                hdr=row%0A                break%0A            for row in rdr:%0A                row=%5Bre.sub(%22%5Cs%7B2,%7D%22, %22 %22, f) for f in row%5D%0A                key = row%5B1%5D.lower()%0A                if %22almaden%22 in key:%0A                    lst=addRow(lst,row)%0A                elif %22san jose%22 in key:%0A                    lst=addRow(lst,row)%0A                elif %22arc%22 in key:%0A                    lst=addRow(lst,row)%0A            csvfile.close()%0A        return lst%0A    except:%0A        traceback.print_exc()%0A#%0A# argv%5B0%5D = NAME%0A# argv%5B1%5D = IP%0A#%0Adef main(argv):%0A    try:%0A        if len(argv)%3C1:%0A            print '%7B%22STATUS%22:%22FAIL%22, %22MSG%22:%22MISSING ARGS%22 %7D'%0A            os._exit(2)%0A        lst=getRow(argv%5B0%5D)%0A        for name in lst:%0A            #print name%0A            machines=lst%5Bname%5D%0A            for machine in machines:%0A                print machine%5B9%5D+%22,%22+machine%5B13%5D+%22,%22+machine%5B11%5D+%22,%22+machine%5B12%5D%0A                break%0A            for machine in machines:%0A                print %22    %22+machine%5B3%5D+%22,%22+machine%5B2%5D%0A        #print lst%0A    except:%0A        handle_exception()%0Aif __name__ == %22__main__%22:%0A    main(sys.argv%5B1:%5D)%0A
1c81643eaed91b4171a4e68699d930e5ef3688db
Add negative API tests for policy validation
senlin/tests/tempest/api/policies/test_policy_validate_negative.py
senlin/tests/tempest/api/policies/test_policy_validate_negative.py
Python
0.000005
@@ -0,0 +1,2750 @@ +# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0Aimport copy%0Afrom tempest.lib import decorators%0Afrom tempest.lib import exceptions%0Afrom tempest import test%0A%0Afrom senlin.tests.tempest.api import base%0Afrom senlin.tests.tempest.common import constants%0A%0A%0Aclass TestPolicyValidateNegativeBadRequest(base.BaseSenlinAPITest):%0A%0A @test.attr(type=%5B'negative'%5D)%0A @decorators.idempotent_id('4b55bb3e-12d6-4728-9b53-9db5094ac8b5')%0A def test_policy_validate_with_empty_body(self):%0A params = %7B%0A %7D%0A # Verify badrequest exception(400) is raised.%0A self.assertRaises(exceptions.BadRequest,%0A self.client.validate_obj,%0A 'policies', params)%0A%0A @test.attr(type=%5B'negative'%5D)%0A @decorators.idempotent_id('a1c35d93-2d19-4a72-919f-cfd70f5cbf06')%0A def test_policy_validate_no_spec(self):%0A params = %7B%0A 'policy': %7B%0A %7D%0A %7D%0A # Verify badrequest exception(400) is raised.%0A self.assertRaises(exceptions.BadRequest,%0A self.client.validate_obj,%0A 'policies', params)%0A%0A @test.attr(type=%5B'negative'%5D)%0A @decorators.idempotent_id('6073da36-ee3e-4925-bce1-6c9a158e710d')%0A def test_policy_validate_policy_type_incorrect(self):%0A spec = copy.deepcopy(constants.spec_scaling_policy)%0A spec%5B'type'%5D = 'senlin.policy.bogus'%0A params = %7B%0A 'policy': %7B%0A 'spce': spec%0A %7D%0A %7D%0A # Verify badrequest exception(400) is raised.%0A self.assertRaises(exceptions.BadRequest,%0A self.client.validate_obj,%0A 'policies', params)%0A%0A @test.attr(type=%5B'negative'%5D)%0A @decorators.idempotent_id('1e1833ea-4a67-4ac1-b6e2-f9afff51c945')%0A def test_policy_validate_spec_validation_failed(self):%0A spec = copy.deepcopy(constants.spec_scaling_policy)%0A spec%5B'properties'%5D%5B'bogus'%5D = 'foo'%0A params = %7B%0A 'policy': %7B%0A 'spce': spec%0A %7D%0A %7D%0A # Verify badrequest exception(400) is raised.%0A self.assertRaises(exceptions.BadRequest,%0A self.client.validate_obj,%0A 'policies', params)%0A
f040351dd3397ba7297b69b2468b2b37589c0d8f
Add task to get stats about files
games/management/commands/get_installer_urls.py
games/management/commands/get_installer_urls.py
Python
0.000001
@@ -0,0 +1,1668 @@ +import json%0Afrom collections import defaultdict%0A%0Afrom django.core.management.base import BaseCommand%0Afrom common.util import load_yaml%0Afrom games import models%0A%0A%0Aclass Command(BaseCommand):%0A def handle(self, *args, **kwargs):%0A self.stdout.write(%22Installer stats%5Cn%22)%0A installers = models.Installer.objects.all()%0A url_stats = defaultdict(list)%0A for installer in installers:%0A slug = installer.slug%0A installer_content = load_yaml(installer.content)%0A try:%0A files = installer_content.get(%22files%22, %5B%5D)%0A except AttributeError:%0A print(%22Deleting installer %25s%22 %25 installer)%0A installer.delete()%0A continue%0A if files is None:%0A print(%22Deleting installer %25s%22 %25 installer)%0A installer.delete()%0A continue%0A for url_dict in files:%0A fileid = next(iter(url_dict))%0A try:%0A url = url_dict%5Bfileid%5D%0A except TypeError:%0A print(%22Deleting installer %25s%22 %25 installer)%0A installer.delete()%0A continue%0A if isinstance(url, str):%0A if url.startswith(%22N/A%22):%0A continue%0A url_stats%5Burl%5D.append(slug)%0A elif isinstance(url, dict):%0A if url%5B%22url%22%5D.startswith(%22N/A%22):%0A continue%0A url_stats%5Burl%5B%22url%22%5D%5D.append(slug)%0A%0A with open(%22installer-files.json%22, %22w%22) as installer_files:%0A json.dump(url_stats, installer_files, indent=2)%0A
cff5035ad469adc46ed9cf446bb95d9a1e07bd77
Fix inline template
judge/templatetags/smart_math.py
judge/templatetags/smart_math.py
from HTMLParser import HTMLParser from django.template import Library from django.conf import settings import re register = Library() MATHTEX_CGI = 'http://www.forkosh.com/mathtex.cgi'#settings.get('MATHTEX_CGI', 'http://www.forkosh.com/mathtex.cgi') inlinemath = re.compile(r'~(.*?)~|\\\((.*?)\\\)') def inline_template(match): math = match.group(1) or match.group(2) return r''' <span> <img src="%s?\textstyle %s"/> <span style="display:none">\( %s \)</span> </span> ''' % (MATHTEX_CGI, math, math) displaymath = re.compile(r'\$\$(.*?)\$\$|\\\[(.*?)\\\]') def display_template(match): math = match.group(1) or match.group(2) return r''' <span> <img class="tex-image" src="%s?\displaystyle %s" alt="%s"/> <div class="tex-text" style="display:none">\[ %s \]</div> </span> ''' % (MATHTEX_CGI, math, math, math) class MathHTMLParser(HTMLParser): def __init__(self): HTMLParser.__init__(self) self.new_page = [] self.data_buffer = [] def purge_buffer(self): if self.data_buffer: buffer = ''.join(self.data_buffer) buffer = inlinemath.sub(inline_template, buffer) buffer = displaymath.sub(display_template, buffer) self.new_page.append(buffer) del self.data_buffer[:] def handle_starttag(self, tag, attrs): self.purge_buffer() self.new_page.append('<%s%s>' % (tag, ' '.join([''] + ['%s="%s"' % p for p in attrs]))) def handle_endtag(self, tag): self.purge_buffer() self.new_page.append('</%s>' % tag) def handle_data(self, data): self.data_buffer.append(data) def handle_entityref(self, name): self.data_buffer.append('&%s;' % name) def handle_charref(self, name): self.data_buffer.append('&#%s;' % name) @register.filter(name='smart_math', is_safe=True) def math(page): parser = MathHTMLParser() parser.feed(page) return ''.join(parser.new_page)
Python
0.000001
@@ -401,16 +401,34 @@ %3Cimg + class=%22tex-image%22 src=%22%25s @@ -454,16 +454,33 @@ %3Cspan + class=%22tex-text%22 style=%22
12445164d5a7651ddcc381f5e602577d8372fe6a
Add is_eq_size script
is_eq_size.py
is_eq_size.py
Python
0.000077
@@ -0,0 +1,559 @@ +#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0A%0Aimport click%0Afrom PIL import Image%0Afrom utils import get_file_list%0A%0A%[email protected]()%[email protected]('path', type=click.Path(exists=True))%0Adef is_eq_size(path):%0A %22%22%22%0A Test all pictures in folder (recursive) for size equality.%0A %22%22%22%0A files = get_file_list(path)%0A sizes = %5BImage.open(f).size for f in files%5D%0A%0A if all(s == sizes%5B0%5D for s in sizes):%0A print 'all pictures have same size'%0A else:%0A print 'not all pictures have same size'%0A%0A%0Aif __name__ == '__main__':%0A is_eq_size()%0A
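The script imports get_file_list from a local utils module the commit does not include; one plausible implementation (an assumption, not the repo's code):

import os

def get_file_list(path):
    """Recursively collect every file path under *path*."""
    return [os.path.join(root, name)
            for root, _, names in os.walk(path)
            for name in names]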
ad052e71145296897c1510752c0f3403b9cb45a4
add 1st py file
hello.py
hello.py
Python
0
@@ -0,0 +1,54 @@ +print('Hello, Python!');%0A%0Aname = input();%0Aprint(name);
73f2260e0e5ae3534f13664063808abbe73b1d72
add a new extractor for json files
bin/extract_json.py
bin/extract_json.py
Python
0
@@ -0,0 +1,1635 @@ +#!/usr/bin/env python%0A# -*- coding:utf-8 -*-%0A%0A# Copyright 2015 Pablo Santiago Blum de Aguiar %[email protected]%3E. All rights%0A# reserved. Use of this source code is governed by Apache License, Version 2.0,%0A# that can be found on https://opensource.org/licenses/Apache-2.0%0A%0Aimport json%0Aimport sys%0A%0A%0Adef main(argv):%0A '''Usage: extract_json.py %3Cjson-file%3E %3Ckey%3E%5B.%3Ckey%3E...%5D'''%0A%0A def usage(json_dict=None):%0A print main.__doc__%0A if json_dict:%0A print 'Available keys:%5Cn %7B%7D'.format(%0A '%5Cn '.join(sorted(json_dict.keys()))%0A )%0A%0A if len(argv) %3C 1:%0A usage()%0A return 1%0A%0A if not sys.stdin.isatty():%0A try:%0A json_dict = json.load(sys.stdin)%0A except Exception as e:%0A print 'Could not read from STDIN: %7B%7D'.format(e)%0A return 2%0A keys = %5Bx for x in argv%5B1%5D.split('.')%5D if len(argv) %3E 1 else %5B%5D%0A else:%0A with open(sys.argv%5B1%5D) as json_file:%0A try:%0A json_dict = json.load(json_file)%0A except Exception as e:%0A print 'Could not read %7B%7D: %7B%7D'.format(sys.argv%5B1%5D, e)%0A return 2%0A keys = %5Bx for x in argv%5B2%5D.split('.')%5D if len(argv) %3E 2 else %5B%5D%0A%0A if not keys:%0A usage(json_dict)%0A return 3%0A%0A for key in keys:%0A try:%0A idx = int(key)%0A key = idx%0A except:%0A pass%0A try:%0A json_dict = json_dict%5Bkey%5D%0A except:%0A json_dict = ''%0A%0A print(json_dict.encode('utf8') if json_dict else '')%0A%0A%0Aif __name__ == '__main__':%0A status = main(sys.argv)%0A sys.exit(status)%0A
9c5031abd52152508e4ad2e06e685d1df193b279
fix forced push
cocos/layer/base_layers.py
cocos/layer/base_layers.py
# ---------------------------------------------------------------------------- # cocos2d # Copyright (c) 2008 Daniel Moisset, Ricardo Quesada, Rayentray Tappa, Lucio Torre # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # * Neither the name of cocos2d nor the names of its # contributors may be used to endorse or promote products # derived from this software without specific prior written # permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. # ---------------------------------------------------------------------------- """Layer class and subclasses A `Layer` has as size the whole drawable area (window or screen), and knows how to draw itself. It can be semi transparent (having holes and/or partial transparency in some/all places), allowing to see other layers behind it. Layers are the ones defining appearance and behavior, so most of your programming time will be spent coding Layer subclasses that do what you need. The layer is where you define event handlers. Events are propagated to layers (from front to back) until some layer catches the event and accepts it. """ __docformat__ = 'restructuredtext' from cocos.director import * from cocos import cocosnode from cocos import scene __all__ = [ 'Layer', 'MultiplexLayer'] class Layer(cocosnode.CocosNode, scene.EventHandlerMixin): """Class that handles events and other important game's behaviors""" is_event_handler = False #! if true, the event handlers of this layer will be registered. defaults to false. 
def __init__( self ): super( Layer, self ).__init__() self.scheduled_layer = False x,y = director.get_window_size() self.transform_anchor_x = x/2 self.transform_anchor_y = y/2 def push_handlers(self): if self.is_event_handler: director.window.push_handlers( self ) for child in self.get_children(): if isinstance(child, Layer): child.push_handlers() def remove_handlers(self): if self.is_event_handler: director.window.remove_handlers( self ) for child in self.get_children(): if isinstance(child, Layer): child.remove_handlers() def on_enter(self): super(Layer, self).on_enter() scn = self.get_ancestor(scene.Scene) if not scene: return if scn._handlers_enabled: director.window.push_handlers( self ) def on_exit(self): super(Layer, self).on_exit() scn = self.get_ancestor(scene.Scene) if not scene: return if scn._handlers_enabled: director.window.remove_handlers( self ) # # MultiplexLayer class MultiplexLayer( Layer ): """A Composite layer that only enables one layer at the time. This is useful, for example, when you have 3 or 4 menus, but you want to show one at the time""" def __init__( self, *layers ): super( MultiplexLayer, self ).__init__() self.layers = layers self.enabled_layer = 0 self.add( self.layers[ self.enabled_layer ] ) def switch_to( self, layer_number ): """Switches to another Layer that belongs to the Multiplexor. :Parameters: `layer_number` : Integer MUST be a number between 0 and the quantities of layers - 1. The running layer will receive an "on_exit()" call, and the new layer will receive an "on_enter()" call. """ if layer_number < 0 or layer_number >= len( self.layers ): raise Exception("Multiplexlayer: Invalid layer number") # remove self.remove( self.layers[ self.enabled_layer ] ) self.enabled_layer = layer_number self.add( self.layers[ self.enabled_layer ] )
Python
0.000003
@@ -3644,32 +3644,74 @@ ndlers_enabled:%0A + if self.is_event_handler:%0A dire @@ -3931,32 +3931,74 @@ ndlers_enabled:%0A + if self.is_event_handler:%0A dire
81a38564379af16f4ea2d64572e517a6657f4450
add first test for NormalIndPower and normal_power
statsmodels/stats/tests/test_power.py
statsmodels/stats/tests/test_power.py
Python
0
@@ -0,0 +1,1757 @@ +# -*- coding: utf-8 -*-%0A%22%22%22Tests for statistical power calculations%0A%0ANote:%0A test for ttest power are in test_weightstats.py%0A tests for chisquare power are in test_gof.py%0A%0ACreated on Sat Mar 09 08:44:49 2013%0A%0AAuthor: Josef Perktold%0A%22%22%22%0A%0Aimport numpy as np%0Afrom numpy.testing import assert_almost_equal%0A%0Aimport statsmodels.stats.power as smp%0A%0A%0Adef test_normal_power_explicit():%0A # a few initial test cases for NormalIndPower%0A sigma = 1%0A d = 0.3%0A nobs = 80%0A alpha = 0.05%0A res1 = smp.normal_power(d, nobs/2., 0.05)%0A res2 = smp.NormalIndPower().power(d, nobs, 0.05)%0A res3 = smp.NormalIndPower().solve_power(effect_size=0.3, nobs1=80, alpha=0.05, beta=None)%0A res_R = 0.475100870572638%0A assert_almost_equal(res1, res_R, decimal=13)%0A assert_almost_equal(res2, res_R, decimal=13)%0A assert_almost_equal(res3, res_R, decimal=13)%0A%0A%0A norm_pow = smp.normal_power(-0.01, nobs/2., 0.05)%0A norm_pow_R = 0.05045832927039234%0A #value from R: %3Epwr.2p.test(h=0.01,n=80,sig.level=0.05,alternative=%22two.sided%22)%0A assert_almost_equal(norm_pow, norm_pow_R, decimal=13)%0A%0A norm_pow = smp.NormalIndPower().power(0.01, nobs, 0.05, alternative=%221s%22)%0A norm_pow_R = 0.056869534873146124%0A #value from R: %3Epwr.2p.test(h=0.01,n=80,sig.level=0.05,alternative=%22greater%22)%0A assert_almost_equal(norm_pow, norm_pow_R, decimal=13)%0A%0A # Note: negative effect size is same as switching one-sided alternative%0A # TODO: should I switch to larger/smaller instead of %22one-sided%22 options%0A norm_pow = smp.NormalIndPower().power(-0.01, nobs, 0.05, alternative=%221s%22)%0A norm_pow_R = 0.0438089705093578%0A #value from R: %3Epwr.2p.test(h=0.01,n=80,sig.level=0.05,alternative=%22less%22)%0A assert_almost_equal(norm_pow, norm_pow_R, decimal=13)%0A
f24c8376847b0226f3d3f674af2f568367f15234
add data structure for parens problem
src/linked_list.py
src/linked_list.py
Python
0.000002
@@ -0,0 +1,2277 @@ +%22%22%22Singly-Linked List in Python.%22%22%22%0A%0A%0Aclass Node(object):%0A %22%22%22docstring for LinkedList.%22%22%22%0A%0A def __init__(self, data, next_item=None):%0A %22%22%22Init for instance of a node.%22%22%22%0A self.data = data%0A self.next_item = next_item%0A%0A%0Aclass LinkedList(object):%0A %22%22%22Class for head of Linked List.%22%22%22%0A%0A def __init__(self, data=None):%0A %22%22%22Initialize the head node.%22%22%22%0A self.head = None%0A if data:%0A try:%0A for item in data:%0A self.push(item)%0A except TypeError:%0A self.head = Node(data)%0A%0A def push(self, data=None):%0A %22%22%22Create new node in front of head.%22%22%22%0A new_head = Node(data, self.head)%0A self.head = new_head%0A%0A def pop(self):%0A %22%22%22Remove the first value off the head of the list and return it.%22%22%22%0A if self.head is None:%0A raise IndexError('Cannot pop from an empty list.')%0A new_head = self.head.next_item%0A old_head = self.head.data%0A self.head = new_head%0A return old_head%0A%0A def size(self):%0A %22%22%22Count the objects in linked list.%22%22%22%0A count = 0%0A curr = self.head%0A while curr:%0A count += 1%0A curr = curr.next_item%0A return count%0A%0A def search(self, val):%0A %22%22%22Iterate through the linked list to find instance containing val.%22%22%22%0A curr = self.head%0A result = None%0A try:%0A while val != curr.data:%0A curr = curr.next_item%0A else:%0A result = curr%0A except AttributeError:%0A pass%0A return result%0A%0A def remove(self, node):%0A %22%22%22Remove a given node in the list.%22%22%22%0A curr = self.head%0A previous = None%0A while curr:%0A if curr == node:%0A break%0A previous = curr%0A curr = previous.next_item%0A if previous is None:%0A self.head = curr.next_item%0A else:%0A previous.next_item = curr.next_item%0A%0A def display(self):%0A %22%22%22Return a string of the linked list.%22%22%22%0A curr = self.head%0A return_tuple = ()%0A while curr:%0A return_tuple = return_tuple + (curr.data, )%0A curr = curr.next_item%0A return return_tuple%0A
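Quick usage sketch of the list above; push() prepends, so building from an iterable reverses it:

ll = LinkedList([1, 2, 3])
print(ll.display())     # (3, 2, 1)
print(ll.pop())         # 3
ll.remove(ll.search(2))
print(ll.display())     # (1,)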
9992a4ff90156a1c5678303530c2feeaecf700d6
Create a_deco.py
src/misc/a_deco.py
src/misc/a_deco.py
Python
0.00033
@@ -0,0 +1,959 @@ +import os%0Aimport sys%0Aimport linecache%0A%0A%0Adef trace(func):%0A %22%22%22%0A A trace decorator%0A from: https://zhuanlan.zhihu.com/p/20175869%0A%0A :param func:%0A :return:%0A %22%22%22%0A def globaltrace(frame, why, arg):%0A if why == %22call%22:%0A return localtrace%0A return None%0A%0A def localtrace(frame, why, arg):%0A if why == %22line%22:%0A filename = frame.f_code.co_filename%0A line_no = frame.f_lineno%0A b_name = os.path.basename(filename)%0A tmp = linecache.getline(filename, line_no)%0A print(%22%7B0%7D(%7B1%7D):%7B2%7D%22.format(b_name, line_no, tmp), end='')%0A return localtrace%0A%0A def _func(*args, **kwargs):%0A sys.settrace(globaltrace)%0A result = func(*args, **kwargs)%0A sys.settrace(None)%0A return result%0A return _func%0A%0A%0A@trace%0Adef foo(i):%0A string = %22Hello world!%22%0A print(string)%0A print(string%5Bi%5D)%0A os.system(%22cls%22)%0A%0A%0Aif __name__ == %22__main__%22:%0A foo(-1)%0A
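The decorator rests on sys.settrace: the global hook fires on every 'call' event, and the local hook it returns is then invoked once per executed line. A minimal standalone version of just that mechanism:

import sys

def global_hook(frame, event, arg):
    if event == 'call':
        return local_hook   # trace this frame line by line

def local_hook(frame, event, arg):
    if event == 'line':
        print('line', frame.f_lineno, 'in', frame.f_code.co_name)
    return local_hook

def demo():
    x = 1
    return x + 1

sys.settrace(global_hook)
demo()
sys.settrace(None)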
463502a251111199da130e508929a35b2f126f4e
Add columns to User model
bookmarks/models.py
bookmarks/models.py
Python
0.000001
@@ -0,0 +1,543 @@ +from sqlalchemy import Column, Integer, String%0Afrom bookmarks.database import Base%0A%0A%0Aclass User(Base):%0A __tablename__ = 'users'%0A id = Column(Integer, primary_key=True)%0A username = Column(String(50), unique=True, nullable=False)%0A name = Column(String(120))%0A email = Column(String(256), unique=True, nullable=False)%0A%0A def __init__(self, name=None, username=None, email=None):%0A self.username = username%0A self.name = name%0A self.email = email%0A%0A def __repr__(self):%0A return '%3CUser %25r%3E' %25 (self.name)%0A
0c77666c259ba78899863bbbe482a857102c19be
add settings module
hackerearth/settings.py
hackerearth/settings.py
Python
0.000034
@@ -0,0 +1,377 @@ +%0A%0A# v3 API endpoints of HackerEarth Code Checker API%0ACOMPILE_API_ENDPOINT = 'https://api.hackerearth.com/v3/code/compile'%0ARUN_API_ENDPOINT = 'https://api.hackerearth.com/v3/code/run'%0A%0A# Max run time of a program in seconds%0ARUN_TIME_UPPER_LIMIT = 5%0A%0A%0A# Max memory consumption allowed for a program%0AMEMORY_UPPER_LIMIT = 1024*256%0A%0A# please keep this secret%0ACLIENT_SECRET = ''%0A%0A%0A%0A
36af113eb363ddf25f96ab53e41db0ea7f3bb481
add a python script to generate user weibo files from weibo.txt
src/main/python/data_aggregation.py
src/main/python/data_aggregation.py
Python
0
@@ -0,0 +1,1695 @@ +import sys, os%0A%0Adef generateData(inputData, outputDir, userLimit):%0A%09print %22Generate person weibo to folder: %22 + outputDir%0A%09if not os.path.isdir(outputDir):%0A os.mkdir(outputDir)%0A print 'Directory created at: ' + outputDir%0A%09currentID = %22%22%0A%09userNum = 0%0A%09outputFile = None%0A%09l = inputData.readline()%0A%09while l:%0A %09line = l.strip()%0A %09if line: %0A%09%09%09fields = line.split(%22%5Ct%22)%0A%09%09%09if len(fields) %3C 6:%0A%09%09%09%09print %22Broken line found: %22 + line%0A%09%09%09%09l = inputData.readline()%0A%09%09%09%09continue%0A%09%09%09if fields%5B1%5D != currentID:%0A%09%09%09%09userNum += 1%0A%09%09%09%09if userNum %3E userLimit:%0A%09%09%09%09%09break%0A%09%09%09%09print %22Find weibo for %22 + str(userNum) + %22 user: %22 + fields%5B1%5D%0A%09%09%09%09currentID = fields%5B1%5D%0A%09%09%09%09fileName = outputDir + %22/%22 + currentID%0A%09%09%09%09print %22Create a new file: %22 + fileName%0A%09%09%09%09outputFile = file(fileName,%22w%22)%0A%09%09%09outputFile.write(fields%5B5%5D + %22%5Cn%22) %0A %09l = inputData.readline()%0A%09print %22Generate user weibo %22 + str(userNum - 1)%0A%0Adef usage():%0A%09print %22Two parameter is required to run the scripts: input file and output folder%5Cn%22%0A%09print %22One parameter is optional: the limited number of user need generate, default will generate all the user weibo data in the input file%5Cn%22%0A%0Aif __name__ == %22__main__%22:%0A%0A %09if len(sys.argv) %3C 3: # Expect more then two argument: the input data file and output folder%0A%09%09usage()%0A %09sys.exit(2)%0A %09try:%0A %09inputData = file(sys.argv%5B1%5D,%22r%22)%0A %09except IOError:%0A %09sys.stderr.write(%22ERROR: Cannot read inputfile %25s.%5Cn%22 %25 arg)%0A %09sys.exit(1)%0A%09userCount = sys.maxint%0A%09if len(sys.argv) %3E= 4:%0A%09%09userCount = int(sys.argv%5B3%5D)%0A%09%09print %22Generate weibo user: %22 + str(userCount)%0A%09generateData(inputData, sys.argv%5B2%5D, userCount)%0A%09%0A
8c78679bc9875c698f639a0c45a5208b43162f4e
comment obsolete ff prefs script.
setup/nodes/review/set_ff_prefs.py
setup/nodes/review/set_ff_prefs.py
#!/usr/bin/python import os import ConfigParser home_dir = os.path.expanduser('~') print "home dir:", home_dir profiles_path= os.path.join(home_dir, ".mozilla","firefox","profiles.ini") print "profiles_path:", profiles_path # read ini file config = ConfigParser.RawConfigParser() config.read([profiles_path]) profiles = [s for s in config.sections() if s !='General'] if len(profiles)>1: print "more than one profile, you fix it." print profiles else: d=dict(config.items(profiles[0])) settings_path= os.path.join(home_dir, ".mozilla","firefox",d['path'],"user.js") config=""" user_pref("capability.policy.policynames", "localfilelinks"); user_pref("capability.policy.localfilelinks.sites", "http://localhost:8080","http://veyepar.nextdayvideo.com:8080"); user_pref("capability.policy.localfilelinks.checkloaduri.enabled", "allAccess"); """ print "writing to", settings_path open(settings_path,'w').write(config)
Python
0
@@ -12,16 +12,381 @@ python%0A%0A +%22%22%22%0AAllow a web page to access local files.%0AThis makes it easier to preview title screens and video files.%0A%0AFF stores profiles in ~/.mozilla/firefox/profiles.ini%0AFF settings are set by creating a .js file that sets things on startup%0A%0A1. count number of FF profiles.%0A If more than 1, give up.%0A2. get profile dir%0A3. create user.js that sets custom settings.%0A%22%22%22%0A%0A import o
bb11ab050fe9a7bb0ffe83419eb0e87390f7deac
Add registration method for TB
hopsutil/tensorboard.py
hopsutil/tensorboard.py
Python
0
@@ -0,0 +1,932 @@ +%22%22%22%0AUtility functions to retrieve information about available services and setting up security for the Hops platform.%0A%0AThese utils facilitates development by hiding complexity for programs interacting with Hops services.%0A%22%22%22%0A%0Aimport socket%0Aimport subprocess%0Aimport os%0Aimport hdfs%0A%0Adef register(logdir):%0A%0A #find free port%0A s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0A s.bind(('',0))%0A addr, port = s.getsockname()%0A s.close()%0A%0A #let tb bind to port%0A subprocess.Popen(%5Bos.getenv(%22PYSPARK_PYTHON%22), %22tensorboard%22, %22--logdir=%25s%22%25logdir, %22--port=%25d%22%25port, %22--debug%22%5D)%0A tb_url = %22http://%7B0%7D:%7B1%7D%22.format(addr, port)%0A%0A #dump tb host:port to hdfs%0A hops_user = os.environ%5B%22USER%22%5D;%0A hops_user_split = hops_user.split(%22__%22);%0A project = hops_user_split%5B0%5D;%0A hdfs_handle = hdfs.get()%0A hdfs_handle.dump(tb_url, %22hdfs:///Projects/%22 + project + %22/Resources/.jupyter.tensorboard%22, user=hops_user)%0A%0A%0A%0A
b22cfb4c6b8c0c0c3751078b720313d0e2baff1d
Test API call
src/filmyBot.py
src/filmyBot.py
Python
0.000001
@@ -0,0 +1,1298 @@ +import time,json,requests%0Aimport os%0Afrom slackclient import SlackClient%0A%0A# get the Slack API token as an environment variable%0ASLACK_BOT_TOKEN = os.environ%5B%22SLACK_BOT_TOKEN%22%5D%0ACHANNEL_NAME = %22test2%22%0ABOT_ID = %22U53TE8XSS%22%0A%0ASLACK_BOT_NAME = %22%3C@%22 + BOT_ID + %22%3E%22%0A%0Adef main():%0A print(SLACK_BOT_NAME)%0A # Create the slackclient instance%0A sc = SlackClient(SLACK_BOT_TOKEN)%0A%0A response = requests.get(%22http://www.omdbapi.com/?t=The+Dark+Knight&plot=full%22)%0A%0A data = response.json()%0A%0A # Connect to slack%0A if sc.rtm_connect():%0A # Send first message%0A #sc.rtm_send_message(CHANNEL_NAME, %22I'm ALIVE!!!%22)%0A%0A while True:%0A # Read latest messages%0A for slack_message in sc.rtm_read():%0A message = slack_message.get(%22text%22)%0A user = slack_message.get(%22user%22)%0A print(message, user)%0A%0A if(message and user):%0A if(SLACK_BOT_NAME in message):%0A print(%22done!%22)%0A sc.rtm_send_message(CHANNEL_NAME, data%5B%22Plot%22%5D)%0A sc.rtm_send_message(CHANNEL_NAME, sc.api_call(%22users.list%22))%0A else:%0A sc.rtm_send_message(CHANNEL_NAME, %22%22)%0A %0A%0Aif __name__ == '__main__':%0A main()
6f2ab55d0b83c33fad322101e7214425efd10829
add colors to module
comoving_rv/plot.py
comoving_rv/plot.py
Python
0
@@ -0,0 +1,139 @@ +colors = dict()%0Acolors%5B'line_marker'%5D = '#3182bd'%0Acolors%5B'gp_model'%5D = '#ff7f0e'%0Acolors%5B'not_black'%5D = '#333333'%0Acolors%5B'fit'%5D = '#2ca25f'%0A
963b1ab24767acb5253b9fe2f29749d8656b2918
index file added
index.py
index.py
Python
0.000001
@@ -0,0 +1,2093 @@ +#!/usr/bin/env python%0Aimport web%0Aimport page%0Aimport upload%0Aimport utils%0A#from google.appengine.ext import db%0Aimport logging%0Afrom Cheetah.Template import Template%0Aimport os%0A%0Aurls = (%0A '/page', page.app_page,%0A '/upload', upload.app_upload,%0A '/login', %22login%22,%0A '/(%5Cd+)-(?:%5B%5Cw%7C-%5D+)%5C.html', %22index%22,%0A %22/(.*)%22, %22index%22%0A)%0A%0Aclass login:%0A def GET(self):%0A #utils.login() %0A #return '%3Cform action=%22/login%22 method=%22POST%22%3E%3Cinput type=%22text%22 name=%22email%22 value=%22ryan%22 /%3E%3Cinput type=%22submit%22 /%3E%3C/form%3E'%0A path = os.path.join(os.path.dirname(__file__), 'templates/login.html')%0A template_values = %7B 'user':'test',%7D%0A tmpl = Template( file = path, searchList = (template_values,) )%0A return tmpl%0A%0A def POST(self):%0A if (utils.login() is None):%0A raise web.seeother('/login')%0A else:%0A raise web.seeother('/index.html')%0A %0A#class Page(db.Model):%0A# id = db.IntegerProperty()%0A# title = db.StringProperty()%0A# tags = db.StringListProperty()%0A# content = db.TextProperty()%0A# owner = db.IntegerProperty(default=666)%0A%0Aclass redirect:%0A def GET(self,page_name):%0A if utils.set_page_id(page_name):%0A web.redirect(%22/index.html%22)%0A else:%0A return %22FAIL%22%0A%0Aclass index:%0A def GET(self,page_name):%0A if page_name == %22w%22: %0A return 'test'%0A #page = Page()%0A #page.id = 1%0A #page.title = %22Random Stuff%22%0A #page.tags = %5B%22test%22,%22ryan%22,%22links%22%5D%0A #page.content = '%7B%22name%22: %22Untitled%22, %22order%22: %22%22, %22components%22: %7B%7D, %22last_id%22:0 %7D'%0A #page.put()%0A else:%0A #path = os.path.join(os.path.dirname(__file__), 'static/index.html')%0A path = os.path.join(os.path.dirname(__file__), 'templates/index.html')%0A template_values = %7B 'page_name':page_name,%7D%0A tmpl = Template( file = path, searchList = (template_values,) )%0A return tmpl%0A%0Aapp = web.application(urls, globals())%0Aif __name__ == '__main__':%0A logging.getLogger().setLevel(logging.DEBUG)%0A app.run()%0A%0A
b91eb0b8b5bd66ea0bf090e6c6e71232c81d6e7a
Add mount.py
kiwi/mount.py
kiwi/mount.py
Python
0.000002
@@ -0,0 +1,590 @@ +def mountpoint(path):%0A try:%0A subprocess.check_call(%5B'mountpoint', path%5D,%0A stdout=subprocess.PIPE, stderr=subprocess.PIPE)%0A except subprocess.CalledProcessError:%0A return False%0A%0A return True%0A%0Adef unmount(path):%0A subprocess.check_call(%5B'umount', path%5D)%0A%0Adef mount(src, dst, mkdir=False, force=False):%0A if mkdir: subprocess.check_call(%5B'mkdir', '-p', dst%5D)%0A%0A if mountpoint(dst):%0A logger.warning('Destination %25s is already a mountpoint' %25 dst)%0A if force: unmount(dst)%0A else: return%0A%0A subprocess.check_call(%5B'mount', src, dst%5D)%0A%0A
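Note: as committed, the module above calls subprocess and logger without importing or defining either, so importing it would fail with a NameError. A self-contained sketch of the same helpers (the logging setup is an assumption, not part of the commit):

import logging
import subprocess

logger = logging.getLogger(__name__)

def mountpoint(path):
    # Return True if *path* is currently a mountpoint, using mountpoint(1).
    try:
        subprocess.check_call(['mountpoint', path],
                              stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    except subprocess.CalledProcessError:
        return False
    return True

def unmount(path):
    # Unmount *path*; raises CalledProcessError on failure.
    subprocess.check_call(['umount', path])

def mount(src, dst, mkdir=False, force=False):
    # Mount *src* at *dst*, optionally creating *dst* and remounting if busy.
    if mkdir:
        subprocess.check_call(['mkdir', '-p', dst])
    if mountpoint(dst):
        logger.warning('Destination %s is already a mountpoint' % dst)
        if force:
            unmount(dst)
        else:
            return
    subprocess.check_call(['mount', src, dst])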
cc1a799da671fbbbdd0406eeebc8c5a801a099d5
Add extension test
tests/test_extension.py
tests/test_extension.py
Python
0.000001
@@ -0,0 +1,645 @@ +%22%22%22%0Atests.test_extension%0A====================%0A%0ATests for extension%0A%0A%22%22%22%0Aimport json%0A%0Afrom flask import Flask%0Afrom flask_swag import Swag%0A%0A%0Adef test_extension():%0A %22%22%22Basic test for flask extension.%22%22%22%0A app = Flask(__name__)%0A app.config%5B'SWAG_TITLE'%5D = %22Test application.%22%0A app.config%5B'SWAG_API_VERSION'%5D = '1.0.1'%0A%0A swag = Swag(app)%0A%0A with app.test_request_context('/swagger/swagger.json'):%0A swagger_json = app.generate_swagger()%0A%0A client = app.test_client()%0A response = client.get('/swagger/swagger.json')%0A assert 200 == response.status_code%0A assert swagger_json == json.loads(response.data.decode('utf-8'))%0A
4181f69bda52c4cbec7ac1d7529d44e26ede61d1
create object base classes.
pygeobase/object_base.py
pygeobase/object_base.py
Python
0
@@ -0,0 +1,3236 @@ +# Copyright (c) 2015, Vienna University of Technology, Department of Geodesy%0A# and Geoinformation. All rights reserved.%0A%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions are met:%0A# * Redistributions of source code must retain the above copyright%0A# notice, this list of conditions and the following disclaimer.%0A# * Redistributions in binary form must reproduce the above copyright%0A# notice, this list of conditions and the following disclaimer in the%0A# documentation and/or other materials provided with the distribution.%0A# * Neither the name of the Vienna University of Technology, Department of%0A# Geodesy and Geoinformation nor the names of its contributors may be%0A# used to endorse or promote products derived from this software without%0A# specific prior written permission.%0A%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS %22AS IS%22%0A# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE%0A# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE%0A# ARE DISCLAIMED. IN NO EVENT SHALL VIENNA UNIVERSITY OF TECHNOLOGY,%0A# DEPARTMENT OF GEODESY AND GEOINFORMATION BE LIABLE FOR ANY%0A# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES%0A# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;%0A# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND%0A# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT%0A# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS%0A# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.%0A%0A%0Aclass TS(object):%0A %22%22%22%0A The TS class represents the base object of a time series.%0A %22%22%22%0A def __init__(self, gpi, data, metadata):%0A %22%22%22%0A Initialization of the image object.%0A%0A Parameters%0A ----------%0A gpi : int%0A Grid point index associated with the time series%0A data : pandas.DataFrame%0A Pandas DataFrame that holds data for each variable of the time%0A series%0A metadata : dict%0A dictionary of numpy arrays that hold the metadata%0A %22%22%22%0A self.gpi = gpi%0A self.data = data%0A self.metadata = metadata%0A%0A%0Aclass Image(object):%0A %22%22%22%0A The Image class represents the base object of an image.%0A %22%22%22%0A def __init__(self, data, metadata, lon, lat, timestamp):%0A %22%22%22%0A Initialization of the image object.%0A%0A Parameters%0A ----------%0A data : dict%0A dictionary of numpy arrays that holds the image data for each%0A variable of the dataset%0A metadata : dict%0A dictionary of numpy arrays that hold the metadata%0A lon : numpy.array or None%0A array of longitudes, if None self.grid will be assumed%0A lat : numpy.array or None%0A array of latitudes, if None self.grid will be assumed%0A timestamp : datetime.datetime%0A exact timestamp of the image%0A %22%22%22%0A%0A self.data = data%0A self.metadata = metadata%0A self.lon = lon%0A self.lat = lat%0A self.timestamp = timestamp%0A
88ec4243ff78fe511331461b7563bd49f7124fe2
Add tuple.
tuple/tuple.py
tuple/tuple.py
Python
0.000014
@@ -0,0 +1,106 @@ +#!/usr/local/bin/python%0Ax=(42,)%0Aprint x%0Ay=3*(3,)%0Aprint y%0Az=tuple(%22hello%22)%0Ai=1,2,3%0Aprint i%5B2%5D%0Aprint i%5B0:2%5D%0A
24788b106b9cdd70e7240dc3eccac82fba290c85
Add test for yaml enviroment
tests/util/test_yaml.py
tests/util/test_yaml.py
"""Test Home Assistant yaml loader.""" import io import unittest from homeassistant.util import yaml class TestYaml(unittest.TestCase): """Test util.yaml loader.""" def test_simple_list(self): """Test simple list.""" conf = "config:\n - simple\n - list" with io.StringIO(conf) as f: doc = yaml.yaml.safe_load(f) assert doc['config'] == ["simple", "list"] def test_simple_dict(self): """Test simple dict.""" conf = "key: value" with io.StringIO(conf) as f: doc = yaml.yaml.safe_load(f) assert doc['key'] == 'value' def test_duplicate_key(self): """Test simple dict.""" conf = "key: thing1\nkey: thing2" try: with io.StringIO(conf) as f: yaml.yaml.safe_load(f) except Exception: pass else: assert 0
Python
0
@@ -57,16 +57,26 @@ unittest +%0Aimport os %0A%0Afrom h @@ -904,8 +904,696 @@ ssert 0%0A +%0A def test_enviroment_variable(self):%0A %22%22%22Test config file with enviroment variable.%22%22%22%0A os.environ%5B%22PASSWORD%22%5D = %22secret_password%22%0A conf = %22password: !env_var PASSWORD%22%0A with io.StringIO(conf) as f:%0A doc = yaml.yaml.safe_load(f)%0A assert doc%5B'password'%5D == %22secret_password%22%0A del os.environ%5B%22PASSWORD%22%5D%0A%0A def test_invalid_enviroment_variable(self):%0A %22%22%22Test config file with no enviroment variable sat.%22%22%22%0A conf = %22password: !env_var PASSWORD%22%0A try:%0A with io.StringIO(conf) as f:%0A yaml.yaml.safe_load(f)%0A except Exception:%0A pass%0A else:%0A assert 0%0A
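The new tests exercise an !env_var YAML tag whose implementation lives in homeassistant.util.yaml and is not shown in this record. A minimal stand-in that would satisfy both tests can be registered with plain PyYAML like this (the _env_var_yaml name is illustrative, not from the commit):

import os

import yaml


def _env_var_yaml(loader, node):
    # Resolve "!env_var NAME" to os.environ["NAME"]; fail loudly when unset.
    if node.value in os.environ:
        return os.environ[node.value]
    raise yaml.YAMLError("Environment variable %s not set" % node.value)


yaml.SafeLoader.add_constructor('!env_var', _env_var_yaml)

# yaml.safe_load("password: !env_var PASSWORD") now returns the value of
# $PASSWORD, and raises when the variable is missing - matching both tests.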
2b0a96791ad43ef1f27b610233dd34027cf83c75
Create currency-style.py
CiO/currency-style.py
CiO/currency-style.py
Python
0.000003
@@ -0,0 +1,298 @@ +import re%0A%0A%0Adef checkio(text):%0A numbers = re.findall('(?%3C=%5C$)%5B%5E %5D*%5Cd', text)%0A for old in numbers:%0A new = old.replace('.', ',')%0A if ',' in new and len(new.split(',')%5B-1%5D) == 2:%0A new = '.'.join(new.rsplit(',', 1))%0A text = text.replace(old, new)%0A return text%0A
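For reference, the checkio function above rewrites dollar amounts from European style (dot as thousands separator, comma before a two-digit decimal tail) to US style. A quick check with a made-up sentence:

print(checkio("Spent $12.255,45 and then $1.300 more"))
# -> "Spent $12,255.45 and then $1,300 more"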
a46f960e811123a137e4e5fe4350f6a850e9b33e
Create average-of-levels-in-binary-tree.py
Python/average-of-levels-in-binary-tree.py
Python/average-of-levels-in-binary-tree.py
Python
0.004195
@@ -0,0 +1,1272 @@ +# Time: O(n)%0A# Space: O(h)%0A%0A# Given a non-empty binary tree,%0A# return the average value of the nodes on each level in the form of an array.%0A#%0A# Example 1:%0A# Input:%0A# 3%0A# / %5C%0A# 9 20%0A# / %5C%0A# 15 7%0A# Output: %5B3, 14.5, 11%5D%0A# Explanation:%0A# The average value of nodes on level 0 is 3,%0A# on level 1 is 14.5, and on level 2 is 11. Hence return %5B3, 14.5, 11%5D.%0A#%0A# Note:%0A# The range of node's value is in the range of 32-bit signed integer.%0A%0A# Definition for a binary tree node.%0A# class TreeNode(object):%0A# def __init__(self, x):%0A# self.val = x%0A# self.left = None%0A# self.right = None%0A%0Aclass Solution(object):%0A def averageOfLevels(self, root):%0A %22%22%22%0A :type root: TreeNode%0A :rtype: List%5Bfloat%5D%0A %22%22%22%0A result = %5B%5D%0A q = collections.deque(%5Broot%5D)%0A while q:%0A total, count = 0, 0%0A next_q = collections.deque(%5B%5D)%0A while q:%0A n = q.popleft()%0A total += n.val;%0A count += 1%0A if n.left:%0A next_q.append(n.left)%0A if n.right:%0A next_q.append(n.right)%0A q, next_q = next_q, q%0A result.append(float(total) / count)%0A return result%0A
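One catch with the solution above: it calls collections.deque but never imports collections, which LeetCode's judge injects. A standalone run needs the import plus a small tree, e.g.:

import collections


class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


# Build the tree from Example 1: 3 / (9, 20) with 20's children 15 and 7.
root = TreeNode(3)
root.left, root.right = TreeNode(9), TreeNode(20)
root.right.left, root.right.right = TreeNode(15), TreeNode(7)
print(Solution().averageOfLevels(root))  # [3.0, 14.5, 11.0]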
ee076055f11638b8711658972dda8c4d4b40f666
Enforce max length on project name (#3982)
src/sentry/web/forms/add_project.py
src/sentry/web/forms/add_project.py
from __future__ import absolute_import

from django import forms
from django.utils.translation import ugettext_lazy as _

from sentry.models import AuditLogEntry, AuditLogEntryEvent, Project
from sentry.signals import project_created
from sentry.utils.samples import create_sample_event

BLANK_CHOICE = [("", "")]


class AddProjectForm(forms.ModelForm):
    name = forms.CharField(label=_('Name'), max_length=200,
        widget=forms.TextInput(attrs={
            'placeholder': _('i.e. API, Frontend, My Application Name'),
        }),
        help_text=_('Using the repository name generally works well.'),
    )

    class Meta:
        fields = ('name',)
        model = Project

    def __init__(self, organization, *args, **kwargs):
        forms.ModelForm.__init__(self, *args, **kwargs)
        self.organization = organization

    def save(self, actor, team, ip_address):
        project = super(AddProjectForm, self).save(commit=False)
        project.team = team
        project.organization = team.organization
        project.save()

        AuditLogEntry.objects.create(
            organization=project.organization,
            actor=actor,
            ip_address=ip_address,
            target_object=project.id,
            event=AuditLogEntryEvent.PROJECT_ADD,
            data=project.get_audit_log_data(),
        )

        project_created.send(project=project, user=actor, sender=self)

        create_sample_event(project, platform='javascript')

        return project
Python
0
@@ -408,11 +408,10 @@ gth= -200 +64 ,%0A
96f224a6b80720a88fefc8530aea113f975ef110
Add new layout window command
new_layout.py
new_layout.py
Python
0
@@ -0,0 +1,305 @@ +import sublime, sublime_plugin%0A%0Aclass NewLayoutCommand(sublime_plugin.TextCommand):%0A def run(self, edit, **args):%0A self.view.window().run_command(%22set_layout%22, args)%0A self.view.window().run_command(%22focus_group%22, %7B %22group%22: 0 %7D)%0A self.view.window().run_command(%22move_to_group%22, %7B %22group%22: 1 %7D )%0A
62ff128888bce33cf87e083a921ddac65a2f1879
Add regression test for #3951
spacy/tests/regression/test_issue3951.py
spacy/tests/regression/test_issue3951.py
Python
0.000001
@@ -0,0 +1,585 @@ +# coding: utf8%0Afrom __future__ import unicode_literals%0A%0Aimport pytest%0Afrom spacy.matcher import Matcher%0Afrom spacy.tokens import Doc%0A%0A%[email protected]%0Adef test_issue3951(en_vocab):%0A %22%22%22Test that combinations of optional rules are matched correctly.%22%22%22%0A matcher = Matcher(en_vocab)%0A pattern = %5B%0A %7B%22LOWER%22: %22hello%22%7D,%0A %7B%22LOWER%22: %22this%22, %22OP%22: %22?%22%7D,%0A %7B%22OP%22: %22?%22%7D,%0A %7B%22LOWER%22: %22world%22%7D,%0A %5D%0A matcher.add(%22TEST%22, None, pattern)%0A doc = Doc(en_vocab, words=%5B%22Hello%22, %22my%22, %22new%22, %22world%22%5D)%0A matches = matcher(doc)%0A assert len(matches) == 0%0A
8436253648c67205de23db8797c9fcc7c2172b3e
add the actual test
test/test_slice.py
test/test_slice.py
Python
0.001366
@@ -0,0 +1,310 @@ +# -*- Mode: Python -*-%0A# vi:si:et:sw=4:sts=4:ts=4%0A%0A'''%0ATests related to slices.%0A'''%0A%0Aimport unittest%0Aimport common%0A%0Aclass SliceTestCase:#(common.TestCase):%0A '''%0A test that slices work.%0A '''%0A def test_slice(self):%0A self.check('test_slice')%0A %0Aif __name__ == '__main__':%0A unittest.main()%0A
8ca0e88b7df79461f401e7c46c822f16223ddd0b
Create solution.py
hackerrank/algorithms/implementation/easy/between_two_sets/py/solution.py
hackerrank/algorithms/implementation/easy/between_two_sets/py/solution.py
Python
0.000018
@@ -0,0 +1,671 @@ +#!/bin/python3%0A%0Aimport sys%0A%0A# Hackerrank Python3 environment does not provide math.gcd%0A# as of the time of writing. We define it ourselves.%0Adef gcd(n, m):%0A while m %3E 0:%0A n, m = m, n %25 m%0A return n%0A%0Adef lcm(x, y):%0A return (x * y) // gcd(x, y)%0A%0Adef between(s1, s2):%0A import functools%0A%0A cd = functools.reduce(gcd, s2)%0A cm = functools.reduce(lcm, s1)%0A%0A return tuple(x for x in range(cm, cd + 1) if cd %25 x == 0 and x %25 cm == 0)%0A%0An, m = input().strip().split(' ')%0An, m = %5Bint(n),int(m)%5D%0Aa = %5Bint(a_temp) for a_temp in input().strip().split(' ')%5D%0Ab = %5Bint(b_temp) for b_temp in input().strip().split(' ')%5D%0A%0Abtw = between(a, b)%0A%0Aprint(len(btw))%0A
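Sanity check for the between helper above, using HackerRank's sample case: lcm(2, 4) = 4, gcd(16, 32, 96) = 16, and the integers that are multiples of 4 while dividing 16 are exactly 4, 8 and 16:

assert between([2, 4], [16, 32, 96]) == (4, 8, 16)
print(len(between([2, 4], [16, 32, 96])))  # -> 3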
325465d18e963400b427f259547d4292a47368c9
Use Django nose for tests.
oneflow/settings/snippets/common_development.py
oneflow/settings/snippets/common_development.py
#
# Include your development machines hostnames here.
#
# NOTE: this is not strictly needed, as Django doesn't enforce
# the check if DEBUG==True. But Just in case you wanted to disable
# it temporarily, this could be a good thing to have your hostname
# here.
#
# If you connect via http://localhost:8000/, everything is already OK.

TEMPLATE_CONTEXT_PROCESSORS += (
    'django.core.context_processors.debug',
)

ALLOWED_HOSTS += [
    'localhost',
    'chani.licorn.org',
    'leto.licorn.org',
    'gurney.licorn.org'
]
Python
0
@@ -518,8 +518,95 @@ .org'%0A%5D%0A +%0AINSTALLED_APPS += ('django_nose', )%0A%0ATEST_RUNNER = 'django_nose.NoseTestSuiteRunner'%0A%0A
3e9289f142efd0769beff97cddfcbcbede40f85a
add a half written Qkkk
pacfiles/Qkkk.py
pacfiles/Qkkk.py
Python
0.999936
@@ -0,0 +1,1231 @@ +#!/usr/bin/env python3%0Aimport pyalpm%0Aimport pycman%0Aimport tarfile%0Aimport sys, os, os.path%0A%0Apacmanconf = pycman.config.init_with_config(%22/etc/pacman.conf%22)%0Arootdir = pacmanconf.rootdir%0A%0Adef local_database():%0A%09handle = pacmanconf%0A%09localdb = handle.get_localdb()%0A%09packages = localdb.pkgcache%0A%09syncdbs = handle.get_syncdbs()%0A%09db = dict()%0A%09for pkg in packages:%0A%09%09for syncdb in syncdbs:%0A%09%09%09if syncdb.get_pkg(pkg.name) is not None:%0A%09%09%09%09db%5Bpkg.name%5D = syncdb.get_pkg(pkg.name)%0A%09return db%0A%0Adef get_pkgfiles(package):%0A%09db = local_database()%0A%09pkg = db%5Bpackage%5D.filename%0A%09result = %5B%5D%0A%09for d in pacmanconf.cachedirs:%0A%09%09p = os.path.join(d, pkg)%0A%09%09if os.path.exists(p):%0A%09%09%09result.append(p)%0A%09return result%0A%0Adef error_file(file, pkgfile, pkgname):%0A%09print(f'%22%7B%7Bfile%7D%7D%22 in %7B%7Bpkgfile%7D%7D of %7B%7Bpkgname%7D%7D mismatch')%0A%0Adef check_pkgfile(pkgname, pkgfile):%0A%09with tarfile.open(pkgfile) as tar:%0A%09%09for fn in tar:%0A%09%09%09fnpath = os.path.join(rootdir, fn.name)%0A%09%09%09if fn.isdir():%0A%09%09%09%09if not os.path.isdir(fnpath):%0A%09%09%09%09%09error_file(fnpath, pkgfile, pkgname)%0A%09%09%09# else if fn.issym():%0A%09%09%09# %09if not os.path.issym(fnpath):%0A%0Adef main():%0A%09for pkgname in sys.args:%0A%09%09for pkgfile in get_pkgfiles(pkgname):%0A%09%09%09check_pkgfile(pkgname, pkgfile)%0A%0Aif __name__ == '__main__':%0A%09main()%0A
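The subject says this module is half written, and two bugs stand out: main() iterates over sys.args, which does not exist (arguments live in sys.argv[1:]), and the doubled braces in the f-string print literal {file} placeholders instead of the values. A corrected sketch of just those two functions, leaving the pyalpm/pycman parts as committed:

import sys


def error_file(file, pkgfile, pkgname):
    # Single braces interpolate; the committed {{file}} prints braces literally.
    print(f'"{file}" in {pkgfile} of {pkgname} mismatch')


def main():
    # sys.args is a typo; the command-line arguments are in sys.argv[1:].
    for pkgname in sys.argv[1:]:
        for pkgfile in get_pkgfiles(pkgname):
            check_pkgfile(pkgname, pkgfile)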
701acbccc764101e00eef35dfff81dda5c5437a3
Create pages_in_dict.py
pages_in_dict.py
pages_in_dict.py
Python
0.000001
@@ -0,0 +1,525 @@ +import codecs%0Aimport os%0Aimport re%0A%0Aletters = %5B%5D%0Ano_letters = %5B%5D%0Anumber_of = %7B%7D%0A%0Apages = os.listdir(%22.%22)%0A%0A%0Afor page in pages:%0A if page.endswith('.html'):%0A if page%5B0:3%5D not in letters:%0A letters.append(page%5B0:3%5D)%0A f = codecs.open(page, 'r', 'utf-8-sig')%0A text = f.read()%0A #n = re.findall('Page %5B0-9%5D*? of %5B0-9%5D%5B0-9%5D', text, flags=re.U)%0A #number_of%5Bpage%5B0:3%5D%5D = n%5B-1:-2%5D%0A%0Afor i in range(161, 206):%0A if str(i) not in letters:%0A no_letters.append(str(i))%0A%0Aprint no_letters%0A
39bf0b2ab6f89cfe3450102699a5bbeaf235011a
Create 4.py
4.py
4.py
Python
0.000001
@@ -0,0 +1,1296 @@ +#!/usr/bin/env python%0A%0AMAX_TRI = 999999L%0A%0Atriangles = %5B%5D%0A%0Adef next_pos(mn, pos):%0A if mn %3E triangles%5BMAX_TRI - 1%5D:%0A return -1%0A else:%0A maxv = MAX_TRI - 1%0A minv = 0%0A mid = minv + (maxv - minv) / 2%0A while triangles%5Bmid%5D != mn and minv %3C maxv:%0A if triangles%5Bmid%5D %3C mn :%0A minv = mid + 1%0A else :%0A maxv = mid - 1%0A mid = minv + (maxv - minv) / 2%0A return mid%0A%0Adef gen_triangles(offset):%0A triangles%5B:%5D = %5B%5D%0A i = 1L + offset * MAX_TRI%0A bound = i + MAX_TRI%0A print %22Generating %25i through %25i %22 %25 (i, bound)%0A while i %3C= bound:%0A triangles.append((i * (i + 1L)) / 2L)%0A i += 1L%0A print %22Max value = %25i %22 %25 (triangles%5BMAX_TRI - 1%5D)%0A%0Adef pe321():%0A offset = 0L%0A #total = 0L%0A #count = 0L%0A #pos = 0L%0A n = 1L%0A #mn = 0L%0A%0A gen_triangles(offset)%0A offset = total = count = mn = 0L%0A n = 1L%0A while count %3C 41:%0A mn = 2L * n + n * n%0A while mn %25 3 != 0 and mn %25 9 != 1:%0A n += 1L%0A mn = 2L * n + n * n%0A pos = next_pos(mn, pos)%0A if pos == -1 :%0A offset += 2L%0A gen_triangles(offset)%0A pos = 0L%0A if mn == triangles%5Bpos%5D:%0A count += 1L%0A total += n%0A print %22M(%25i) = %25i is triangular%22 %25 (n, mn)%0A n += 1L;%0A else:%0A n += 1L%0A print %22The sum of the first %25i terms = %25i%22 %25 (count, total)%0A%0Ape321()%0A
ac357bc1ccefe55e25bb34021772301726ceec0e
Complete P4
Quiz/Problem4_defMyLog.py
Quiz/Problem4_defMyLog.py
Python
0.000001
@@ -0,0 +1,237 @@ +def myLog(x, b):%0A '''%0A x: a positive integer%0A b: a positive integer; b %3E= 2%0A%0A returns: log_b(x), or, the logarithm of x relative to a base b.%0A '''%0A if x %3C b:%0A return 0%0A else:%0A return 1 + myLog(x / b, b)
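The recursion above counts how many whole times the base divides into x, i.e. floor(log_b(x)) under Python 2's integer division (use x // b for the same behaviour in Python 3). For example:

print(myLog(16, 2))  # 4, since 2**4 == 16
print(myLog(28, 3))  # 3, since 3**3 == 27 <= 28 < 3**4 == 81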
39d2813153d422ac22442eedb9f185d04203d848
add some docstrings
pdfkit/pdfkit.py
pdfkit/pdfkit.py
# -*- coding: utf-8 -*-
import re
import subprocess
import sys
from .source import Source
from .configuration import Configuration
from itertools import chain
import io
import codecs


class PDFKit(object):
    """
    url_or_file: str - either a URL, a path to a file or a string containing
                 HTML to convert
    type_: str - either 'url', 'file' or 'string'
    options: dict (optional) with wkhtmltopdf options, with or w/o '--'
    toc: dict (optional) - toc-specific wkhtmltopdf options, with or w/o '--'
    cover: str (optional) - url/filename with a cover html page
    css: str (optional) - path to css file which will be added to input string
    configuration: (optional) instance of pdfkit.configuration.Configuration()
    """

    class ImproperSourceError(Exception):
        """Wrong source type for stylesheets"""

        def __init__(self, msg):
            self.msg = msg

        def __str__(self):
            return self.msg

    def __init__(self, url_or_file, type_, options=None, toc=None, cover=None,
                 css=None, configuration=None):
        self.source = Source(url_or_file, type_)
        self.configuration = Configuration() if configuration is None \
            else configuration
        self.wkhtmltopdf = self.configuration.wkhtmltopdf.decode('utf-8')

        self.options = dict()
        if self.source.isString():
            self.options.update(self._find_options_in_meta(url_or_file))
        if options is not None:
            self.options.update(options)
        self.options = self._normalize_options(self.options)

        toc = {} if toc is None else toc
        self.toc = self._normalize_options(toc)
        self.cover = cover
        self.css = css
        self.stylesheets = []

    def command(self, path=None):
        if self.css:
            self._prepend_css(self.css)

        args = [self.wkhtmltopdf]

        args += list(chain.from_iterable(list(self.options.items())))
        args = [_f for _f in args if _f]

        if self.toc:
            args.append('toc')
            args += list(chain.from_iterable(list(self.toc.items())))
        if self.cover:
            args.append('cover')
            args.append(self.cover)

        if self.source.isString():
            args.append('-')
        else:
            if isinstance(self.source.source, str):
                args.append(self.source.to_s())
            else:
                args += self.source.source

        if path:
            args.append(path)
        else:
            args.append('-')

        #args = map(lambda x: '"%s"' % x, args)
        return args

    def to_pdf(self, path=None):
        args = self.command(path)

        result = subprocess.Popen(args, stdin=subprocess.PIPE,
                                  stdout=subprocess.PIPE)

        if self.source.isString():
            result.communicate(input=self.source.to_s().encode('utf-8'))
        elif self.source.isFile() and self.css:
            result.communicate(input=self.source.to_s().encode('utf-8'))

        # capture output of wkhtmltopdf and pass it to stdout (can be
        # seen only when running from console )
        if '--quiet' not in args:
            while True:
                if result.poll() is not None:
                    break
                out = result.stdout.read(1).decode('utf-8')
                if out != '':
                    sys.stdout.write(out)
                    sys.stdout.flush()

        if path:
            try:
                with codecs.open(path, encoding='utf-8') as f:
                    # read 4 bytes to get PDF signature '%PDF'
                    text = f.read(4)
                    if text == '':
                        raise IOError('Command failed: %s\n'
                                      'Check whhtmltopdf output without \'quiet\' '
                                      'option' % ' '.join(args))
                    return text
            except IOError:
                raise IOError('Command failed: %s\n'
                              'Check whhtmltopdf output without \'quiet\' option' %
                              ' '.join(args))

    def _normalize_options(self, options):
        normalized_options = {}

        for key, value in list(options.items()):
            if not '--' in key:
                normalized_key = '--%s' % self._normalize_arg(key)
            else:
                normalized_key = self._normalize_arg(key)
            normalized_options[normalized_key] = str(value) if value else value

        return normalized_options

    def _normalize_arg(self, arg):
        return arg.lower()

    def _style_tag_for(self, stylesheet):
        return "<style>%s</style>" % stylesheet

    def _prepend_css(self, path):
        if self.source.isUrl() or isinstance(self.source.source, list):
            raise self.ImproperSourceError('CSS file can be added only to a single '
                                           'file or string')

        with open(path) as f:
            css_data = f.read()

        if self.source.isFile():
            with open(self.source.to_s()) as f:
                inp = f.read()
            self.source = Source(
                inp.replace('</head>', self._style_tag_for(css_data) + '</head>'),
                'string')

        elif self.source.isString():
            if '</head>' in self.source.to_s():
                self.source.source = self.source.to_s().replace(
                    '</head>', self._style_tag_for(css_data) + '</head>')
            else:
                self.source.source = self._style_tag_for(css_data) + self.source.to_s()

    def _find_options_in_meta(self, content):
        if isinstance(content, io.IOBase) \
                or content.__class__.__name__ == 'StreamReaderWriter':
            content = content.read()

        found = {}

        for x in re.findall('<meta [^>]*>', content):
            if re.search('name=["\']%s' % self.configuration.meta_tag_prefix, x):
                name = re.findall('name=["\']%s([^"\']*)' %
                                  self.configuration.meta_tag_prefix, x)[0]
                found[name] = re.findall('content=["\']([^"\']*)', x)[0]

        return found
Python
0.000022
@@ -4175,16 +4175,340 @@ tions):%0A + %22%22%22Updates a dict of config options to make then usable on command line%0A%0A options: dict %7Boption name: value%7D%0A%0A returns:%0A dict: %7Boption name: value%7D - option names lower cased and prepended with%0A '--' if necessary. Non-empty values cast to str%0A %22%22%22%0A @@ -5967,32 +5967,242 @@ self, content):%0A + %22%22%22Reads 'content' and extracts options encoded in HTML meta tags%0A%0A content: str or file-like object - contains HTML to parse%0A%0A returns:%0A dict: %7Bconfig option: value%7D%0A %22%22%22%0A if isins
51feabbc27821c5acb7f0ceb932d19c0d79f16d1
test ssl version check functions as expected in python 2.6
tests/test_help.py
tests/test_help.py
Python
0
@@ -0,0 +1,590 @@ +# -*- encoding: utf-8%0A%0Aimport sys%0A%0Aimport pytest%0A%0Afrom requests.help import info%0A%0A%[email protected](sys.version_info%5B:2%5D != (2,6), reason=%22Only run on Python 2.6%22)%0Adef test_system_ssl_py26():%0A %22%22%22OPENSSL_VERSION_NUMBER isn't provided in Python 2.6, verify we don't%0A blow up in this case.%0A %22%22%22%0A assert info()%5B'system_ssl'%5D == %7B'version': ''%7D%0A%0A%[email protected](sys.version_info %3C (2,7), reason=%22Only run on Python 2.7+%22)%0Adef test_system_ssl():%0A %22%22%22Verify we're actually setting system_ssl when it should be available.%22%22%22%0A assert info()%5B'system_ssl'%5D%5B'version'%5D != ''%0A
63804c534f23ffbe16ff539087048d99f9fcaf17
Implement test_encoder_decoder
test_encoder_decoder.py
test_encoder_decoder.py
Python
0.003162
@@ -0,0 +1,2468 @@ +#! /usr/bin/env python%0A# coding:utf-8%0A%0A%0Aif __name__ == %22__main__%22:%0A%0A import sys%0A import argparse%0A from seq2seq import decode%0A from util import load_dictionary%0A import configparser%0A import os%0A from chainer import serializers%0A%0A # GPU config%0A parser = argparse.ArgumentParser()%0A parser.add_argument('config_file', metavar='config_file', type=str,%0A help='config file')%0A parser.add_argument('--gpu', '-g', default=-1, type=int,%0A help='GPU ID (negative value indicates CPU)')%0A parser.add_argument('--type', '-t', default=%22relu%22, type=str,%0A help='GPU ID (negative value indicates CPU)')%0A args = parser.parse_args()%0A gpu_flag = True if args.gpu %3E= 0 else False%0A%0A config_file = args.config_file%0A parser_config = configparser.ConfigParser()%0A parser_config.read(config_file)%0A config = parser_config%5B%22CONFIG%22%5D%0A # config%5B%22SEPARATOR%22%5D = bytes(%0A # config%5B%22DEFAULT%22%5D%5B%22SEPARATOR%22%5D, %22utf-8%22%0A # ).decode(%22unicode_escape%22)%0A%0A # params%0A model_dir = config%5B%22model_dir%22%5D%0A n_units = int(config%5B%22n_units%22%5D)%0A%0A # load conversation sentences%0A dictionary = load_dictionary(config%5B%22dict_file%22%5D)%0A%0A # Prepare encoder RNN model%0A dim = len(dictionary.keys())%0A model_type = args.type%0A if model_type == %22relu%22:%0A import relu_rnn%0A model = relu_rnn.Classifier(%0A relu_rnn.ReLURNN(%0A embed_dim=dim,%0A n_units=int(config%5B%22n_units%22%5D),%0A gpu=args.gpu%0A )%0A )%0A elif model_type == %22lstm%22:%0A import lstm%0A model = lstm.Classifier(%0A lstm.LSTM(%0A embed_dim=dim,%0A n_units=int(config%5B%22n_units%22%5D),%0A gpu=args.gpu%0A )%0A )%0A else:%0A raise Exception(%22model argment should be relu or lstm%22)%0A%0A # load model%0A%0A init_model_name = os.path.join(%0A model_dir,%0A %22model.npz%22%0A )%0A if os.path.exists(init_model_name):%0A serializers.load_npz(init_model_name, model)%0A print(%22load model %7B%7D%22.format(init_model_name))%0A else:%0A raise Exception(%22learn model first%22)%0A%0A for text in (_.strip() for _ in sys.stdin):%0A ws = text.split()%0A print(ws)%0A decoded_words = decode(%0A ws,%0A model,%0A model,%0A dictionary,%0A )%0A%0A answer_text = %22%22.join(decoded_words%5B1:-1%5D)%0A print(answer_text)%0A
8eeb4c2db613c1354c38696ac6691cf79f66a383
Add spider for Brookdale Senior Living
locations/spiders/brookdale.py
locations/spiders/brookdale.py
Python
0
@@ -0,0 +1,1610 @@ +# -*- coding: utf-8 -*-%0Aimport scrapy%0Aimport json%0Afrom locations.items import GeojsonPointItem%0A%0AURL = 'https://www.brookdale.com/bin/brookdale/community-search?care_type_category=resident&loc=&finrpt=&state='%0A%0AUS_STATES = (%0A %22AL%22, %22AZ%22, %22AR%22, %22CA%22, %22CO%22, %22CT%22, %22DE%22, %22FL%22, %22GA%22,%0A %22ID%22, %22IL%22, %22IN%22, %22IA%22, %22KS%22, %22KY%22, %22LA%22, %22MD%22,%0A %22MA%22, %22MI%22, %22MN%22, %22MS%22, %22MO%22, %22MT%22, %22NE%22, %22NV%22, %22NH%22, %22NJ%22,%0A %22NM%22, %22NY%22, %22NC%22, %22OH%22, %22OK%22, %22OR%22, %22PA%22, %22RI%22, %22SC%22,%0A %22TN%22, %22TX%22, %22UT%22, %22VT%22, %22VA%22, %22WA%22, %22WV%22, %22WI%22, %22WY%22,%0A)%0A%0Aclass TemplateSpider(scrapy.Spider):%0A name = %22brookdale%22%0A allowed_domains = %5B%22www.brookdale.com%22%5D%0A%0A def start_requests(self):%0A for state in US_STATES:%0A url = ''.join(%5BURL, state%5D)%0A yield scrapy.Request(url, callback=self.parse_info)%0A%0A%0A def parse_info(self, response):%0A%0A data = json.loads(response.body_as_unicode())%0A%0A i = 0%0A while i %3C len(data):%0A%0A print(data%5Bi%5D%5B'name'%5D)%0A properties = %7B%0A %22ref%22: data%5Bi%5D%5B'community_id'%5D,%0A %22name%22: data%5Bi%5D%5B'name'%5D,%0A %22lat%22: data%5Bi%5D%5B'latitude'%5D,%0A %22lon%22: data%5Bi%5D%5B'longitude'%5D,%0A %22addr_full%22: data%5Bi%5D%5B'address1'%5D,%0A %22city%22: data%5Bi%5D%5B'city'%5D,%0A %22state%22: data%5Bi%5D%5B'state'%5D,%0A %22country%22: data%5Bi%5D%5B'country_code'%5D,%0A %22postcode%22: data%5Bi%5D%5B'zip_postal_code'%5D,%0A %22website%22: data%5Bi%5D%5B'website'%5D,%0A %22phone%22: data%5Bi%5D%5B'contact_center_phone'%5D,%0A %7D%0A%0A yield GeojsonPointItem(**properties)%0A i += 1
47893c708f3b63f79a01d5ee927f4c7d3f6dff27
Create script to delete untitled and unpublished projects
akvo/rsr/management/commands/delete_untitled_and_unpublished_projects.py
akvo/rsr/management/commands/delete_untitled_and_unpublished_projects.py
Python
0
@@ -0,0 +1,2442 @@ +# -*- coding: utf-8 -*-%0A%0A# Akvo Reporting is covered by the GNU Affero General Public License.%0A# See more details in the license.txt file located at the root folder of the Akvo RSR module.%0A# For additional details on the GNU license please see %3C http://www.gnu.org/licenses/agpl.html %3E.%0A%0Aimport datetime%0A%0Afrom tablib import Dataset%0Afrom django.core.management.base import BaseCommand%0Afrom django.db.models import Q%0Afrom akvo.rsr.models import Project, PublishingStatus, IndicatorPeriodData, Result, IatiActivityImport%0A%0A%0Aclass Command(BaseCommand):%0A help = %22%22%22%5C%0A Delete all Untitled and Unpublished projects created before the given date%0A %3Cscript%3E %3Cdate:%25Y-%25m-%25d%3E --delete%0A %22%22%22%0A%0A def add_arguments(self, parser):%0A parser.add_argument('date', type=lambda date: datetime.datetime.strptime(date, '%25Y-%25m-%25d').date())%0A parser.add_argument('--delete', action='store_true', help='Actually delete projects')%0A%0A def handle(self, *args, **options):%0A the_date = options%5B'date'%5D%0A projects = Project.objects%5C%0A .filter(created_at__lt=the_date)%5C%0A .filter(Q(title__exact='') %7C Q(publishingstatus__status=PublishingStatus.STATUS_UNPUBLISHED))%0A project_ids = projects.values_list('id', flat=True)%0A if options%5B'delete'%5D:%0A updates = IndicatorPeriodData.objects.filter(period__indicator__result__project__in=project_ids)%0A print(f%22Deleting %7Bupdates.count()%7D period updates%22)%0A updates.delete()%0A iati_import = IatiActivityImport.objects.filter(project__in=project_ids)%0A print(f%22Deleting %7Biati_import.count()%7D iati activity import%22)%0A iati_import.delete()%0A results = Result.objects.filter(project__in=project_ids)%0A print(f%22Deleting %7Bresults.count()%7D results%22)%0A results.delete()%0A print(f%22Deleting %7Bprojects.count()%7D projects)%22)%0A projects.delete()%0A else:%0A data = Dataset()%0A data.headers = %5B%0A 'project_id',%0A 'project_title',%0A 'is_published',%0A 'created_at'%0A %5D%0A for p in projects:%0A data.append(%5B%0A p.id,%0A p.title,%0A p.is_published(),%0A p.created_at%0A %5D)%0A print(data.export('csv'))%0A print(f'Found %7Bprojects.count()%7D projects to delete.')%0A
03c837b0da9d7f7a6c6c54286631e9a403da3e60
Add network scan python script - Closes #7
backend/net_scan.py
backend/net_scan.py
Python
0
@@ -0,0 +1,1254 @@ +#!/usr/bin/python%0A%0Aimport sys, getopt, nmap%0A%0Adef usage():%0A print 'sword_nmap.py -t %3Ctarget%3E -p %3Cport range%3E'%0A%0Adef main(argv):%0A target=''%0A port_range=''%0A try:%0A opts, args = getopt.getopt(argv,'ht:p:',%5B'target=','ports='%5D)%0A except getopt.GetoptError:%0A usage()%0A sys.exit(2)%0A for opt, arg in opts:%0A if opt == '-h':%0A usage()%0A sys.exit()%0A elif opt in ('-t', '--target'):%0A target = arg%0A elif opt in ('-p', '--ports'):%0A port_range = arg%0A%0A if target == '':%0A usage()%0A sys.exit(1)%0A if port_range == '':%0A usage()%0A sys.exit(1)%0A%0A scan(target, port_range)%0A%0Adef scan (target, port_range):%0A print ('Scanning %25s %25s' %25(target, port_range))%0A nm = nmap.PortScanner()%0A nm.scan(target, port_range)%0A nm.command_line()%0A%0A for host in nm.all_hosts():%0A print('Host : %25s (%25s): %25s' %25 (host, nm%5Bhost%5D.hostname(), nm%5Bhost%5D.state()))%0A%0A for proto in nm%5Bhost%5D.all_protocols():%0A lport = nm%5Bhost%5D%5Bproto%5D.keys()%0A lport.sort()%0A%0A for port in lport:%0A print ('%5Ct%25s port %25s %25s' %25 (proto, port, nm%5Bhost%5D%5Bproto%5D%5Bport%5D%5B'state'%5D))%0A%0Aif __name__ == '__main__':%0A main(sys.argv%5B1:%5D)%0A%0A
4699c1c301f1cb99f6c9e616b31769c01bc291d5
change datafiles in v1.* to make it work in v2.*
v1_to_v2.py
v1_to_v2.py
Python
0.000001
@@ -0,0 +1,369 @@ +#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0Aimport optparse, pickle, exam%0A%0Adef main():%0A opt = optparse.OptionParser()%0A (options, args) = opt.parse_args()%0A%0A for i in args:%0A with open(i,'rb') as f: data = pickle.load(f)%0A data = exam.QuestForm(data)%0A with open('v3.'.join(i.split('.')),'wb') as f: pickle.dump(data,f)%0A return%0A%0A%0Amain()%0A
0403d6f78189be3f3b22f068dad1db0c53687ef7
Add ptch module and base PatchFile class. This class can unpack RLE-compressed patchfiles.
ptch/__init__.py
ptch/__init__.py
Python
0
@@ -0,0 +1,1614 @@ +# -*- coding: utf-8 -*-%0A%22%22%22%0APTCH files are a container format for Blizzard patch files.%0AThey begin with a 72 byte header containing some metadata, immediately%0Afollowed by a RLE-packed BSDIFF40.%0AThe original BSDIFF40 format is compressed with bzip2 instead of RLE.%0A%22%22%22%0A%0A#from hashlib import md5%0Afrom struct import unpack%0Afrom binascii import hexlify%0Afrom cStringIO import StringIO%0A%0A%0Aclass PatchFile(object):%0A%09def __init__(self, file):%0A%09%09# Parse the header%0A%09%09file.seek(0)%0A%09%09assert file.read(4) == %22PTCH%22%0A%09%09unk1 = file.read(4)%0A%09%09self.sizeBefore, self.sizeAfter = unpack(%22ii%22, file.read(8))%0A%09%09assert file.read(4) == %22MD5_%22%0A%09%09assert unpack(%22i%22, file.read(4)) == (0x28, )%0A%09%09self.md5Before, self.md5After = unpack(%2216s16s%22, file.read(32))%0A%09%09self.md5Before, self.md5After = hexlify(self.md5Before), hexlify(self.md5After)%0A%09%09assert file.read(4) == %22XFRM%22%0A%09%09file.read(4)%0A%09%09assert file.read(4) == %22BSD0%22%0A%09%09self.fileSize, = unpack(%22i%22, file.read(4))%0A%09%09%0A%09%09self.compressedDiff = file.read()%0A%09%09%0A%09%09file.close()%0A%09%0A%09def __repr__(self):%0A%09%09header = (%22sizeBefore%22, %22sizeAfter%22, %22md5Before%22, %22md5After%22, %22fileSize%22)%0A%09%09return %22%25s(%25s)%22 %25 (self.__class__.__name__, %22, %22.join(%22%25s=%25r%22 %25 (k, getattr(self, k)) for k in header))%0A%09%0A%09def rleUnpack(self):%0A%09%09%22%22%22%0A%09%09Read the RLE-packed data and%0A%09%09return the unpacked output.%0A%09%09%22%22%22%0A%09%09data = StringIO(self.compressedDiff)%0A%09%09ret = %5B%5D%0A%09%09%0A%09%09byte = data.read(1)%0A%09%09while byte:%0A%09%09%09byte = ord(byte)%0A%09%09%09# Is it a repeat control?%0A%09%09%09if byte & 0x80:%0A%09%09%09%09count = (byte & 0x7F) + 1%0A%09%09%09%09ret.append(data.read(count))%0A%09%09%09%0A%09%09%09else:%0A%09%09%09%09ret.append(%22%5C0%22 * (byte+1))%0A%09%09%09%0A%09%09%09byte = data.read(1)%0A%09%09%0A%09%09return %22%22.join(ret)%0A
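The control-byte scheme in rleUnpack above: a byte with the high bit set copies the next (byte & 0x7F) + 1 bytes verbatim, while any other byte emits byte + 1 zero bytes. A standalone Python 3 rendering of the same loop, since the module itself is Python 2 and uses cStringIO:

import io

def rle_unpack(data):
    buf = io.BytesIO(data)
    out = []
    byte = buf.read(1)
    while byte:
        b = byte[0]
        if b & 0x80:
            # Repeat control: copy the next (b & 0x7F) + 1 bytes as-is.
            out.append(buf.read((b & 0x7F) + 1))
        else:
            # Zero-run control: emit b + 1 zero bytes.
            out.append(b'\x00' * (b + 1))
        byte = buf.read(1)
    return b''.join(out)

# 0x82 copies 3 literal bytes ('abc'), 0x01 emits two zero bytes.
assert rle_unpack(b'\x82abc\x01') == b'abc\x00\x00'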
8533c93505a733980406ce655372c7742dfcfdfc
Add update policy that allows for in place upgrade of ES cluster (#1537)
troposphere/policies.py
troposphere/policies.py
from . import AWSProperty, AWSAttribute, validate_pausetime
from .validators import positive_integer, integer, boolean


class AutoScalingRollingUpdate(AWSProperty):
    props = {
        'MaxBatchSize': (positive_integer, False),
        'MinInstancesInService': (integer, False),
        'MinSuccessfulInstancesPercent': (integer, False),
        'PauseTime': (validate_pausetime, False),
        'SuspendProcesses': ([basestring], False),
        'WaitOnResourceSignals': (boolean, False),
    }


class AutoScalingScheduledAction(AWSProperty):
    props = {
        'IgnoreUnmodifiedGroupSizeProperties': (boolean, False),
    }


class AutoScalingReplacingUpdate(AWSProperty):
    props = {
        'WillReplace': (boolean, False),
    }


class CodeDeployLambdaAliasUpdate(AWSProperty):
    props = {
        'AfterAllowTrafficHook': (basestring, False),
        'ApplicationName': (boolean, True),
        'BeforeAllowTrafficHook': (basestring, False),
        'DeploymentGroupName': (boolean, True),
    }


class UpdatePolicy(AWSAttribute):
    props = {
        'AutoScalingRollingUpdate': (AutoScalingRollingUpdate, False),
        'AutoScalingScheduledAction': (AutoScalingScheduledAction, False),
        'AutoScalingReplacingUpdate': (AutoScalingReplacingUpdate, False),
        'CodeDeployLambdaAliasUpdate': (CodeDeployLambdaAliasUpdate, False),
        'UseOnlineResharding': (boolean, False),
    }


class ResourceSignal(AWSProperty):
    props = {
        'Count': (positive_integer, False),
        'Timeout': (validate_pausetime, False),
    }


class AutoScalingCreationPolicy(AWSProperty):
    props = {
        'MinSuccessfulInstancesPercent': (integer, False),
    }


class CreationPolicy(AWSAttribute):
    props = {
        'AutoScalingCreationPolicy': (AutoScalingCreationPolicy, False),
        'ResourceSignal': (ResourceSignal, True),
    }
Python
0
@@ -1396,32 +1396,82 @@ oolean, False),%0A + 'EnableVersionUpgrade': (boolean, False),%0A %7D%0A%0A%0Aclass Re
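The one-line diff above adds EnableVersionUpgrade, which CloudFormation honours on AWS::Elasticsearch::Domain resources to allow in-place engine upgrades instead of replacement. A hedged usage sketch (the domain name and version are illustrative, not from the commit):

from troposphere import Template
from troposphere.elasticsearch import Domain
from troposphere.policies import UpdatePolicy

t = Template()
t.add_resource(Domain(
    "SearchDomain",
    ElasticsearchVersion="7.4",
    # Attribute-level policy: upgrade the domain in place rather than replace it.
    UpdatePolicy=UpdatePolicy(EnableVersionUpgrade=True),
))
print(t.to_json())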