commit (stringlengths 40–40) | subject (stringlengths 1–3.25k) | old_file (stringlengths 4–311) | new_file (stringlengths 4–311) | old_contents (stringlengths 0–26.3k) | lang (stringclasses 3 values) | proba (float64 0–1) | diff (stringlengths 0–7.82k) |
---|---|---|---|---|---|---|---|
295823afe17cedaa1934afbcd19d955974089c63
|
Add producer written in Python
|
python/send.py
|
python/send.py
|
Python
| 0.000005 |
@@ -0,0 +1,679 @@
#!/usr/bin/env python
import pika

# Host in which RabbitMQ is running.
HOST = 'localhost'

# Name of the queue.
QUEUE = 'pages'

# The message to send.
MESSAGE = 'Hi there! This is a test message =)'

# Getting the connection using pika.
# Creating the channel.
# Declaring the queue.
connection = pika.BlockingConnection(pika.ConnectionParameters(HOST))
channel = connection.channel()
channel.queue_declare(queue=QUEUE)

# Sends the 'MESSAGE' to the queue.
# Default empty 'exchange' with 'routing_key' equal to the queue name
# will route the message to that queue.
channel.publish(exchange='', routing_key=QUEUE, body=MESSAGE)

# The connection is closed.
connection.close()
|
|
04c8b38ac43c84abe64858cfd22a721e803b87eb
|
add mocked tests for internal /run folder
|
tests/core/test_run_files.py
|
tests/core/test_run_files.py
|
Python
| 0 |
@@ -0,0 +1,2322 @@
# stdlib
import os
import shlex
import signal
import subprocess
import time
import unittest

# 3p
import mock
from nose.plugins.attrib import attr

# Mock gettempdir for testing
import tempfile; tempfile.gettempdir = mock.Mock(return_value='/a/test/tmp/dir')

# project
# Mock _windows_commondata_path for testing
import config; config._windows_commondata_path = mock.Mock(return_value='./windows_commondata')


from utils.pidfile import PidFile
from checks.check_status import AgentStatus

class TestRunFiles(unittest.TestCase):
    """ Tests that runfiles (.pid, .sock, .pickle etc.) are written to internal agent folders"""

    # Mac run directory expected location
    _my_dir = os.path.dirname(os.path.abspath(__file__))
    _mac_run_dir = '/'.join(_my_dir.split('/')[:-4])

    def setUp(self):
        self.agent_daemon = None

    def tearDown(self):
        if self.agent_daemon:
            args = shlex.split('python agent.py stop')
            subprocess.Popen(args).communicate()

    @mock.patch('utils.platform.Platform.is_win32', return_value=True)
    def test_agent_status_pickle_file_win32(self, *mocks):
        ''' Test pickle file location on win32 '''
        expected_path = os.path.join('.', 'windows_commondata', 'Datadog', 'AgentStatus.pickle')
        # check AgentStatus pickle created
        self.assertEqual(AgentStatus._get_pickle_path(), expected_path)

    @mock.patch('utils.pidfile.PidFile.get_dir', return_value=_mac_run_dir)
    @mock.patch('utils.platform.Platform.is_win32', return_value=False)
    @mock.patch('utils.platform.Platform.is_mac', return_value=True)
    def test_agent_status_pickle_file_mac_dmg(self, *mocks):
        ''' Test pickle file location when running a Mac DMG install '''
        expected_path = os.path.join(self._mac_run_dir, 'AgentStatus.pickle')
        self.assertEqual(AgentStatus._get_pickle_path(), expected_path)

    @mock.patch('utils.platform.Platform.is_win32', return_value=False)
    @mock.patch('utils.platform.Platform.is_mac', return_value=True)
    def test_agent_status_pickle_file_mac_source(self, *mocks):
        ''' Test pickle file location when running a Mac source install '''
        expected_path = os.path.join('/a/test/tmp/dir', 'AgentStatus.pickle')
        self.assertEqual(AgentStatus._get_pickle_path(), expected_path)
|
|
cf9299aad62828f1cd116403076b2a6b086721d8
|
add meta utilities
|
flask_ember/util/meta.py
|
flask_ember/util/meta.py
|
Python
| 0.000001 |
@@ -0,0 +1,477 @@
import inspect


def get_class_fields(klass, predicate=None):
    return [(name, field) for name, field in klass.__dict__.items()
            if (predicate(name, field) if predicate else True)]


def get_fields(klass, predicate=None):
    fields = list()
    for base in inspect.getmro(klass)[::-1]:
        fields.extend(get_class_fields(base, predicate))
    return fields


def get_methods(klass):
    return get_fields(klass, lambda name, field: inspect.isfunction(field))
|
|
481a920fe89ea7f0e518b8cf815f966715b20ca3
|
add new package : activemq (#14142)
|
var/spack/repos/builtin/packages/activemq/package.py
|
var/spack/repos/builtin/packages/activemq/package.py
|
Python
| 0 |
@@ -0,0 +1,698 @@
# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

from spack import *


class Activemq(Package):
    """
    Apache ActiveMQ is a high performance Apache 2.0 licensed Message Broker
    and JMS 1.1 implementation.
    """

    homepage = "https://archive.apache.org/dist/activemq"
    url = "https://archive.apache.org/dist/activemq/5.14.0/apache-activemq-5.14.0-bin.tar.gz"

    version('5.14.0', sha256='81c623465af277dd50a141a8d9308d6ec8e1b78d9019b845873dc12d117aa9a6')

    def install(self, spec, prefix):
        install_tree('.', prefix)
|
|
6b9adf9f00b481562cedf2debc5aede947734744
|
remove dot
|
addons/account_analytic_analysis/cron_account_analytic_account.py
|
addons/account_analytic_analysis/cron_account_analytic_account.py
|
#!/usr/bin/env python
from osv import osv
from mako.template import Template
import time
try:
    import cStringIO as StringIO
except ImportError:
    import StringIO
import tools
MAKO_TEMPLATE = u"""Hello ${user.name},
Here is a list of contracts that have to be renewed for two
possible reasons:
- the end of contract date is passed
- the customer consumed more hours than expected
Can you contact the customer in order to sell a new or renew its contract.
The contract has been set with a pending state, can you update the status
of the analytic account following this rule:
- Set Done: if the customer does not want to renew
- Set Open: if the customer purchased an extra contract
Here is the list of contracts to renew:
% for partner, accounts in partners.iteritems():
* ${partner.name}
% for account in accounts:
- Name: ${account.name}
% if account.quantity_max != 0.0:
- Quantity: ${account.quantity}/${account.quantity_max} hours
% endif
- Dates: ${account.date_start} to ${account.date and account.date or '???'}
- Contacts:
. ${account.partner_id.name}, ${account.partner_id.phone}, ${account.partner_id.email}
% endfor
% endfor
You can use the report in the menu: Sales > Invoicing > Overdue Accounts
Regards,
--
OpenERP
"""
class analytic_account(osv.osv):
    _inherit = 'account.analytic.account'

    def cron_account_analytic_account(self, cr, uid, context=None):
        domain = [
            ('name', 'not ilike', 'maintenance'),
            ('partner_id', '!=', False),
            ('user_id', '!=', False),
            ('user_id.user_email', '!=', False),
            ('state', 'in', ('draft', 'open')),
            '|', ('date', '<', time.strftime('%Y-%m-%d')), ('date', '=', False),
        ]
        account_ids = self.search(cr, uid, domain, context=context, order='name asc')
        accounts = self.browse(cr, uid, account_ids, context=context)

        users = dict()
        for account in accounts:
            users.setdefault(account.user_id, dict()).setdefault(account.partner_id, []).append(account)
            account.write({'state' : 'pending'}, context=context)

        for user, data in users.iteritems():
            subject = '[OPENERP] Reporting: Analytic Accounts'
            body = Template(MAKO_TEMPLATE).render_unicode(user=user, partners=data)
            tools.email_send('[email protected]', [user.user_email, ], subject, body)
        return True

analytic_account()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Python
| 0.000012 |
@@ -1090,10 +1090,8 @@
- .
 ${a
|
2aa7a6260d9d5a74ee81677be2bd5f97774f9116
|
Add tests for internal gregorian functions.
|
calexicon/internal/tests/test_gregorian.py
|
calexicon/internal/tests/test_gregorian.py
|
Python
| 0 |
@@ -0,0 +1,385 @@
import unittest

from calexicon.internal.gregorian import is_gregorian_leap_year


class TestGregorian(unittest.TestCase):
    def test_is_gregorian_leap_year(self):
        self.assertTrue(is_gregorian_leap_year(2000))
        self.assertTrue(is_gregorian_leap_year(1984))
        self.assertFalse(is_gregorian_leap_year(1900))
        self.assertFalse(is_gregorian_leap_year(1901))
|
|
387d05dbdb81bacc4851adffbfd7f827e709d4cc
|
Add Step class - Create Step.py to hold code for the Step class. - The Step class represents a single step/instruction for a Recipe object.
|
Step.py
|
Step.py
|
Python
| 0 |
@@ -0,0 +1,123 @@
# Step object
class Step:
    # Initiate object
    def __init__(self,description):
        self.description = description
|
|
7d8a566ac51e7e471603c2160dce2046eb698738
|
add sn domains conversion tool
|
conv.py
|
conv.py
|
Python
| 0 |
@@ -0,0 +1,1472 @@
#!/usr/bin/env python

# Read the wiki for more infomation
# https://github.com/lennylxx/ipv6-hosts/wiki/sn-domains

import sys
table = '1023456789abcdefghijklmnopqrstuvwxyz'

def iata2sn(iata):
    global table
    sn = ''
    for v in iata[0:3]:
        i = ((ord(v) - ord('a')) * 7 + 5) % 36
        sn += table[i]
    return sn

def sn2iata(sn):
    global table
    iata = ''
    for v in sn:
        i = table.index(v)
        i = (5 - i % 7) * 5 + i / 7 + 10
        iata += table[i]
    return iata

def num2code(num):
    global table
    code = ''
    for v in num:
        i = ((ord(v) - ord('0') + 1) * 7) % 36
        code += table[i]
    return code

def code2num(code):
    global table
    num = ''
    for v in code:
        i = table.index(v)
        i = i / 7 + i % 7 - 1
        num += str(i)
    return num

def main():
    if len(sys.argv) != 3:
        print 'usage:\n\t./%s -i iata\n\t./%s -s sn'\
            % (sys.argv[0], sys.argv[0])
        sys.exit(1)

    input = sys.argv[2]
    ret = ''
    if sys.argv[1] == '-i':
        ret += iata2sn(input[0:3])
        ret += num2code(input[3:5])
        ret += 'n'
        ret += num2code(input[6:8])
        print ret
    elif sys.argv[1] == '-s':
        ret += sn2iata(input[0:3])
        ret += code2num(input[3:5])
        ret += 's'
        ret += code2num(input[6:8])
        print ret
    else:
        print 'Unknown option.'
        sys.exit(1)

if __name__ == '__main__':
    main()
|
|
d2bdbd0d851fda046c0be55105a211a382c22766
|
Add Day 2
|
day2.py
|
day2.py
|
Python
| 0.000031 |
@@ -0,0 +1,749 @@
#Advent of Code December 2
#Written by icydoge - icydoge AT gmail dot com

with open('paper.txt') as f:
    content = f.read().splitlines()[:-1] #Remove last empty line

part_one_answer = 0
part_two_answer = 0

for box in content:
    dimensions = sorted(map(int,box.split('x')))
    slack = dimensions[0] * dimensions[1]
    wrapping = 2 * (dimensions[0] * dimensions[1] + dimensions[1] * dimensions[2] + dimensions[0] * dimensions[2])
    ribbon = (dimensions[0] + dimensions[1]) * 2
    bow = dimensions[0] * dimensions[1] * dimensions[2]
    part_one_answer += wrapping + slack
    part_two_answer += ribbon + bow

print "Total square feet of wrapping paper (Part One):", part_one_answer
print "Total feet of ribbon (Part Two):", part_two_answer
|
|
acf4ad1e5948354281fec040badfe412f5194529
|
add wsgi
|
flaskr/flaskr.wsgi
|
flaskr/flaskr.wsgi
|
Python
| 0.999824 |
@@ -0,0 +1,356 @@
<VirtualHost *>
    ServerName example.com

    WSGIDaemonProcess flaskr user=user1 group=group1 threads=5
    WSGIScriptAlias / /var/www/FlaskDB/flaskr/flaskr.wsgi

    <Directory /var/www/FlaskDB/flaskr>
        WSGIProcessGroup flaskr
        WSGIApplicationGroup %{GLOBAL}
        Order deny,allow
        Allow from all
    </Directory>
</VirtualHost>
|
|
b6fbdd70a0486718d711a7efc310e350a1837b9c
|
add collapse reads code
|
seqcluster/collapse.py
|
seqcluster/collapse.py
|
Python
| 0.000001 |
@@ -0,0 +1,788 @@
import os
from libs.fastq import collapse, splitext_plus
import logging


logger = logging.getLogger('seqbuster')


def collapse_fastq(args):
    """collapse fasq files after adapter trimming
    """
    idx = 0
    try:
        seqs = collapse(args.fastq)
        out_file = splitext_plus(os.path.basename(args.fastq))[0] + "_trimmed.fastq"
    except IOError as e:
        logger.error("I/O error({0}): {1}".format(e.errno, e.strerror))
        raise "Can not read file"
    logger.info("writing output")
    with open(os.path.join(args.out, out_file), 'w') as handle:
        for seq in seqs:
            idx += 1
            qual = "".join(seqs[seq].get())
            counts = seqs[seq].times
            handle.write(("@seq_{idx}_x{counts}\n{seq}\n+\n{qual}\n").format(**locals()))
|
|
871ec5597059934bce64f7d31fa7e5ab165063ee
|
Add basic GUI frontend
|
memorise-frontend.py
|
memorise-frontend.py
|
Python
| 0 |
@@ -0,0 +1,1706 @@
#!/usr/bin/env python
# -*- Coding: utf-8 -*-

from tkinter import Tk, Menu
from ttk import Frame, Button, Style

class MemoriseFrontend(Frame):
    version = "0.1-py"
    padding = 10

    def __init__(self, parent):
        Frame.__init__(self, parent)
        self.parent = parent
        self.style = Style()
        self.style.theme_use("default")

        self._initUI()

    def _initUI(self):
        self.parent.title("Memorise v" + self.version)

        self.columnconfigure(0, pad=self.padding)
        self.columnconfigure(1, pad=self.padding)
        self.columnconfigure(2, pad=self.padding)
        self.columnconfigure(3, pad=self.padding)
        self.columnconfigure(4, pad=self.padding)

        self.rowconfigure(0, pad=self.padding)
        self.rowconfigure(1, pad=self.padding)
        self.rowconfigure(2, pad=self.padding)
        self.rowconfigure(3, pad=self.padding)
        self.rowconfigure(4, pad=self.padding)

        # Row 1
        btnUp = Button(self, text="Up", command=self._onUpBtn)
        btnUp.grid(row=1, column=2)

        # Row 2
        btnLeft = Button(self, text="Left", command=self._onLeftBtn)
        btnLeft.grid(row=2, column=1)

        # Row 2
        btnRight = Button(self, text="Right", command=self._onRightBtn)
        btnRight.grid(row=2, column=3)

        # Row 3
        btnDown = Button(self, text="Down", command=self._onDownBtn)
        btnDown.grid(row=3, column=2)

        self.pack()

    def _onUpBtn(self):
        pass

    def _onLeftBtn(self):
        pass

    def _onRightBtn(self):
        pass

    def _onDownBtn(self):
        pass


def main():
    root = Tk()
    app = MemoriseFrontend(root)
    root.mainloop()

main()
|
|
db81e8ca0b0321994f188daf45211e6ae2dda4a4
|
Make a control dataset that only contains sequences with titer data.
|
dengue/utils/make_titer_strain_control.py
|
dengue/utils/make_titer_strain_control.py
|
Python
| 0.00001 |
@@ -0,0 +1,644 @@
from Bio import SeqIO
from pprint import pprint

with open('../../data/dengue_titers.tsv', 'r') as f:
	titerstrains = set([ line.split()[0] for line in f ])
with open('../../data/dengue_titers.tsv', 'r') as f:
	serastrains = set([ line.split()[1] for line in f ])

autologous = titerstrains.intersection(serastrains)
print len(autologous)

strains_with_titers = [s for s in SeqIO.parse(open('../../data/dengue.fasta', 'r'), 'fasta') if s.description.split('|')[0] in autologous ]
SeqIO.write(strains_with_titers, '../../data/control.fasta', 'fasta')

print 'Found %d strains with autologous titers and sequence data.'%len(strains_with_titers)
|
|
9a67c8eca45daa2f706e8fc6bde958c37229c837
|
Create mpd_mouse_control.py
|
mpd_mouse_control.py
|
mpd_mouse_control.py
|
Python
| 0.000002 |
@@ -0,0 +1,2960 @@
from evdev import InputDevice
from select import select
import os
import mpd
import socket
import alsaaudio
import time
client = mpd.MPDClient(use_unicode=True)
dev = InputDevice('/dev/input/event2')
drop_lb_event = False
drop_rb_event = False

while True:
    r,w,x = select([dev], [], [])
    try:
        for event in dev.read():
            client.connect("192.168.0.2", 6600)
            if event.code == 8:
                if 272 in dev.active_keys():
                    drop_lb_event = True
                    if (event.value > 0):
                        client.seekcur("+5")
                    else:
                        client.seekcur("-5")
                elif 273 in dev.active_keys():
                    drop_rb_event = True
                    if (event.value > 0):
                        client.seekcur("+30")
                    else:
                        client.seekcur("-30")
                else:
                    mixer = alsaaudio.Mixer("PCM", **{"cardindex": 1})
                    if (event.value > 0):
                        mixer.setvolume(int(mixer.getvolume()[0])+2, -1)
                    else:
                        mixer.setvolume(int(mixer.getvolume()[0])-2, -1)
            try:
                if event.code == 272 and event.value == 0:
                    if drop_lb_event:
                        drop_lb_event = False
                    else:
                        client.previous()
                if event.code == 273 and event.value == 0:
                    if drop_rb_event:
                        drop_rb_event = False
                    else:
                        client.next()
                if event.code == 274 and event.value == 1:
                    if client.status()["state"] == "stop":
                        client.play()
                    else:
                        client.pause()
                os.system("/usr/sbin/qcontrol usbled off")
                client.disconnect()
            except mpd.ConnectionError, socket.error:
                pass
    except IOError, OSError:
        time.sleep(5)
        dev = InputDevice('/dev/input/event2')
|
|
d1024a2892c6e171b3d465d56c8a1fad25d7fbdc
|
Create ESLint styler
|
zazu/plugins/eslint_styler.py
|
zazu/plugins/eslint_styler.py
|
Python
| 0 |
@@ -0,0 +1,1113 @@
# -*- coding: utf-8 -*-
"""eslint plugin for zazu."""
import zazu.styler
zazu.util.lazy_import(locals(), [
    'subprocess',
    'os',
    'tempfile'
])

__author__ = "Patrick Moore"
__copyright__ = "Copyright 2018"


class eslintStyler(zazu.styler.Styler):
    """ESLint plugin for code styling."""

    def style_string(self, string):
        """Fix a string to be within style guidelines."""
        temp = tempfile.NamedTemporaryFile(delete=False, suffix=".js")
        temp_path = temp.name
        args = ['eslint', '--fix'] + self.options + [temp_path]
        temp.write(string)
        temp.close()
        try:
            subprocess.check_output(args)
        except subprocess.CalledProcessError:
            pass
        with open(temp_path, "r") as f:
            ret = f.read()
        os.remove(temp_path)
        return ret

    @staticmethod
    def default_extensions():
        """Return the list of file extensions that are compatible with this Styler."""
        return ['*.js']

    @staticmethod
    def type():
        """Return the string type of this Styler."""
        return 'eslint'
|
|
690b5a994bc20b561632d9aa3e332061457a3d72
|
Add missing __init__.py to overkiz tests (#62727)
|
tests/components/overkiz/__init__.py
|
tests/components/overkiz/__init__.py
|
Python
| 0.000001 |
@@ -0,0 +1,39 @@
"""Tests for the overkiz component."""
|
|
d08426ffde22c2ded72425f1d1c54923b9aa0b97
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/4b193958ac9b893b33dc03cc6882c70ad4ad509d.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "f5ea7e9c419b881d7f3136de7a7388a23feee70e"
TFRT_SHA256 = "723c9b1fabc504fed5b391fc766e2504559c2b02b4f4e01c55bc77b8ff0df8ed"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0 |
@@ -228,133 +228,133 @@
 = "
-f5ea7e9c419b881d7f3136de7a7388a23feee70e"
-    TFRT_SHA256 = "723c9b1fabc504fed5b391fc766e2504559c2b02b4f4e01c55bc77b8ff0df8ed
+4b193958ac9b893b33dc03cc6882c70ad4ad509d"
+    TFRT_SHA256 = "5b011d3f3b25e6c9646da078d0dbd8000ca063fa4fe6ef53449692c363fa13f7
 "
|
79a38e9ef0ac04c4efef55c26f74ad2b11442a7b
|
add a command to fix the missing packages
|
crate_project/apps/crate/management/commands/fix_missing_files.py
|
crate_project/apps/crate/management/commands/fix_missing_files.py
|
Python
| 0.000009 |
@@ -0,0 +1,494 @@
from django.core.management.base import BaseCommand

from packages.models import ReleaseFile
from pypi.processor import PyPIPackage


class Command(BaseCommand):

    def handle(self, *args, **options):
        i = 0
        for rf in ReleaseFile.objects.filter(digest="").distinct("release__package"):
            p = PyPIPackage(rf.release.package)
            p.process()
            i += 1
            print rf.release.package.name, rf.release.version
        print "Fixed %d packages" % i
|
|
50b9aff7914885b590748ebd8bca4350d138670c
|
Add admin section for the ``Resources``.
|
us_ignite/resources/admin.py
|
us_ignite/resources/admin.py
|
Python
| 0 |
@@ -0,0 +1,397 @@
from django.contrib import admin

from us_ignite.resources.models import Resource


class ResourceAdmin(admin.ModelAdmin):
    list_display = ('name', 'slug', 'status', 'is_featured')
    search_fields = ('name', 'slug', 'description', 'url')
    list_filter = ('is_featured', 'created')
    date_hierarchy = 'created'
    raw_id_fields = ['owner', ]

admin.site.register(Resource, ResourceAdmin)
|
|
20c9f1416243c020b270041621098ca20e09eca4
|
tag retrieval script added
|
private/scripts/extras/timus_tag_retrieval.py
|
private/scripts/extras/timus_tag_retrieval.py
|
Python
| 0 |
@@ -0,0 +1,1636 @@
"""
    Copyright (c) 2015-2018 Raj Patel([email protected]), StopStalk

    Permission is hereby granted, free of charge, to any person obtaining a copy
    of this software and associated documentation files (the "Software"), to deal
    in the Software without restriction, including without limitation the rights
    to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    copies of the Software, and to permit persons to whom the Software is
    furnished to do so, subject to the following conditions:

    The above copyright notice and this permission notice shall be included in
    all copies or substantial portions of the Software.

    THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    THE SOFTWARE.
"""

import bs4, requests
from time import sleep
tags = set([])
for i in xrange(1900, 2111):
    url = "http://acm.timus.ru/problem.aspx?space=1&num=%d&locale=en" % i
    response = requests.get(url)
    soup = bs4.BeautifulSoup(response.text, "lxml")
    all_as = soup.find("div",
                       class_="problem_links").previous_sibling.find_all("a")[:-1]
    print i, [x.text for x in all_as]
    for tmp in all_as:
        tags.add(tmp.text)
    sleep(1)

print tags
|
|
0136d50265fc390d194436238b88655327982231
|
add gobOauth.py
|
gobOauth.py
|
gobOauth.py
|
Python
| 0.000003 |
@@ -0,0 +1,479 @@
import praw
import configparser

SAVEFILE = "oauth.ini"
def read_ini():
    cfg = configparser.ConfigParser()
    cfg.read(SAVEFILE)
    return cfg

def get_refreshable_instance():
    cfg = read_ini()
    reddit = praw.Reddit(client_id=cfg['app']['client_id'],
                         client_secret=cfg['app']['client_secret'],
                         refresh_token=cfg['token']['refresh_token'],
                         user_agent=cfg['app']['user_agent'])
    return reddit
|
|
e0b84a97e4c7ad5dcef336080657a884cff603fc
|
Test two windows drawing GL with different contexts.
|
tests/gl_test_2.py
|
tests/gl_test_2.py
|
Python
| 0.000005 |
@@ -0,0 +1,1526 @@
#!/usr/bin/env python

'''
'''

__docformat__ = 'restructuredtext'
__version__ = '$Id$'

import pyglet.window
from pyglet.window.event import *
import time

from pyglet.GL.VERSION_1_1 import *
from pyglet.GLU.VERSION_1_1 import *
from pyglet import clock

factory = pyglet.window.WindowFactory()
factory.config._attributes['doublebuffer'] = 1

class ExitHandler(object):
    running = True
    def on_close(self):
        self.running = False
    def on_keypress(self, symbol, modifiers):
        if symbol == pyglet.window.key.K_ESCAPE:
            self.running = False
        return EVENT_UNHANDLED
exit_handler = ExitHandler()


def setup():
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    gluPerspective(60., 1., 1., 100.)

    glMatrixMode(GL_MODELVIEW)
    glClearColor(1, 1, 1, 1)
    glColor4f(.5, .5, .5, .5)

def draw():
    global r
    glClear(GL_COLOR_BUFFER_BIT)
    glLoadIdentity()

    r += 1
    if r > 360: r = 0
    glRotatef(r, 0, 0, 1)
    glBegin(GL_QUADS)
    glVertex3f(-1., -1., -5.)
    glVertex3f(-1., 1., -5.)
    glVertex3f(1., 1., -5.)
    glVertex3f(1., -1., -5.)
    glEnd()

w1 = factory.create(width=200, height=200)
w1.push_handlers(exit_handler)
w1.switch_to()
setup()

c = clock.Clock()
w2 = factory.create(width=400, height=400)
w2.push_handlers(exit_handler)
w2.switch_to()
setup()


r = 0
while exit_handler.running:
    c.set_fps(60)

    w1.switch_to()
    w1.dispatch_events()
    draw()
    w1.flip()

    w2.switch_to()
    w2.dispatch_events()
    draw()
    w2.flip()
|
|
bdaa80badf1f3d8c972c5da7d0fe65a0c3f63752
|
Update maasutils.py to fix pep8
|
tests/maasutils.py
|
tests/maasutils.py
|
#!/usr/bin/env python
import os
import sys

import click
from rackspace_monitoring.providers import get_driver
from rackspace_monitoring.types import Provider
import requests


@click.group()
@click.option("--username", required=True)
@click.option("--api-key", required=True)
@click.pass_context
def cli(ctx, api_key, username):
    ctx.obj = {
        'username': username,
        'api-key': api_key
    }
    url = 'https://identity.api.rackspacecloud.com/v2.0/tokens'
    headers = {"Content-type": "application/json"}
    data = {
        "auth": {
            "RAX-KSKEY:apiKeyCredentials": {
                "username": username,
                "apiKey": api_key
            }
        }
    }
    try:
        r = requests.post(url, headers=headers, json=data)
        r.raise_for_status()
    except requests.exceptions.RequestException as e:
        print(e)
        sys.exit(1)
    except requests.exceptions.HTTPError as httpe:
        print(httpe)
        sys.exit(1)
    resp = r.json()
    ctx.obj['token'] = resp['access']['token']['id']
    monitoring_service = next(
        s for s in resp['access']['serviceCatalog']
        if s["name"] == "cloudMonitoring"
    )
    ctx.obj['url'] = monitoring_service['endpoints'][0]['publicURL']


@click.command(name='get_token_url')
@click.pass_context
def get_token_url(ctx):
    cred_file = os.path.expanduser('~/maas-vars.rc')
    with open(cred_file, 'w') as f:
        f.write(
            "export MAAS_AUTH_TOKEN={token}\n"
            "export MAAS_API_URL={url}\n".format(
                token=ctx.obj['token'],
                url=ctx.obj['url']
            )
        )
    click.echo(
        'Credentials file written to "{cred_file}"'.format(
            cred_file=cred_file
        )
    )
    return ctx.obj['token'], ctx.obj['url']


@click.command(name='set_webhook_token')
@click.option("--token", 'webhook_token', required=True)
@click.pass_context
def set_webhook_token(ctx, webhook_token):
    """Sets the token that is included in MaaS webhook notifications

    This is one method of verifying that receieved requests are
    from MaaS. This is per account.
    """
    auth_token, url = ctx.invoke(get_token_url)
    try:
        response = requests.put(
            "{url}/account".format(url=url),
            headers={'X-Auth-Token': auth_token},
            json={'webhook_token': webhook_token})
        response.raise_for_status()
        click.echo("Webhook token set to {}".format(webhook_token))
    except requests.exceptions.HTTPError as e:
        click.echo(response.text)
        raise e


@click.command(name='get_entity_id')
@click.option("--label", help="label of entity to get ID for", required=True)
@click.pass_context
def get_entity_id(ctx, label):
    Cls = get_driver(Provider.RACKSPACE)
    driver = Cls(ctx.obj['username'], ctx.obj['api-key'])
    entities = driver.list_entities()
    for e in entities:
        if label == e.label:
            click.echo(e.id)


cli.add_command(get_token_url)
cli.add_command(set_webhook_token)
cli.add_command(get_entity_id)

if __name__ == "__main__":
    cli()
|
Python
| 0 |
@@ -3104,8 +3104,9 @@
cli()
+
|
8affb8e4a3744e604b88157a918ef690203cbfa8
|
Remove disallowed characters from stream names.
|
zerver/migrations/0375_invalid_characters_in_stream_names.py
|
zerver/migrations/0375_invalid_characters_in_stream_names.py
|
Python
| 0 |
@@ -0,0 +1,2802 @@
import unicodedata

from django.db import connection, migrations
from django.db.backends.postgresql.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps

# There are 66 Unicode non-characters; see
# https://www.unicode.org/faq/private_use.html#nonchar4
unicode_non_chars = set(
    chr(x)
    for x in list(range(0xFDD0, 0xFDF0))  # FDD0 through FDEF, inclusive
    + list(range(0xFFFE, 0x110000, 0x10000))  # 0xFFFE, 0x1FFFE, ... 0x10FFFE inclusive
    + list(range(0xFFFF, 0x110000, 0x10000))  # 0xFFFF, 0x1FFFF, ... 0x10FFFF inclusive
)


def character_is_printable(character: str) -> bool:
    return not (unicodedata.category(character) in ["Cc", "Cs"] or character in unicode_non_chars)


def fix_stream_names(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
    Stream = apps.get_model("zerver", "Stream")
    Realm = apps.get_model("zerver", "Realm")

    total_fixed_count = 0
    realm_ids = Realm.objects.values_list("id", flat=True)
    if len(realm_ids) == 0:
        return

    print("")
    for realm_id in realm_ids:
        print(f"Processing realm {realm_id}")
        realm_stream_dicts = Stream.objects.filter(realm_id=realm_id).values("id", "name")
        occupied_stream_names = set(stream_dict["name"] for stream_dict in realm_stream_dicts)

        for stream_dict in realm_stream_dicts:
            stream_name = stream_dict["name"]
            fixed_stream_name = "".join(
                [
                    character if character_is_printable(character) else "\N{REPLACEMENT CHARACTER}"
                    for character in stream_name
                ]
            )

            if fixed_stream_name == stream_name:
                continue

            if fixed_stream_name == "":
                fixed_stream_name = "(no name)"

            # The process of stripping invalid characters can lead to collisions,
            # with the new stream name being the same as the name of another existing stream.
            # We append underscore until the name no longer conflicts.
            while fixed_stream_name in occupied_stream_names:
                fixed_stream_name += "_"

            occupied_stream_names.add(fixed_stream_name)
            total_fixed_count += 1
            with connection.cursor() as cursor:
                cursor.execute(
                    "UPDATE zerver_stream SET name = %s WHERE id = %s",
                    [fixed_stream_name, stream_dict["id"]],
                )

    print(f"Fixed {total_fixed_count} stream names")


class Migration(migrations.Migration):
    atomic = False

    dependencies = [
        ("zerver", "0374_backfill_user_delete_realmauditlog"),
    ]

    operations = [
        migrations.RunPython(fix_stream_names, reverse_code=migrations.RunPython.noop),
    ]
|
|
1deb35d9aa62a6c950cb978063c7f4aed645067b
|
Add utility module for logging
|
mediacloud/mediawords/util/log.py
|
mediacloud/mediawords/util/log.py
|
Python
| 0 |
@@ -0,0 +1,363 @@
import logging


def create_logger(name):
    """Create and return 'logging' instance."""
    formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')

    handler = logging.StreamHandler()
    handler.setFormatter(formatter)

    l = logging.getLogger(name)
    l.setLevel(logging.DEBUG)
    l.addHandler(handler)
    return l
|
|
12c483953f39a3bacaab6d49ba17c4920db52179
|
Add script to clean up all FD phone and fax numbers.
|
firecares/firestation/management/commands/cleanup_phonenumbers.py
|
firecares/firestation/management/commands/cleanup_phonenumbers.py
|
Python
| 0 |
@@ -0,0 +1,1445 @@
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from phonenumber_field.modelfields import PhoneNumber
import re

"""
This command is for cleaning up every phone and fax number in the
database. It removes all non-numeric characters, such as parenthesis,
hyphens, spaces, etc. It also removes prefixed 1s These numbers should
be made human-readable on the client side.
"""

def cleanNumber(no1):
    no2 = re.sub('[^0-9]','', no1)
    if no2.startswith("1"):
        no2 = no2[1:]
    return no2

class Command(BaseCommand):

    def handle(self, *args, **kwargs):
        print("Don't worry, it always takes this long.")

        for fd in FireDepartment.objects.all():
            # If the FD has a phone number, clean it up
            if fd.headquarters_phone and not fd.headquarters_phone.raw_input == "Invalid Input":
                newPhone = cleanNumber(fd.headquarters_phone.raw_input)
                print(newPhone)
                fd.headquarters_phone = newPhone
            # If the FD has a fax number, clean it up
            if fd.headquarters_fax and not fd.headquarters_fax.raw_input == "Invalid Input":
                newFax = cleanNumber(fd.headquarters_fax.raw_input)
                print(newFax)
                fd.headquarters_fax = newFax
            # Save and continue to the next FD (if any)
            fd.save()

        print("Completed successfully!")
|
|
c99b4c9c4b42d7f6c1e3800ed5595e86db95b6cf
|
finish hello world program for dajax
|
gui/ajax.py
|
gui/ajax.py
|
Python
| 0.997924 |
@@ -0,0 +1,456 @@
# -*- coding: UTF-8 -*-
'''
Created on 2013-03-25

@author: tianwei

Desc: This module will be used for ajax request, such as form valid, search
      query, calculated submit.
'''

import simplejson
from dajaxice.decorators import dajaxice_register


@dajaxice_register(method='GET')
@dajaxice_register(method='POST', name="calculate_submit_post")
def calculate_submit(request, data):
    return simplejson.dumps({'message': 'tianwei hello world!'+data})
|
|
88cacd862477ded4344ac1ab3de1580d09f6db9c
|
add org level and lables
|
indicators/test.py
|
indicators/test.py
|
from django.test import TestCase
from django.test import RequestFactory
from django.test import Client
from indicators.models import Indicator, IndicatorType, DisaggregationType, ReportingFrequency, CollectedData
from workflow.models import Program, Country, Organization
from django.contrib.auth.models import User


class IndicatorTestCase(TestCase):
    fixtures = ['fixtures/organziation.json','fixtures/country.json']

    def setUp(self):
        new_organization = Organization.objects.create(name="tola")
        new_organization.save()
        get_organization = Organization.objects.get(name="tola")

        new_country = Country.objects.create(country="testcountry", organization=get_organization)
        new_country.save()
        get_country = Country.objects.get(country="testcountry")

        new_program = Program.objects.create(name="testprogram")
        new_program.save()
        new_program.country.add(get_country)
        get_program = Program.objects.get(name="testprogram")

        new_indicator_type = IndicatorType.objects.create(indicator_type="testtype")
        new_indicator_type.save()
        get_indicator_type = IndicatorType.objects.get(indicator_type="testtype")

        new_disaggregation = DisaggregationType.objects.create(disaggregation_type="disagg")
        new_disaggregation.save()
        get_disaggregation = DisaggregationType.objects.get(disaggregation_type="disagg")

        new_frequency = ReportingFrequency.objects.create(frequency="newfreq")
        new_frequency.save()
        get_frequency = ReportingFrequency.objects.get(frequency="newfreq")

        user = User.objects.create_user('john', '[email protected]', 'johnpassword')
        user.save()
        get_user = User.objects.get(username='john')

        new_indicator = Indicator.objects.create(name="testindicator",number="1.2.3",source="testing",
                                                 baseline="10",lop_target="10", reporting_frequency=get_frequency)
        new_indicator.save()
        new_indicator.disaggregation.add(get_disaggregation)
        new_indicator.indicator_type.add(get_indicator_type)
        new_indicator.program.add(get_program)
        get_indicator = Indicator.objects.get(name="testindicator")

        new_collected = CollectedData.objects.create(targeted="12",achieved="20", description="somevaluecollected", indicator=get_indicator)
        new_collected.save()

    def test_indicator_exists(self):
        """Check for Indicator object"""
        get_indicator = Indicator.objects.get(name="testindicator")
        self.assertEqual(Indicator.objects.filter(id=get_indicator.id).count(), 1)

    def test_collected_exists(self):
        """Check for CollectedData object"""
        get_collected = CollectedData.objects.get(description="somevaluecollected")
        self.assertEqual(CollectedData.objects.filter(id=get_collected.id).count(), 1)
|
Python
| 0.000008 |
@@ -382,10 +382,10 @@
rgan
-z
i
+z
atio
|
2db51d6c117bbe0555ddffe34f52679685c68fbb
|
update url
|
indicators/urls.py
|
indicators/urls.py
|
from django.conf.urls import patterns, include, url

from .views import CollectedDataList, CollectedDataCreate, CollectedDataUpdate, CollectedDataDelete, IndicatorCreate, IndicatorDelete, IndicatorUpdate,\
    IndicatorList, IndicatorExport, CollectedDataExport


urlpatterns = patterns('',
    ###INDICATOR PLANING TOOL
    #Home
    url(r'^home/(?P<pk>\w+)/$', IndicatorList.as_view(), name='indicator_list'),

    #Indicator Report
    url(r'^report/(?P<program>\w+)/$', 'indicators.views.indicator_report', name='indicator_report'),
    url(r'^program_report/(?P<program>\w+)/$', 'indicators.views.programIndicatorReport', name='programIndicatorReport'),

    #Indicator Form
    url(r'^indicator_list/(?P<pk>\w+)/$', IndicatorList.as_view(), name='indicator_list'),
    url(r'^indicator_create/(?P<id>\w+)/$', 'indicators.views.indicator_create', name='indicator_create'),
    url(r'^indicator_add/(?P<id>\w+)/$', IndicatorCreate.as_view(), name='indicator_add'),
    url(r'^indicator_update/(?P<pk>\w+)/$', IndicatorUpdate.as_view(), name='indicator_update'),
    url(r'^indicator_delete/(?P<pk>\w+)/$', IndicatorDelete.as_view(), name='indicator_delete'),

    #Collected Data List
    url(r'^collecteddata/(?P<indicator>\w+)/$', CollectedDataList.as_view(), name='collecteddata_list'),
    url(r'^collecteddata/(?P<indicator>\w+)/(?P<program>\w+)/$', CollectedDataList.as_view(), name='collecteddata_list'),
    url(r'^collecteddata/(?P<indicator>\w+)/(?P<program>\w+)/(?P<agreement>\w+)/$', CollectedDataList.as_view(), name='collecteddata_list'),
    url(r'^collecteddata_add/(?P<program>\w+)/(?P<indicator>\w+)/$', CollectedDataCreate.as_view(), name='collecteddata_add'),
    url(r'^collecteddata_import/$', 'indicators.views.collecteddata_import', name='collecteddata_import'),
    url(r'^collecteddata_update/(?P<pk>\w+)/$', CollectedDataUpdate.as_view(), name='collecteddata_update'),
    url(r'^collecteddata_delete/(?P<pk>\w+)/$', CollectedDataDelete.as_view(), name='collecteddata_delete'),
    url(r'^collecteddata_export/(?P<program>\w+)/(?P<indicator>\w+)/$', CollectedDataList.as_view(), name='collecteddata_list'),

    #Indicator Data Report
    url(r'^data/(?P<id>\w+)/$', 'indicators.views.indicator_data_report', name='indicator_data_report'),
    url(r'^data/(?P<id>\w+)/(?P<program>\w+)/map/$', 'indicators.views.indicator_data_report', name='indicator_data_report'),
    url(r'^data/(?P<id>\w+)/(?P<program>\w+)/graph/$', 'indicators.views.indicator_data_report', name='indicator_data_report'),
    url(r'^data/(?P<id>\w+)/(?P<program>\w+)/table/$', 'indicators.views.indicator_data_report', name='indicator_data_report'),
    url(r'^data/(?P<id>\w+)/(?P<program>\w+)/$', 'indicators.views.indicator_data_report', name='indicator_data_report'),
    url(r'^export/(?P<program>\w+)/$', IndicatorExport.as_view(), name='indicator_export'),
    url(r'^export_data/(?P<indicator>\w+)/(?P<program>\w+)/$', CollectedDataExport.as_view(), name='indicator_data_export'),

    #ajax calls
    url(r'^service/(?P<service>[-\w]+)/service_json/', 'indicators.views.service_json', name='service_json'),
    url(r'^collected_data_table/(?P<indicator>[-\w]+)/(?P<program>[-\w]+)/', 'indicators.views.collected_data_json', name='collected_data_json'),
    url(r'^program_indicators/(?P<program>[-\w]+)/', 'indicators.views.program_indicators_json', name='program_indicators_json'),
)
|
Python
| 0.000001 |
@@ -233,37 +233,16 @@
orExport
-, CollectedDataExport
 
 
 
 urlpa
|
7d128f2386fd3bbcbff1a407018f9ab9ed580810
|
Add tests for path join
|
tests/test_path.py
|
tests/test_path.py
|
Python
| 0 |
@@ -0,0 +1,239 @@
from gypsy.path import _join

def test_join():
    assert _join('s3://', 'bucket', 'prefix') == 's3://bucket/prefix'
    assert _join('s3://bucket', 'prefix') == 's3://bucket/prefix'
    assert _join('bucket', 'prefix') == 'bucket/prefix'
|
|
af2654df47b8b7ea60d78fd7f692e911c2d3a82c
|
allow oveerride of font used
|
tests/text_test.py
|
tests/text_test.py
|
import sys
import os
import time

import pyglet.window
from pyglet.window.event import *
from pyglet.GL.VERSION_1_1 import *
from pyglet.GLU.VERSION_1_1 import *
from pyglet import clock
from pyglet.text import Font

from ctypes import *

factory = pyglet.window.WindowFactory()
factory.config._attributes['doublebuffer'] = 1
w1 = factory.create(width=400, height=200)

filename = os.path.join(os.path.split(__file__)[0], 'Vera.ttf')
font = Font.load_font(filename, 72)
text = font.render('Hello World!')

exit_handler = ExitHandler()
w1.push_handlers(exit_handler)

c = clock.Clock()

glMatrixMode(GL_PROJECTION)
glLoadIdentity()
glOrtho(0, w1.width, 0, w1.height, -1, 1)
glEnable(GL_COLOR_MATERIAL)

glMatrixMode(GL_MODELVIEW)
glClearColor(0, 0, 0, 0)
glColor4f(1, 1, 1, 1)

r = 0
while not exit_handler.exit:
    c.set_fps(60)
    w1.dispatch_events()

    glClear(GL_COLOR_BUFFER_BIT)
    glLoadIdentity()
    #r += 1
    if r > 360: r = 0
    glTranslatef(w1.width/2, w1.height/2, 0)
    glRotatef(r, 0, 0, 1)
    glTranslatef(-text.width/2, -text.height/2, 0)
    text.draw()

    w1.flip()
|
Python
| 0 |
@@ -363,16 +363,76 @@
 t=200)
 
+if len(sys.argv) == 2:
+    filename = sys.argv[1]
+else:
+    
filename
|
7336cc3c89727383c7a9cbbf564f6cfce7f198f9
|
add similiarty3.py
|
app/find_similarity3.py
|
app/find_similarity3.py
|
Python
| 0 |
@@ -0,0 +1,2440 @@
import sys
import string
import requests
import json
import pymysql
import numpy as np
import pandas as pd
from operator import itemgetter
from sklearn.metrics.pairwise import cosine_similarity
from sklearn.decomposition import PCA, RandomizedPCA, TruncatedSVD
from sklearn.preprocessing import Normalizer

def find_companies(investorname):
    investorname = np.int(investorname)
    rmatrix= np.loadtxt(open("investorcompanyPCA.csv"),delimiter=",")
    investor_id = np.loadtxt(open("investorIDorder.csv"),delimiter=",")
    investor_id = investor_id.astype(int)
    count=0
    score = []
    target=[]
    for row in investor_id:
        if row == investorname:
            target = rmatrix[count]
            break
        count += 1

    counter2 = 0
    for row in rmatrix:
        #score.append([cosine_similarity(target,row).tolist()[0][0], investor_id[counter2]])
        score.append({u'similarity': cosine_similarity(target,row).tolist()[0][0], u'investor_id': investor_id[counter2]})
        counter2 += 1
    #score = sorted(score,reverse=True)
    con = pymysql.connect(host='localhost', user='root', passwd='****')
    cur = con.cursor()
    cur.execute('''USE Venturenetwork16;''')
    current_query='''SELECT startupID FROM Investor_comp'''
    company_total = pd.io.sql.frame_query(current_query, con)
    company_total = list(company_total['startupID'])
    similarcomp=[]

    current_query='''SELECT * FROM Investor_comp'''
    rows = pd.io.sql.frame_query(current_query, con)
    df = pd.Series(list(rows['startupID']),list(rows['investor_id']))
    score = sorted(score,key=itemgetter('similarity'),reverse=True)
    similarcomp = []
    for investor_row in score[1:20]:
        for company in list(df[investor_row['investor_id']]):
            similarcomp.append([company, investor_row['similarity']])
    companyid = [ row[0] for row in similarcomp ]
    companysim = [ row[1] for row in similarcomp ]
    uniquecompID = list(set(companyid))
    uniquesimcomp = []
    for company in uniquecompID:
        compscore = 0
        for company2 in similarcomp:
            if company == company2[0] and company not in list(df[investorname]):
                compscore += company2[1]
        uniquesimcomp.append([compscore, company])
    return sorted(uniquesimcomp, reverse=True)[0:40], score

if __name__ == "__main__":
    [uniquesimcomp,score] = find_companies(sys.argv[1])
    print [uniquesimcomp,score]
|
|
08988d19c712ad4604f0acced71a069c7c20067a
|
Add kv store for file storage
|
zou/app/stores/file_store.py
|
zou/app/stores/file_store.py
|
Python
| 0 |
@@ -0,0 +1,1241 @@
import flask_fs as fs

from zou.app import app


pictures = fs.Storage("pictures", overwrite=True)
movies = fs.Storage("movies", overwrite=True)

pictures.configure(app)
movies.configure(app)


def make_key(prefix, id):
    return "%s-%s" % (prefix, id)


def add_picture(prefix, id, path):
    key = make_key(prefix, id)
    with open(path, 'rb') as fd:
        return pictures.write(key, fd)


def get_picture(prefix, id):
    key = make_key(prefix, id)
    return pictures.read(key)


def open_picture(prefix, id):
    key = make_key(prefix, id)
    return pictures.open(key, 'rb')


def exists_picture(prefix, id):
    key = make_key(prefix, id)
    return pictures.exists(key)


def remove_picture(prefix, id):
    key = make_key(prefix, id)
    pictures.delete(key)


def add_movie(prefix, id, content):
    key = make_key(prefix, id)
    return movies.write(key, content)


def get_movie(prefix, id):
    key = make_key(prefix, id)
    return movies.read(key)


def open_movie(prefix, id):
    key = make_key(prefix, id)
    return movies.open(key, 'rb')


def exists_movie(prefix, id):
    key = make_key(prefix, id)
    return movies.exists(key)


def remove_movie(prefix, id):
    key = make_key(prefix, id)
    movies.delete(key)
|
|
56c27d56ca16f6659a478af0b6529291b1140636
|
Create find-peak-element-ii.py
|
Python/find-peak-element-ii.py
|
Python/find-peak-element-ii.py
|
Python
| 0.00137 |
@@ -0,0 +1,2257 @@
# Time:  O(max(m, n))
# Space: O(1)

class Solution:
    #@param A: An list of list integer
    #@return: The index of position is a list of integer, for example [2,2]
    def findPeakII(self, A):
        upper, down = 0, len(A) - 1
        left, right = 0, len(A[0]) - 1

        while upper < down and left < right:
            height = down - upper + 1
            width = right - left + 1

            # T(m, n) = T(m / 2, n / 2) + O(m) + O(n / 2) = O(max(m, n))
            if width > height:  # Vertical split.
                mid_j = left + (right - left) / 2
                left_max, central_max, right_max = 0, 0, 0
                max_i, max_j = -1, -1
                for i in xrange(upper+1, down):
                    if A[i][mid_j] > central_max:
                        max_i, max_j = i, mid_j
                        central_max = A[i][mid_j]
                    left_max = max(left_max, A[i][mid_j - 1])
                    right_max = max(right_max, A[i][mid_j + 1])

                if left_max > central_max and left_max > right_max:  # Find left.
                    right = mid_j
                elif right_max > central_max and right_max > left_max:  # Find right.
                    left = mid_j
                else:  # Find one peak.
                    return [max_i, max_j]

            else:  # Horizontal split.
                mid_i = upper + (down - upper) / 2
                upper_max, central_max, down_max = 0, 0, 0
                max_i, max_j = 0, 0
                for j in xrange(left + 1, right):
                    if A[mid_i][j] > central_max:
                        max_i, max_j = mid_i, j
                        central_max = A[mid_i][j]
                    upper_max = max(upper_max, A[mid_i - 1][j])
                    down_max = max(down_max, A[mid_i + 1][j])

                if upper_max > central_max and upper_max > down_max:  # Find upper.
                    down = mid_i
                elif down_max > central_max and down_max > upper_max:  # Find down.
                    upper = mid_i
                else:  # Find one peak.
                    return [max_i, max_j]

        return [-1, -1]  # Not found.
|
|
49882e51faa26dbaa17a5f3510f0ba215b317dac
|
add simple test
|
test/simple.py
|
test/simple.py
|
Python
| 0.000011 |
@@ -0,0 +1,1185 @@
import matplotlib.pyplot as plt
import numpy

numpy.random.seed(0)

N = 1000
Ne = N * 0.8
Ni = N - Ne

a = numpy.concatenate((
    0.02 * numpy.ones((Ne, 1)),
    0.1 * numpy.ones((Ni, 1))
))
b = numpy.concatenate((
    0.2 * numpy.ones((Ne, 1)),
    0.2 * numpy.ones((Ni, 1))
))
c = numpy.concatenate((
    -65 * numpy.ones((Ne, 1)),
    -65 * numpy.ones((Ni, 1))
))
d = numpy.concatenate((
    8 * numpy.ones((Ne, 1)),
    2 * numpy.ones((Ni, 1))
))

S = numpy.concatenate((
    0.5 * numpy.random.rand(N, Ne),
    -1.0 * numpy.random.rand(N, Ni)), axis=1)

v = -65 * numpy.ones((N, 1))
u = numpy.multiply(b, v)

firings = [[], []]

for t in range(1000):
    I = 13 * (numpy.random.rand(N, 1) - 0.5)
    fired = numpy.argwhere(v >= 30)[:,0]
    if fired.size > 0:
        for firing in fired:
            firings[0].append(t)
            firings[1].append(firing)
        v[fired] = c[fired]
        u[fired] += d[fired]
        I += numpy.sum(S[:, fired], 1).reshape((N, 1))
    v = v + (0.04 * numpy.square(v) + 5 * v + 140 - u + I)
    u = u + numpy.multiply(a, numpy.multiply(b, v) - u)

plt.scatter(
    firings[0],
    firings[1],
    color="black",
    marker=".")
plt.show()
|
|
32c5a681c7dd498204d38d5d1152aa7f67e09069
|
Add feedback entries to the Admin panel
|
taiga/feedback/admin.py
|
taiga/feedback/admin.py
|
Python
| 0 |
@@ -0,0 +1,1242 @@
# Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

from django.contrib import admin

from . import models


class FeedbackEntryAdmin(admin.ModelAdmin):
    list_display = ['created_date', 'full_name', 'email' ]
    list_display_links = list_display
    list_filter = ['created_date',]
    date_hierarchy = "created_date"
    ordering = ("-created_date", "id")
    search_fields = ("full_name", "email", "id")


admin.site.register(models.FeedbackEntry, FeedbackEntryAdmin)
|
|
f68b1a9d5aa2c36f9301588a55bc217a9ed120c1
|
Create PowerofThree_001.py
|
leetcode/326-Power-of-Three/PowerofThree_001.py
|
leetcode/326-Power-of-Three/PowerofThree_001.py
|
Python
| 0.000002 |
@@ -0,0 +1,180 @@
class Solution(object):
    def isPowerOfThree(self, n):
        """
        :type n: int
        :rtype: bool
        """
        return n > 0 and 3 ** round(math.log(n, 3)) == n
|
|
6c00711a5440fe958691c8064227565461e0acdf
|
add tools for laa analysis
|
sequana/laa.py
|
sequana/laa.py
|
Python
| 0 |
@@ -0,0 +1,2360 @@
from sequana import BAM
import glob
import pandas as pd
import pylab


class LAA():
    def __init__(self, where="bc*"):
        self.filenames = glob.glob(where + "/" + "amplicon_*summary.csv")
        self.data = [pd.read_csv(this) for this in self.filenames]

    def hist_amplicon(self, fontsize=12):
        data = [len(x) for x in self.data]
        pylab.hist(data, bins=max(data), ec="k")
        pylab.ylabel("#", fontsize=fontsize)
        pylab.ylabel("Number of amplicons per barcode", fontsize=fontsize)


class LAA_Assembly():
    """

    Input is a SAM/BAM from the mapping of amplicon onto a known reference.
    Based on the position, we can construct the new reference.

    """
    def __init__(self, filename):
        self.bam = BAM(filename)

    def build_reference(self):
        self.bam.reset()
        # scan BAM file assuming it is small
        aa = [a for a in self.bam]

        # retrieve data of interest
        data = [(a.pos, {
            "name":a.query_name,
            "sequence": a.query_sequence,
            "cigar": a.cigarstring,
            "position": a.pos,
            "qstart": a.qstart,
            "qend": a.qend}) for a in aa]

        # sort by starting position
        data.sort(key=lambda x: x[0])

        for i, read in enumerate(data):
            read = read[1]
            if i == 0:
                sequence = read["sequence"]  # 2 is query_sequence
            else:
                pr = data[i-1][1]  # previous read
                L = len(pr["sequence"])
                end_position_pr = pr['position'] - pr['qstart'] + L

                # overlap between previous read and this one
                overlap = end_position_pr - (read['position'] - read['qstart']) + 0
                print(overlap)
                print(pr['position'], pr['qstart'], L, end_position_pr)
                print(read['position'], read['qstart'])
                sequence = sequence + read["sequence"][overlap+1:]

        # argmax([sum(a==b for a,b in zip(X[-i:] , Y[:i]))/float(i+1) for i in range(1000)])
        return sequence

    def save_fasta(self, filename, sequence=None):
        if sequence is None:
            sequence = self.build_reference()

        with open(filename, "w") as fout:
            fout.write(">test\n{}".format(sequence))
|
|
bbbe3b7d79d57e350b1203a636b6ea64fe818caa
|
Update migration chain
|
src/ggrc/migrations/versions/20160421141928_1257140cbce5_delete_responses_table.py
|
src/ggrc/migrations/versions/20160421141928_1257140cbce5_delete_responses_table.py
|
# Copyright (C) 2016 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: [email protected]
# Maintained By: [email protected]

"""Delete responses table and any other references to responses

Create Date: 2016-04-21 14:19:28.527745
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name

import sqlalchemy as sa

from alembic import op

# revision identifiers, used by Alembic.
revision = '1257140cbce5'
down_revision = '33459bd8b70d'


def upgrade():
    """Upgrade database schema and/or data, creating a new revision."""
    op.drop_constraint('meetings_ibfk_3', 'meetings', type_='foreignkey')
    op.drop_column('meetings', 'response_id')
    op.drop_table('responses')


def downgrade():
    """Downgrade database schema and/or data back to the previous revision."""
    op.create_table(
        'responses',
        sa.Column('title', sa.String(length=250), nullable=False),
        sa.Column('request_id', sa.Integer(), nullable=False),
        sa.Column(
            'response_type',
            sa.Enum(u'documentation', u'interview', u'population sample'),
            nullable=False),
        sa.Column('status', sa.String(length=250), nullable=False),
        sa.Column('population_worksheet_id', sa.Integer(), nullable=False),
        sa.Column('population_count', sa.Integer(), nullable=False),
        sa.Column('sample_worksheet_id', sa.Integer(), nullable=False),
        sa.Column('sample_count', sa.Integer(), nullable=False),
        sa.Column('sample_evidence_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['request_id'], ['requests.id']),
        sa.ForeignKeyConstraint(['population_worksheet_id'], ['documents.id']),
        sa.ForeignKeyConstraint(['sample_worksheet_id'], ['documents.id']),
        sa.ForeignKeyConstraint(['sample_evidence_id'], ['documents.id']),
        sa.Index('population_worksheet_document', 'population_worksheet_id'),
        sa.Index('sample_evidence_document', 'sample_evidence_id'),
        sa.Index('sample_worksheet_document', 'sample_worksheet_id')
    )
    op.add_column(
        'meetings', sa.Column('response_id', sa.Integer(), nullable=False))
    op.create_foreign_key(
        'meetings_ibfk_3', 'meetings', 'responses', ['response_id'], ['id'])
|
Python
| 0.000001 |
@@ -169,27 +169,22 @@
By:
-dan@reciprocitylabs
+goodson@google
.com
@@ -205,29 +205,22 @@
By:
-peter@reciprocitylabs
+goodson@google
.com
@@ -579,20 +579,20 @@
= '
-33459bd8b70d
+5599d1769f25
 '
 
 
|
1d3327d8d804a6e53c020e69b77efbea2086379b
|
Add staging settings file
|
manchester_traffic_offences/settings/staging.py
|
manchester_traffic_offences/settings/staging.py
|
Python
| 0 |
@@ -0,0 +1,1439 @@
from .base import *
import os

DEBUG = False
TEMPLATE_DEBUG = DEBUG

INSTALLED_APPS += ('raven.contrib.django.raven_compat', )

RAVEN_CONFIG = {
    'dsn': os.environ['RAVEN_DSN'],
}

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': os.environ['POSTGRES_DB'],
        'USER': os.environ['POSTGRES_USER'],
        'PASSWORD': os.environ.get('POSTGRES_PASS', ''),
        'HOST': os.environ.get('POSTGRES_HOST', ''),
        'PORT': os.environ.get('POSTGRES_PORT', ''),
    }
}

ADMINS = (
    ('Ian George', '[email protected]'),
    ('Lyndon Garvey', '[email protected]')
)

MANAGERS = ADMINS
ALLOWED_HOSTS = ["staging.makeaplea.justice.gov.uk", ]
SESSION_COOKIE_SECURE = True

# Emails
SMTP_ROUTES["GSI"]["HOST"] = os.environ.get('GSI_EMAIL_HOST', '')
SMTP_ROUTES["GSI"]["PORT"] = int(os.environ.get('GSI_EMAIL_PORT', '25'))

EMAIL_HOST = os.environ.get('EMAIL_HOST', 'email-smtp.eu-west-1.amazonaws.com')
EMAIL_PORT = int(os.environ.get('EMAIL_PORT', '587'))
EMAIL_HOST_USER = os.environ['EMAIL_HOST_USERNAME']
EMAIL_HOST_PASSWORD = os.environ['EMAIL_HOST_PASSWORD']
EMAIL_USE_TLS = True

PLEA_EMAIL_FROM = os.environ['PLEA_EMAIL_FROM']
PLEA_EMAIL_TO = [os.environ['PLEA_EMAIL_TO'], ]
PLP_EMAIL_TO = [os.environ["PLP_EMAIL_TO"], ]

FEEDBACK_EMAIL_TO = [os.environ["FEEDBACK_EMAIL_TO"], ]
FEEDBACK_EMAIL_FROM = os.environ["FEEDBACK_EMAIL_FROM"]
|
|
a79a463624ab8bf62fe54d2392d4768c5a38626a
|
Add migration for removing challenge from Participant. (#203)
|
apps/participants/migrations/0003_remove_participant_challenge.py
|
apps/participants/migrations/0003_remove_participant_challenge.py
|
Python
| 0 |
@@ -0,0 +1,425 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.10.2 on 2016-12-02 14:45%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('participants', '0002_participantteam_participantteammember'),%0A %5D%0A%0A operations = %5B%0A migrations.RemoveField(%0A model_name='participant',%0A name='challenge',%0A ),%0A %5D%0A
|
|
d78b6c8d0efa3c4b29f254b7465e5e6fcb889395
|
Initialize P1_multiplicationTable
|
books/AutomateTheBoringStuffWithPython/Chapter12/PracticeProjects/P1_multiplicationTable.py
|
books/AutomateTheBoringStuffWithPython/Chapter12/PracticeProjects/P1_multiplicationTable.py
|
Python
| 0.000405 |
@@ -0,0 +1,223 @@
+# Create a program multiplicationTable.py that takes a number N from the%0A# command line and creates an N%C3%97N multiplication table in an Excel spreadsheet.%0A# Row 1 and column A should be used for labels and should be in bold.%0A
|
|
58cfcfbde61859a98b317f0498f35f7b7921e41b
|
Add dummy FileBrowseField
|
mezzanine_grappelli/filebrowser/fields.py
|
mezzanine_grappelli/filebrowser/fields.py
|
Python
| 0 |
@@ -0,0 +1,125 @@
+from filebrowser.fields import FileBrowseField as BaseFileBrowseField%0A%0A%0Aclass FileBrowseField(BaseFileBrowseField):%0A pass%0A
|
|
775104979a8ee5be040ac830133e69ca848d1ce1
|
add snpPriority.py, LD score and effect size weighted SNP scoring
|
snpPriority.py
|
snpPriority.py
|
Python
| 0 |
@@ -0,0 +1,2372 @@
+'''%0AsnpPriority.py - score SNPs based on their LD score and SE weighted effect sizes%0A===============================================================================%0A%0A:Author: Mike Morgan%0A:Release: $Id$%0A:Date: %7Ctoday%7C%0A:Tags: Python%0A%0APurpose%0A-------%0A%0A.. Score SNPs based on their LD score and SE weighted effect sizes from%0Aassociation analysis.%0A%0AUsage%0A-----%0A%0A.. Example use case%0A%0AExample::%0A%0A python snpPriority.py%0A%0AType::%0A%0A python snpPriority.py --help%0A%0Afor command line help.%0A%0ACommand line options%0A--------------------%0A%0A'''%0A%0Aimport sys%0Aimport CGAT.Experiment as E%0Aimport PipelineGWAS as gwas%0Aimport re%0Aimport pandas as pd%0A%0A%0Adef main(argv=None):%0A %22%22%22script main.%0A parses command line options in sys.argv, unless *argv* is given.%0A %22%22%22%0A%0A if argv is None:%0A argv = sys.argv%0A%0A # setup command line parser%0A parser = E.OptionParser(version=%22%25prog version: $Id$%22,%0A usage=globals()%5B%22__doc__%22%5D)%0A%0A parser.add_option(%22--database%22, dest=%22database%22, type=%22string%22,%0A help=%22SQL database containing LD information %22%0A %22in table format. Expects columns SNP_A, %22%0A %22SNP_B, R2, BP_A and BP_B (Plink --r2 output)%22)%0A%0A parser.add_option(%22--table-name%22, dest=%22table%22, type=%22string%22,%0A help=%22name of the SQL table containing the LD%22%0A %22values%22)%0A%0A parser.add_option(%22--chromosome%22, dest=%22chromosome%22, type=%22string%22,%0A help=%22chromosome to subset the association results %22%0A %22file on%22)%0A%0A # add common options (-h/--help, ...) and parse command line%0A (options, args) = E.Start(parser, argv=argv)%0A%0A infile = argv%5B-1%5D%0A%0A peek = pd.read_table(infile, nrows=5, sep=None, header=0)%0A if len(peek%5B%22TEST%22%5D != %22ADD%22):%0A clean = False%0A else:%0A clean = True%0A%0A snpscores = gwas.snpPriorityScore(gwas_results=infile,%0A database=options.database,%0A table_name=options.table,%0A chromosome=options.chromosome,%0A clean=clean)%0A%0A snpscores.to_csv(options.stdout, index_label=%22SNP%22,%0A sep=%22%5Ct%22)%0A%0A # write footer and output benchmark information.%0A E.Stop()%0A%0Aif __name__ == %22__main__%22:%0A sys.exit(main(sys.argv))%0A
|
|
80d579bd9376d955eab4a431fb3bcb493518582a
|
Create __init__.py
|
kernel/__init__.py
|
kernel/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
6deb5c1f2f614e6e6cb420c56c250a27fa032c8b
|
Add undelete script
|
bin/undelete.py
|
bin/undelete.py
|
Python
| 0.000001 |
@@ -0,0 +1,2394 @@
+#!/usr/bin/env python%0A%22%22%22%0ARemove the %60deleted%60 tag from containers (recursively) or from individual files.%0A%22%22%22%0Aimport argparse%0Aimport logging%0Aimport sys%0A%0Aimport bson%0A%0Afrom api import config%0Afrom api.dao.containerutil import propagate_changes%0A%0A%0Alog = logging.getLogger('scitran.undelete')%0A%0A%0Adef main():%0A cont_names = %5B'projects', 'sessions', 'acquisitions', 'analyses'%5D%0A cont_names_str = '%7C'.join(cont_names)%0A ap = argparse.ArgumentParser(description=sys.modules%5B__name__%5D.__doc__)%0A ap.add_argument('cont_name', help='container name to undelete %7B%7D'.format(cont_names_str))%0A ap.add_argument('cont_id', help='container id to undelete (bson.ObjectId)')%0A ap.add_argument('filename', nargs='?', help='filename within container (string, optional)')%0A args = ap.parse_args(sys.argv%5B1:%5D or %5B'--help'%5D)%0A%0A if args.cont_name not in cont_names:%0A raise ValueError('Invalid cont_name %22%7B%7D%22 (must be one of %7B%7D)'.format(args.cont_name, cont_names_str))%0A if not bson.ObjectId.is_valid(args.cont_id):%0A raise ValueError('Invalid cont_id %22%7B%7D%22'.format(args.cont_id))%0A%0A args.cont_id = bson.ObjectId(args.cont_id)%0A query = %7B'_id': args.cont_id%7D%0A collection = config.db%5Bargs.cont_name%5D%0A container = collection.find_one(query)%0A if container is None:%0A raise RuntimeError('Cannot find %7B%7D/%7B%7D'.format(args.cont_name, args.cont_id))%0A%0A update = %7B'$unset': %7B'deleted': True%7D%7D%0A if args.filename is None:%0A log.info('Removing %22deleted%22 tag from %7B%7D/%7B%7D...'.format(args.cont_name, args.cont_id))%0A collection.update_one(query, update)%0A log.info('Removing %22deleted%22 tag from child containers recursively...')%0A propagate_changes(args.cont_name, args.cont_id, None, update, include_refs=True)%0A else:%0A log.info('Removing %22deleted%22 tag from file %7B%7D/%7B%7D/%7B%7D...'.format(args.cont_name, args.cont_id, args.filename))%0A for f in container.get('files', %5B%5D):%0A if f%5B'name'%5D == args.filename:%0A del f%5B'deleted'%5D%0A break%0A else:%0A raise RuntimeError('Cannot find %7B%7D/%7B%7D/%7B%7D'.format(args.cont_name, args.cont_id, args.filename))%0A collection.update_one(query, %7B'$set': %7B'files': container%5B'files'%5D%7D%7D)%0A log.info('Done.')%0A%0A%0Aif __name__ == '__main__':%0A try:%0A main()%0A except (ValueError, RuntimeError) as exc:%0A log.error(exc.message)%0A sys.exit(1)%0A
|
|
d0cb340a874cc0430c8b77a0af052d8f2fd4d8c3
|
test script to cache Genewiki content
|
scheduled_bots/cache/genes/getWDHumanGenes.py
|
scheduled_bots/cache/genes/getWDHumanGenes.py
|
Python
| 0 |
@@ -0,0 +1,676 @@
+from wikidataintegrator import wdi_core%0Aimport pandas as pd%0Afrom rdflib import Graph%0Aimport time%0Aimport sys%0Aquery = %22%22%22%0ASELECT * WHERE %7B%0A ?item wdt:P31 wd:Q7187 ;%0A wdt:P703 wd:Q15978631 .%0A%7D%0A%22%22%22%0Akg = Graph()%0Aresults = wdi_core.WDItemEngine.execute_sparql_query(query)%0Ai =0%0Afor qid in results%5B%22results%22%5D%5B%22bindings%22%5D:%0A try:%0A # print(qid%5B%22item%22%5D%5B%22value%22%5D.replace(%22http://www.wikidata.org/entity/%22, %22%22))%0A kg.parse(qid%5B%22item%22%5D%5B%22value%22%5D+%22.ttl%22)%0A i+=1%0A print(i)%0A except:%0A print(print(qid%5B%22item%22%5D%5B%22value%22%5D.replace(%22http://www.wikidata.org/entity/%22, %22%22)))%0A time.sleep(5)%0Akg.serialize(destination=%22diseases.ttl%22, format=%22turtle%22)
|
|
4f2df78c7d8a9621340ff4ee5cfc6f22548d26d5
|
add TracedThread that continues the context propagation
|
proposal/helpers.py
|
proposal/helpers.py
|
Python
| 0 |
@@ -0,0 +1,912 @@
+%22%22%22Helpers that are used in examples. In the current state, we may not require%0Ato put these classes and functions as part of the main proposal.%0A%22%22%22%0Afrom threading import Thread%0Afrom proposal import tracer%0A%0A%0Aclass TracedThread(Thread):%0A %22%22%22Helper class OpenTracing-aware, that continues the propagation of%0A the current ActiveSpan in a new thread using an internal wrapper.%0A %22%22%22%0A def __init__(self, *args, **kwargs):%0A # implementation detail%0A # get the ActiveSpan when we're in the %22parent%22 thread%0A self._active_span = tracer.active_span_source.active_span()%0A super(TracedThread, self).__init__(*args, **kwargs)%0A%0A def run(self):%0A # implementation detail%0A # set the ActiveSpan in this thread and remove the local reference%0A tracer.active_span_source.make_active(self._active_span)%0A del self._active_span%0A super(TracedThread, self).run()%0A
|
|
8f1beddb8e3d1a63df10fcde9d3faae0d8d11171
|
Add kodi_automation.py
|
kodi_automation.py
|
kodi_automation.py
|
Python
| 0.00001 |
@@ -0,0 +1,1517 @@
+%0Aimport sys%0Aimport argparse%0A%0A%0Adef Classification(paths):%0A return (%5B%5D, %5B%5D)%0A%0A%0Adef MoveMoveFile(path, movies_dir, dry_run=False):%0A if dry_run:%0A sys.stderr.write('Moving movie', path)%0A return%0A%0A%0Adef MoveEpisodeFile(path, seria, season, episode, series_dir, dry_run=False):%0A if dry_run:%0A sys.stderr.write('Moving episode', *args)%0A return%0A%0A%0Adef main():%0A parser = argparse.ArgumentParser()%0A parser.add_argument('--scan-dir', '-s', dest='scan_dir', default=None)%0A parser.add_argument('--movies-dir', dest='movies_dir', default=None)%0A parser.add_argument('--series-dir', dest='series_dir', default=None)%0A parser.add_argument('--video-exts', '-v', dest='video_exts',%0A default='mkv,avi,mp4')%0A parser.add_argument('--dry-run', dest='dry_run', default=False)%0A args = parser.parse_args()%0A%0A video_exts = args.video_exts.split(',')%0A%0A new_paths = ScanDir(args.scan_dir)%0A new_paths = %5Bpath for path in new_paths if any(path.endswith(ext) for ext in video_exts)%5D%0A%0A movies_paths, episodes = Clasification(new_paths)%0A%0A for movie_path in movies_paths:%0A print 'Moving', path, 'to', args.movies_dir%0A MoveMoveFile(movie_path, args.movies_dir, dry_run=args.dry_run)%0A%0A for episode in episodes:%0A print 'Moving', episode.path, 'as', episode.seria, 'S', episode.season, 'E', episode.episode, 'to', args.series_dir%0A MoveEpisodeFile(%0A episode.path, episode.seria, episode.season, episode.episode,%0A args.series_dir, dry_run=args.dry_run)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
d1ca3e7363b835aeca7be2fa00cd7083d9fc8c08
|
Create divide_by_year.py
|
pipeline/preprocessing/google/divide_by_year.py
|
pipeline/preprocessing/google/divide_by_year.py
|
Python
| 0.999031 |
@@ -0,0 +1,1543 @@
+import glob%0Aimport gzip%0Aimport codecs%0Aimport re%0Aimport sys%0Aimport os%0A%0Awith_pos = False%0A%0Atargets = %7B%7D%0Amy_buffer = %7B%7D%0A%0Adef flush(a_buffer, some_targets, a_year):%0A for line in a_buffer%5Ba_year%5D:%0A some_targets%5Ba_year%5D.write(line)%0A a_buffer%5Ba_year%5D.clear()%0A%0A%0Aif len(sys.argv) != 3:%0A raise Exception(%22Provide 2 arguments:%5Cn%5Ct1,Source directory with raw corpus%5Cn%5Ct2,Target directory for transformed corpus%22)%0Adirectory = sys.argv%5B1%5D%0Atarget = sys.argv%5B2%5D%0Aif not os.path.exists(target):%0A os.makedirs(target)%0Afor gziped in glob.glob(os.path.join(directory, %22googlebooks-*-5gram-20120701-*.gz%22)):%0A print(%22Processing %22+gziped)%0A with gzip.open(gziped, 'rb') as unpacked:%0A reader = codecs.getreader(%22utf-8%22)%0A for line in reader(unpacked):%0A text, year, match_count, volume_count = line.split(%22%5Ct%22)%0A has_pos = %22_%22 in text%0A if (with_pos and has_pos) or (not with_pos and not has_pos):%0A if year not in targets:%0A targets%5Byear%5D = open(os.path.join(target,year),%22w%22,encoding=%22utf-8%22) %0A my_buffer%5Byear%5D = %5B%5D%0A elif len(my_buffer%5Byear%5D) %3E 10000:%0A flush(my_buffer, targets, year)%0A my_buffer%5Byear%5D.append(line)%0A%0Afor year in targets: %0A flush(my_buffer, targets, year)%0A targets%5Byear%5D.close()%0A
|
|
8832a542405a1999c296cc8b55d454b8cf35b5ea
|
Add merge.py
|
algorithms/merge.py
|
algorithms/merge.py
|
Python
| 0.000003 |
@@ -0,0 +1,800 @@
+import sys%0A%0Asys.setrecursionlimit(1000000)%0A%0A%0Aclass Merge:%0A def merge_sort(self, lists):%0A if len(lists) %3C= 1:%0A return lists%0A num = len(lists) // 2%0A left = self.merge_sort(lists%5B:num%5D)%0A right = self.merge_sort(lists%5Bnum:%5D)%0A return self.merge(left, right)%0A%0A def merge(self, left, right):%0A i, j = 0, 0%0A result = %5B%5D%0A while i %3C len(left) and j %3C len(right):%0A if left%5Bi%5D %3C= right%5Bj%5D:%0A result.append(left%5Bi%5D)%0A i += 1%0A else:%0A result.append(right%5Bj%5D)%0A j += 1%0A result += left%5Bi:%5D%0A result += right%5Bj:%5D%0A return result%0A%0A%0Aif __name__ == %22__main__%22:%0A s = %5B3, 4, 1, 6, 2, 9, 7, 0, 8, 5%5D%0A merge = Merge()%0A print(merge.merge_sort(s))%0A
|
|
85b518638e990cb7be298ea4b533aa465dd681b5
|
Add models to store data in...
|
acctwatch/models.py
|
acctwatch/models.py
|
Python
| 0 |
@@ -0,0 +1,2050 @@
+from sqlalchemy import *%0Afrom sqlalchemy.ext.declarative import declarative_base%0A%0Afrom sqlalchemy.orm import (%0A scoped_session,%0A sessionmaker,%0A relationship,%0A )%0A%0ADBSession = scoped_session(sessionmaker())%0ABase = declarative_base()%0A%0Aclass LoginItem(Base):%0A __table__ = Table('login_item', Base.metadata,%0A Column('id', Integer, primary_key=True, unique=True, autoincrement=True),%0A Column('guid', String, index=True),%0A Column('time', DateTime(timezone=True)),%0A Column('success', Boolean, default=False, nullable=False),%0A Column('failure', String, nullable=True),%0A Column('ip', String, nullable=False),%0A )%0A%0A actor = relationship(%22Actor%22, backref=%22logins%22, secondary=%22actor_logins%22)%0A location = relationship(%22Location%22, backref=%22logins%22, secondary=%22login_locations%22)%0A%0Aclass ActorLogins(Base):%0A __table__ = Table('actor_logins', Base.metadata, %0A Column('lid', Integer, ForeignKey('login_item.id', onupdate=%22CASCADE%22, ondelete=%22RESTRICT%22), nullable=False),%0A Column('aid', String, ForeignKey('actor.id', onupdate=%22CASCADE%22, ondelete=%22RESTRICT%22), nullable=False),%0A%0A PrimaryKeyConstraint('lid', 'aid'),%0A )%0A%0Aclass Actor(Base):%0A __table__ = Table('actor', Base.metadata, %0A Column('id', String, primary_key=True, unique=True),%0A Column('email', String),%0A )%0A%0Aclass Location(Base):%0A __table__ = Table('location', Base.metadata, %0A Column('id', Integer, primary_key=True, unique=True),%0A Column('location', String(), unique=True, index=True)%0A )%0A%0Aclass LoginLocation(Base):%0A __table__ = Table('login_locations', Base.metadata,%0A Column('loc_id', Integer, ForeignKey('location.id', onupdate=%22CASCADE%22, ondelete=%22RESTRICT%22), nullable=False),%0A Column('login_id', Integer, ForeignKey('login_item.id', onupdate=%22CASCADE%22, ondelete=%22RESTRICT%22), nullable=False),%0A%0A PrimaryKeyConstraint('loc_id', 'login_id'),%0A )%0A%0A%0A
|
|
42e88bc8e6d81916164e8e0fe6b8b6c476567526
|
add script to integrate disambiguated results
|
integrate.py
|
integrate.py
|
Python
| 0 |
@@ -0,0 +1,1348 @@
+#!/usr/bin/env python%0A%22%22%22%0ATakes in a CSV file that represents the output of the disambiguation engine:%0A Patent Number, Firstname, Lastname, Unique_Inventor_ID%0AGroups by Unique_Inventor_ID and then inserts them into the Inventor table using%0Alib.alchemy.match%0A%22%22%22%0A%0Aimport sys%0Aimport lib.alchemy as alchemy%0Afrom lib.util.csv_reader import read_file%0Afrom lib.handlers.xml_util import normalize_document_identifier%0Afrom collections import defaultdict%0Aimport cPickle as pickle%0A%0A%0Adef integrate(filename):%0A blocks = defaultdict(list)%0A for line in read_file(filename):%0A patent_number, name_first, name_last, unique_inventor_id = line%0A patent_number = normalize_document_identifier(patent_number)%0A rawinventors = alchemy.session.query(alchemy.RawInventor).filter_by(%0A patent_id = patent_number,%0A name_first = name_first,%0A name_last = name_last).all()%0A blocks%5Bunique_inventor_id%5D.extend(rawinventors)%0A pickle.dump(blocks, open('integrate.db', 'wb'))%0A for block in blocks.itervalues():%0A alchemy.match(block)%0A%0Adef main():%0A if len(sys.argv) %3C= 1:%0A print 'USAGE: python integrate.py %3Cpath-to-csv-file%3E'%0A sys.exit()%0A filename = sys.argv%5B1%5D%0A integrate(filename)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
80ecafd51cf258880bb5b1e183d5dd166c2d18fc
|
Add lockrun.py
|
lockrun.py
|
lockrun.py
|
Python
| 0.000002 |
@@ -0,0 +1,1985 @@
+import optparse%0Aimport signal%0Aimport threading%0Aimport syslog%0Aimport time%0Aimport os%0Aimport re%0A%0A%0Adef find_process(first_pid, process):%0A # Find a process in /proc%0A process = re.sub(%22 +%22, %22 %22, process).strip()%0A m = re.compile(%22%5E%5B0-9%5D+$%22)%0A all_proc = %5B x for x in os.listdir(%22/proc%22) if m.search(x)%5D%0A for p in all_proc%5Ball_proc.index(str(first_pid)):%5D:%0A try:%0A with open(%22/proc/%25s/cmdline%22 %25 p, %22r%22) as f:%0A cmdline = f.readline().replace(%22%5Cx00%22, %22 %22).rstrip('%5Cn').strip()%0A if process == cmdline:%0A return int(p)%0A except IOError:%0A pass%0A%0A return False%0A%0Adef process_watcher(child_process, parent_pid, timeout):%0A%0A child_pid = find_process(parent_pid, child_process)%0A%0A if child_pid:%0A syslog.syslog(syslog.LOG_WARNING,%0A %22%22%22Trying to kill process %22%25s%22%5B%25s%5D by timeout(%25ss)%22%22%22%0A %25 (child_process, child_pid, timeout))%0A%0A os.kill(child_pid, signal.SIGTERM)%0A else:%0A syslog.syslog(syslog.LOG_WARNING,%0A %22%22%22Can't find task process %22%25s%22 in /proc%22%22%22 %25 child_process)%0A%0A%0Aif __name__ == %22__main__%22:%0A%0A op = optparse.OptionParser()%0A op.add_option(%22-P%22, %22--program%22, dest=%22program%22, default=False, type=%22string%22)%0A op.add_option(%22-p%22, %22--lockfile%22, dest=%22lockfile%22, default=False, type=%22string%22)%0A op.add_option(%22-t%22, %22--timeout%22, dest=%22timeout%22, default=False, type=%22int%22)%0A%0A opts, args = op.parse_args()%0A%0A if opts.timeout:%0A watcher = threading.Timer(opts.timeout, process_watcher, %5Bopts.program, os.getpid(), opts.timeout%5D)%0A watcher.start()%0A%0A # Run program%0A start_time = time.time()%0A return_code = os.system(opts.program)%0A total_tile = time.time() - start_time%0A%0A if opts.timeout:%0A watcher.cancel()%0A%0A syslog.syslog(syslog.LOG_NOTICE,%0A %22%22%22Command %22%25s%22 is done with return code: %25s. Execution time %25.2fs%22%22%22 %25 (opts.program, return_code, total_tile))%0A
|
|
f87fba419a59513d2356ab8b911dba471b0676ce
|
mark entity class Parameter explicitly as abstract.
|
icat/entities.py
|
icat/entities.py
|
"""Provide the classes corresponding to the entities in the ICAT schema.
Entity classes defined in this module are derived from the abstract
base class :class:`icat.entity.Entity`. They override the class
attributes :attr:`icat.entity.Entity.BeanName`,
:attr:`icat.entity.Entity.Constraint`,
:attr:`icat.entity.Entity.InstAttr`,
:attr:`icat.entity.Entity.InstRel`,
:attr:`icat.entity.Entity.InstMRel`,
:attr:`icat.entity.Entity.AttrAlias`, and
:attr:`icat.entity.Entity.SortAttrs` as appropriate.
.. note::
This module is used internally in :mod:`icat.client`. Most users
will not need to use it directly.
"""
import itertools
from icat.entity import Entity
from icat.exception import InternalError


class GroupingMixin:
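    # Convenience helpers for Grouping entities: bulk-create UserGroup links and query member users.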
def addUsers(self, users):
ugs = []
uids = set()
for u in users:
if u.id in uids:
continue
ugs.append(self.client.new('userGroup', user=u, grouping=self))
uids.add(u.id)
if ugs:
self.client.createMany(ugs)
def getUsers(self, attribute=None):
if attribute is not None:
query = ("User.%s <-> UserGroup <-> %s [id=%d]"
% (attribute, self.BeanName, self.id))
else:
query = ("User <-> UserGroup <-> %s [id=%d]"
% (self.BeanName, self.id))
return self.client.search(query)


class InstrumentMixin:
def addInstrumentScientists(self, users):
iss = []
for u in users:
iss.append(self.client.new('instrumentScientist',
instrument=self, user=u))
if iss:
self.client.createMany(iss)
def getInstrumentScientists(self, attribute=None):
if attribute is not None:
query = ("User.%s <-> InstrumentScientist <-> Instrument [id=%d]"
% (attribute, self.id))
else:
query = ("User <-> InstrumentScientist <-> Instrument [id=%d]"
% (self.id))
return self.client.search(query)


class InvestigationMixin:
def addInstrument(self, instrument):
ii = self.client.new('investigationInstrument',
investigation=self, instrument=instrument)
ii.create()
def addKeywords(self, keywords):
kws = []
for k in keywords:
kws.append(self.client.new('keyword', name=k, investigation=self))
if kws:
self.client.createMany(kws)
def addInvestigationUsers(self, users, role='Investigator'):
ius = []
for u in users:
ius.append(self.client.new('investigationUser',
investigation=self, user=u, role=role))
if ius:
self.client.createMany(ius)


class Investigation44Mixin(InvestigationMixin):
def addInvestigationGroup(self, group, role=None):
ig = self.client.new('investigationGroup', investigation=self)
ig.grouping = group
ig.role = role
ig.create()


_parent = {
'DataCollectionParameter': 'parameter',
'DatafileParameter': 'parameter',
'DatasetParameter': 'parameter',
'InvestigationParameter': 'parameter',
'SampleParameter': 'parameter',
}

_extra_attrs = {
'DataCollection': [
(None, {
'AttrAlias': {'parameters': 'dataCollectionParameters'},
'SortAttrs': ('dataCollectionDatasets', 'dataCollectionDatafiles'),
}),
('4.3.1', {
'AttrAlias': {'dataCollectionParameters': 'parameters'},
}),
],
'DataCollectionDatafile': [
(None, {
'SortAttrs': ('datafile',),
}),
],
'DataCollectionDataset': [
(None, {
'SortAttrs': ('dataset',),
}),
],
'Grouping': [
(None, {
'Mixin': GroupingMixin,
}),
],
'Instrument': [
(None, {
'Mixin': InstrumentMixin,
}),
],
'Investigation': [
(None, {
'Mixin': InvestigationMixin,
}),
('4.4.0', {
'Mixin': Investigation44Mixin,
}),
],
'InvestigationType': [
(None, {
'SortAttrs': ('facility', 'name'),
}),
],
'Job': [
(None, {
'SortAttrs': ('application', 'arguments',
'inputDataCollection', 'outputDataCollection'),
}),
],
'Log': [
(None, {
'SortAttrs': ('operation', 'entityName'),
}),
],
'Publication': [
(None, {
'SortAttrs': ('investigation', 'fullReference'),
}),
],
'Rule': [
(None, {
'AttrAlias': {'group': 'grouping'},
'SortAttrs': ('grouping', 'what'),
}),
],
'Study': [
(None, {
'SortAttrs': ('name',),
}),
],
'UserGroup': [
(None, {
'AttrAlias': {'group': 'grouping'},
}),
],
}


def getTypeMap(client):
typemap = { 'entityBaseBean': Entity, }
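    # 'Parameter' is prepended explicitly so that the concrete *Parameter classes (see _parent) share a common parent class.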
for beanName in itertools.chain(('Parameter',), client.getEntityNames()):
try:
parent = typemap[_parent[beanName]]
except KeyError:
parent = Entity
info = client.getEntityInfo(beanName)
attrs = { 'BeanName': str(beanName), }
try:
attrs['__doc__'] = str(info.classComment)
except AttributeError:
attrs['__doc__'] = ""
try:
constraints = info.constraints[0]['fieldNames']
if constraints:
attrs['Constraint'] = tuple(str(n) for n in constraints)
except AttributeError:
pass
instAttr = []
instRel = []
instMRel = []
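        # Classify each field as a plain attribute, a to-one relation, or a to-many relation.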
for field in info.fields:
if field['name'] in parent.MetaAttr:
continue
elif field['relType'] == 'ATTRIBUTE':
instAttr.append(str(field['name']))
elif field['relType'] == 'ONE':
instRel.append(str(field['name']))
elif field['relType'] == 'MANY':
instMRel.append(str(field['name']))
else:
raise InternalError("Invalid relType '%s'" % field['relType'])
instAttr = frozenset(instAttr)
if instAttr != parent.InstAttr:
attrs['InstAttr'] = instAttr
instRel = frozenset(instRel)
if instRel != parent.InstRel:
attrs['InstRel'] = instRel
instMRel = frozenset(instMRel)
if instMRel != parent.InstMRel:
attrs['InstMRel'] = instMRel
mixin = None
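        # Apply version-gated extra attributes and, if defined, a mixin class for this entity.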
if beanName in _extra_attrs:
for minver, extra in _extra_attrs[beanName]:
if minver and minver > client.apiversion:
continue
mixin = extra.pop('Mixin', None)
attrs.update(extra)
if mixin:
bases = (parent, mixin)
else:
bases = (parent,)
instanceName = beanName[0].lower() + beanName[1:]
typemap[instanceName] = type(str(beanName), bases, attrs)
return typemap
|
Python
| 0 |
@@ -3282,24 +3282,109 @@
a_attrs = %7B%0A
+ 'Parameter': %5B%0A (None, %7B%0A 'BeanName': None,%0A %7D),%0A %5D,%0A
'DataCol
|
1551cb57ab21364a4e96fa109786ccb0a4ccc3a0
|
Create MergeCSVs.py
|
utils/MergeCSVs.py
|
utils/MergeCSVs.py
|
Python
| 0 |
@@ -0,0 +1,422 @@
+# merge all columns of the csv file in current directory into a single 'merge.csv' file. %0A# requires pandas librairy to be installed.%0A# you can customize the merge in many ways: https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.concat.html%0A%0Aimport pandas as pd%0Aimport glob%0A%0Adfs = glob.glob('*.csv')%0Aresult = pd.concat(%5Bpd.read_csv(df, sep=';') for df in dfs%5D, ignore_index=True)%0Aresult.to_csv('merge.csv')%0A
|
|
9b6c1af3420653124495103169865036df4f7705
|
Add logging module for Pyro-related debugging
|
osbrain/logging.py
|
osbrain/logging.py
|
Python
| 0 |
@@ -0,0 +1,96 @@
+import os%0Aos.environ%5B%22PYRO_LOGFILE%22%5D = %22pyro_osbrain.log%22%0Aos.environ%5B%22PYRO_LOGLEVEL%22%5D = %22DEBUG%22%0A
|
|
bb188bcc196b12842378aa1c0c535800717a6b61
|
add example to extract word frequencies
|
polbotcheck/word_frequencies.py
|
polbotcheck/word_frequencies.py
|
Python
| 0.001091 |
@@ -0,0 +1,1495 @@
+import nltk%0Afrom nltk.corpus import stopwords%0A%0Adef get_word_frequencies(text, words_n=10, lang='german'):%0A default_stopwords = set(nltk.corpus.stopwords.words(lang))%0A words = nltk.tokenize.word_tokenize(text)%0A words = %5Bword for word in words if len(word) %3E 1%5D%0A words = %5Bword for word in words if not word.isnumeric()%5D%0A words = %5Bword.lower() for word in words%5D%0A words = %5Bword for word in words if word not in default_stopwords%5D%0A%0A fdist = nltk.FreqDist(words)%0A for word, frequency in fdist.most_common(words_n):%0A print(u'%7B%7D:%7B%7D'.format(word, frequency))%0A%0A return fdist.most_common(words_n)%0Aif __name__ == %22__main__%22:%0A text = 'Die offene Gesellschaft ist ein in der Tradition des Liberalismus stehendes Gesellschaftsmodell Karl Poppers, das zum Ziel hat, %E2%80%9Edie kritischen F%C3%A4higkeiten des Menschen%E2%80%9C freizusetzen. Die Gewalt des Staates soll dabei so weit wie m%C3%B6glich geteilt werden, um Machtmissbrauch zu verhindern. Poppers Vorstellung von der offenen Gesellschaft ist eng mit der Staatsform der Demokratie verbunden, allerdings nicht verstanden als Herrschaft der Mehrheit, sondern als die M%C3%B6glichkeit, die Regierung gewaltfrei abzuw%C3%A4hlen. Der offenen Gesellschaft steht einerseits die Laissez-Faire-Gesellschaft gegen%C3%BCber, andererseits die totalit%C3%A4re, am holistisch-kollektivistischen Denken ausgerichtete %E2%80%9Egeschlossene Gesellschaft%E2%80%9C, die Popper auch ironisch den %E2%80%9EHimmel auf Erden%E2%80%9C nennt, weil sie als solcher propagiert wird.'%0A%0A get_word_frequencies(text)%0A
|
|
20ecbf00c05d1f959e78cbf87cf459fd46dea59f
|
Create pythonhelloworld.py
|
pythonhelloworld.py
|
pythonhelloworld.py
|
Python
| 0.999993 |
@@ -0,0 +1,21 @@
+print %22hello world%22 %0A
|
|
99a63431e441a1c52d3f16f6faf0594497755d45
|
add a new special case install_zstack. It only installs zstack and initializes the database, but does not do any real cloud deployment
|
integrationtest/vm/basic/install_zstack.py
|
integrationtest/vm/basic/install_zstack.py
|
Python
| 0 |
@@ -0,0 +1,597 @@
+'''%0D%0A%0D%0A@author: Youyk%0D%0A'''%0D%0A%0D%0Aimport os%0D%0Aimport zstackwoodpecker.setup_actions as setup_actions%0D%0Aimport zstackwoodpecker.test_lib as test_lib%0D%0Aimport zstackwoodpecker.test_util as test_util%0D%0A%0D%0AUSER_PATH = os.path.expanduser('~')%0D%0AEXTRA_SUITE_SETUP_SCRIPT = '%25s/.zstackwoodpecker/extra_suite_setup_config.sh' %25 USER_PATH%0D%0Adef test():%0D%0A setup = setup_actions.SetupAction()%0D%0A setup.plan = test_lib.all_config%0D%0A setup.run()%0D%0A%0D%0A if os.path.exists(EXTRA_SUITE_SETUP_SCRIPT):%0D%0A os.system(%22bash %25s%22 %25 EXTRA_SUITE_SETUP_SCRIPT)%0D%0A test_util.test_pass('ZStack Installation Success')%0D%0A%0D%0A
|
|
c6ded12845f25e305789840e1687bfee83e82be5
|
Add a few simple pytest tests
|
tests/test_standings.py
|
tests/test_standings.py
|
Python
| 0 |
@@ -0,0 +1,964 @@
+#!/usr/bin/env python%0Aimport pytest%0Afrom datetime import datetime%0Afrom mlbgame import standings%0A%0A%0A%0Adate = datetime(2017, 5, 15, 19, 4, 59, 367187)%0As = standings.Standings(date)%0A%0Adef test_standings_url():%0A standings_url = 'http://mlb.mlb.com/lookup/json/named.standings_schedule_date.bam?season=2017&' %5C%0A 'schedule_game_date.game_date=%25272017/05/15%2527&sit_code=%2527h0%2527&league_id=103&' %5C%0A 'league_id=104&all_star_sw=%2527N%2527&version=2'%0A assert s.standings_url == standings_url%0A%0A%0Adef test_historical_standings_url():%0A date = datetime(2016, 5, 15)%0A s = standings.Standings(date)%0A standings_url = 'http://mlb.mlb.com/lookup/json/named.historical_standings_schedule_date.bam?season=2016&' %5C%0A 'game_date=%25272016/05/15%2527&sit_code=%2527h0%2527&league_id=103&league_id=104&' %5C%0A 'all_star_sw=%2527N%2527&version=48'%0A assert s.standings_url == standings_url%0A%0A%0Adef test_divisions_is_list():%0A assert type(s.divisions) is list%0A
|
|
66d3d329674521c8756a8644f2f0a58824a1ec41
|
add spider for ups freight
|
locations/spiders/ups_freight_service_centers.py
|
locations/spiders/ups_freight_service_centers.py
|
Python
| 0.000001 |
@@ -0,0 +1,1776 @@
+# -*- coding: utf-8 -*-%0Aimport re%0A%0Aimport scrapy%0Afrom locations.items import GeojsonPointItem%0A%0A%0Aclass UPSFreightServiceCenter(scrapy.Spider):%0A download_delay = 0.2%0A name = %22ups_freight_service_centers%22%0A allowed_domains = %5B%22upsfreight.com%22%5D%0A start_urls = (%0A 'https://www.upsfreight.com/ProductsandServices/ServiceCenterDir/default.aspx',%0A )%0A%0A def parse_location(self, response):%0A ref = re.search(r'.+/(.+)', response.url).group(1)%0A%0A properties = %7B%0A 'addr_full': response.xpath('//span%5Bcontains(@id, %22Address%22)%5D/text()').extract()%5B0%5D,%0A 'city': response.xpath('//span%5Bcontains(@id, %22Zip%22)%5D/text()').extract()%5B0%5D.split(',')%5B0%5D,%0A 'state': response.xpath('//span%5Bcontains(@id, %22Zip%22)%5D/text()').extract()%5B0%5D.split(', ')%5B1%5D.split(' ')%5B0%5D,%0A 'postcode': response.xpath('//span%5Bcontains(@id, %22Zip%22)%5D/text()').extract()%5B0%5D.split(', ')%5B1%5D.split(' ')%5B1%5D,%0A 'ref': ref,%0A 'website': response.url,%0A 'phone': response.xpath('//span%5Bcontains(@id, %22Telephone%22)%5D/text()').extract()%5B0%5D,%0A 'name': response.xpath('//span%5Bcontains(@id, %22lName%22)%5D/text()').extract()%5B0%5D,%0A 'country': ref.split('qcountry=')%5B1%5D.split('&svc')%5B0%5D%0A %7D%0A%0A yield GeojsonPointItem(**properties)%0A%0A def parse_state(self, response):%0A location_urls = response.xpath('//*%5B@id=%22app_ctl00_scTable_hlDetail%22%5D/@href').extract()%0A%0A for url in location_urls:%0A yield scrapy.Request(response.urljoin(url), callback=self.parse_location)%0A%0A def parse(self, response):%0A urls = response.xpath('//table//table//table//table//table//a/@href').extract()%0A%0A for url in urls:%0A yield scrapy.Request(response.urljoin(url), callback=self.parse_state)%0A
|
|
74f5bc8e9abb79a2a394fd0f397ca30af9e83794
|
Rework arbitrary arg passing to be consistent with v20
|
acos_client/v30/axapi_http.py
|
acos_client/v30/axapi_http.py
|
# Copyright 2014, Doug Wiegley, A10 Networks.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
import sys
import requests
if sys.version_info >= (3, 0):
import http.client as http_client
else:
# Python 2
import httplib as http_client
http_client.HTTPConnection.debuglevel = 1
import responses as acos_responses
import acos_client
LOG = logging.getLogger(__name__)
import sys
out_hdlr = logging.StreamHandler(sys.stdout)
out_hdlr.setLevel(logging.DEBUG)
LOG.addHandler(out_hdlr)
LOG.setLevel(logging.DEBUG)
broken_replies = {
"": '{"response": {"status": "OK"}}'
}


class HttpClient(object):
HEADERS = {
"Content-type": "application/json",
"User-Agent": "ACOS-Client-AGENT-%s" % acos_client.VERSION,
}
def __init__(self, host, port=None, protocol="https"):
if port is None:
            if protocol == 'http':
port = 80
else:
port = 443
self.url_base = "%s://%s:%s" % (protocol, host, port)
def request(self, method, api_url, params={}, headers=None,
file_name=None, file_content=None, **kwargs):
LOG.debug("axapi_http: full url = %s", self.url_base + api_url)
LOG.debug("axapi_http: %s url = %s", method, api_url)
LOG.debug("axapi_http: params = %s", json.dumps(params, indent=4))
# Update params with **kwargs for currently unsupported configuration
# of objects
formatted_kwargs = dict([(k.replace('_', '-'), v) for k, v in kwargs.iteritems()])
param_keys = params.keys()
if params != {}:
if len(param_keys) != 1:
raise KeyError("params must have exactly one key, not {}. "
"params: {}".format(len(param_keys), params))
params[param_keys[0]].update(formatted_kwargs)
if (file_name is None and file_content is not None) or \
(file_name is not None and file_content is None):
raise ValueError("file_name and file_content must both be "
"populated if one is")
hdrs = self.HEADERS.copy()
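        # Copy the class-level defaults so per-request headers never mutate HEADERS.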
if headers:
hdrs.update(headers)
if params:
# FIXME - re-enable kwargs merge at some point; dict keys
# between 2.1 and 3.0 do not match, and a10-neutron-lbaas
# uses 2.1 wrappers
# extra_params = kwargs.get('axapi_args', {})
params_copy = params.copy()
# params_copy.update(extra_params)
LOG.debug("axapi_http: params_all = %s", params_copy)
payload = json.dumps(params_copy, encoding='utf-8')
else:
payload = None
LOG.debug("axapi_http: headers = %s", json.dumps(hdrs, indent=4))
if file_name is not None:
files = {
'file': (file_name, file_content, "application/octet-stream"),
'json': ('blob', payload, "application/json")
}
hdrs.pop("Content-type", None)
hdrs.pop("Content-Type", None)
z = requests.request(method, self.url_base + api_url, verify=False,
files=files, headers=hdrs)
else:
z = requests.request(method, self.url_base + api_url, verify=False,
data=payload, headers=hdrs)
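        # A 204 response has no body, so there is nothing to decode.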
if z.status_code == 204:
return None
try:
r = z.json()
except ValueError as e:
# Suspect that the JSON response was empty, like in the case of a
# successful file import.
if z.status_code == 200:
return {}
else:
raise e
LOG.debug("axapi_http: data = %s", json.dumps(r, indent=4))
if 'response' in r and 'status' in r['response']:
if r['response']['status'] == 'fail':
acos_responses.raise_axapi_ex(r, method, api_url)
if 'authorizationschema' in r:
acos_responses.raise_axapi_auth_error(
r, method, api_url, headers)
return r
def get(self, api_url, params={}, headers=None, **kwargs):
return self.request("GET", api_url, params, headers, **kwargs)
def post(self, api_url, params={}, headers=None, **kwargs):
return self.request("POST", api_url, params, headers, **kwargs)
def put(self, api_url, params={}, headers=None, **kwargs):
return self.request("PUT", api_url, params, headers, **kwargs)
def delete(self, api_url, params={}, headers=None, **kwargs):
return self.request("DELETE", api_url, params, headers, **kwargs)
|
Python
| 0 |
@@ -1663,24 +1663,41 @@
ontent=None,
+ axapi_args=None,
**kwargs):%0A
@@ -1688,32 +1688,32 @@
one, **kwargs):%0A
-
LOG.debu
@@ -1935,20 +1935,22 @@
ms with
-**kw
+axapi_
args for
@@ -1985,26 +1985,16 @@
guration
-%0A #
of obje
@@ -1997,16 +1997,55 @@
objects%0A
+ if axapi_args is not None:%0A
@@ -2050,26 +2050,30 @@
formatted_
-kw
+axapi_
args = dict(
@@ -2113,278 +2113,72 @@
v in
- kwargs.iteritems()%5D)%0A param_keys = params.keys()%0A if params != %7B%7D:%0A if len(param_keys) != 1:%0A raise KeyError(%22params must have exactly one key, not %7B%7D. %22%0A %22params: %7B%7D%22.format(len(param_keys), params)
+%0A axapi_args.iteritems()%5D
)%0A
@@ -2197,23 +2197,8 @@
rams
-%5Bparam_keys%5B0%5D%5D
.upd
@@ -2211,18 +2211,22 @@
rmatted_
-kw
+axapi_
args)%0A%0A
@@ -2556,32 +2556,32 @@
pdate(headers)%0A%0A
+
if param
@@ -2587,238 +2587,8 @@
ms:%0A
- # FIXME - re-enable kwargs merge at some point; dict keys%0A # between 2.1 and 3.0 do not match, and a10-neutron-lbaas%0A # uses 2.1 wrappers%0A # extra_params = kwargs.get('axapi_args', %7B%7D)%0A
|
9311d3d4acd8c67c20d76cc74d00e0f5a83318e6
|
add product-of-array-except-self
|
vol5/product-of-array-except-self/product-of-array-except-self.py
|
vol5/product-of-array-except-self/product-of-array-except-self.py
|
Python
| 0.999376 |
@@ -0,0 +1,432 @@
+class Solution(object):%0A def productExceptSelf(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: List%5Bint%5D%0A %22%22%22%0A n = len(nums)%0A ret = %5B1%5D * n%0A product = 1%0A for i in range(n):%0A ret%5Bi%5D = product%0A product *= nums%5Bi%5D%0A product = 1%0A for i in range(n - 1, -1, -1):%0A ret%5Bi%5D *= product%0A product *= nums%5Bi%5D%0A return ret
|
|
6ae6544cca07e857d680d199b2c2f436cb1d9a82
|
add wordpress stats
|
wordpress_stats.py
|
wordpress_stats.py
|
Python
| 0.000001 |
@@ -0,0 +1,1066 @@
+from utils import *%0D%0Aimport urllib, json%0D%0Aimport time%0D%0Aimport datetime%0D%0A%0D%0Adef dump(blogid,filepath): %0D%0A posts = %5B%5D%0D%0A offset = 0%0D%0A while True:%0D%0A puts(%22offset%22,offset)%0D%0A url = %22https://public-api.wordpress.com/rest/v1/sites/%22 + blogid + %22/posts?number=100&offset=%22 + str(offset)%0D%0A response = urllib.urlopen(url);%0D%0A data = json.loads(response.read())%0D%0A for post in data%5B'posts'%5D:%0D%0A posts.append(post)%0D%0A if len(data%5B'posts'%5D) %3C 100:%0D%0A break%0D%0A offset += 100%0D%0A%0D%0A output=open(filepath, 'w+')%0D%0A content = %22%3Cwordpress nfollowers=%5C%22%22 + %22NA%22 + %22%5C%22 timestamp=%5C%22%22 + str(time.time()) + %22%5C%22%3E%5Cn%22%0D%0A%0D%0A for post in posts:%0D%0A puts(post%5B'title'%5D,post%5B'like_count'%5D,post%5B'date'%5D)%0D%0A content = content + %22%5Ct%3Cpost name=%5C%22%22 + post%5B'title'%5D + %22%5C%22 %5Ct timestamp=%5C%22%22 + str(post%5B'date'%5D) + %22%5C%22 %5Ct fav_count=%5C%22%22 + str(post%5B'like_count'%5D) + %22%5C%22%3E%3C/post%3E%5Cn%22%0D%0A%0D%0A content = content + %22%3C/wordpress%3E%5Cn%22%0D%0A output.write(content.encode('utf8'))%0D%0A output.close()%0D%0A%0D%0A# dump(%22wordpressexample.xml%22)%0D%0A
|
|
e349a43ad33abf0e6cce2a410e0e6cb2342456f1
|
No in Python
|
2017-05-06/no.py
|
2017-05-06/no.py
|
Python
| 0.999311 |
@@ -0,0 +1,12 @@
+print('No!')
|
|
9cc09c6143025d88eedfa4f8eedcd23e2fe7990e
|
Create sahilprakash.py
|
Python/sahilprakash.py
|
Python/sahilprakash.py
|
Python
| 0.000024 |
@@ -0,0 +1,22 @@
+print(%22Hello World!%22)%0A
|
|
9cc13ca511987584ea4f52cf0c2e57e6b98a9e8b
|
Add lc0350_intersection_of_two_arrays_ii.py
|
lc0350_intersection_of_two_arrays_ii.py
|
lc0350_intersection_of_two_arrays_ii.py
|
Python
| 0.000351 |
@@ -0,0 +1,1025 @@
+%22%22%22Leetcode 350. Intersection of Two Arrays II%0AEasy%0A%0AURL: https://leetcode.com/problems/intersection-of-two-arrays-ii/%0A%0AGiven two arrays, write a function to compute their intersection.%0A%0AExample 1:%0AInput: nums1 = %5B1,2,2,1%5D, nums2 = %5B2,2%5D%0AOutput: %5B2,2%5D%0A%0AExample 2:%0AInput: nums1 = %5B4,9,5%5D, nums2 = %5B9,4,9,8,4%5D%0AOutput: %5B4,9%5D%0A%0ANote:%0A- Each element in the result should appear as many times as it shows in both arrays.%0A- The result can be in any order.%0A%0AFollow up:%0A- What if the given array is already sorted? How would you optimize your algorithm?%0A- What if nums1's size is small compared to nums2's size? Which algorithm is better?%0A- What if elements of nums2 are stored on disk, and the memory is limited such that%0A you cannot load all elements into the memory at once?%0A%22%22%22%0A%0Aclass Solution(object):%0A def intersect(self, nums1, nums2):%0A %22%22%22%0A :type nums1: List%5Bint%5D%0A :type nums2: List%5Bint%5D%0A :rtype: List%5Bint%5D%0A %22%22%22%0A pass%0A%0A%0Adef main():%0A pass%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
eb3882051241843716ef9b7ceef8aeb6ee2a35c6
|
add mysqlproxy.py
|
misc/mysqlproxy.py
|
misc/mysqlproxy.py
|
Python
| 0.000002 |
@@ -0,0 +1,3046 @@
+#!/usr/bin/env python3%0A##############################################################################%0A#The MIT License (MIT)%0A#%0A#Copyright (c) 2016 Hajime Nakagami%0A#%0A#Permission is hereby granted, free of charge, to any person obtaining a copy%0A#of this software and associated documentation files (the %22Software%22), to deal%0A#in the Software without restriction, including without limitation the rights%0A#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell%0A#copies of the Software, and to permit persons to whom the Software is%0A#furnished to do so, subject to the following conditions:%0A#%0A#The above copyright notice and this permission notice shall be included in all%0A#copies or substantial portions of the Software.%0A#%0A#THE SOFTWARE IS PROVIDED %22AS IS%22, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR%0A#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,%0A#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE%0A#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER%0A#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,%0A#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE%0A#SOFTWARE.%0A##############################################################################%0Aimport sys%0Aimport socket%0Aimport binascii%0A%0Adef recv_mysql_packet(sock):%0A head = sock.recv(4)%0A n = int.from_bytes(head%5B:3%5D, byteorder='little')%0A%0A recieved = b''%0A while n:%0A bs = sock.recv(n)%0A recieved += bs%0A n -= len(bs)%0A return head + recieved%0A%0Adef asc_dump(s):%0A r = ''%0A for c in s:%0A r += chr(c) if (c %3E= 32 and c %3C 128) else '.'%0A if r:%0A print('%5B' + r + '%5D')%0A%0A%0Adef proxy_wire(server_name, server_port, listen_host, listen_port):%0A sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0A sock.bind((listen_host, listen_port))%0A sock.listen(1)%0A client_sock, addr = sock.accept()%0A server_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)%0A server_sock.connect((server_name, server_port))%0A%0A while True:%0A client_data = recv_mysql_packet(client_sock)%0A server_sock.send(client_data)%0A print('%3E%3E', binascii.b2a_hex(client_data).decode('ascii'))%0A asc_dump(client_data)%0A%0A server_data = recv_mysql_packet(server_sock)%0A client_sock.send(server_data)%0A print('%3C%3C', binascii.b2a_hex(server_data).decode('ascii'))%0A asc_dump(server_data)%0A%0Aif __name__ == '__main__':%0A if len(sys.argv) %3C 3:%0A print('Usage : ' + sys.argv%5B0%5D + ' server%5B:port%5D %5Blisten_host:%5Dlisten_port')%0A sys.exit()%0A%0A server = sys.argv%5B1%5D.split(':')%0A server_name = server%5B0%5D%0A if len(server) == 1:%0A server_port = 3306%0A else:%0A server_port = int(server%5B1%5D)%0A%0A listen = sys.argv%5B2%5D.split(':')%0A if len(listen) == 1:%0A listen_host = 'localhost'%0A listen_port = int(listen%5B0%5D)%0A else:%0A listen_host = listen%5B0%5D%0A listen_port = int(listen%5B1%5D)%0A%0A proxy_wire(server_name, server_port, listen_host, listen_port)%0A
|
|
f1c389a0028c6f92300573bef587c084204e858f
|
Create circlecli.py
|
mocks/circlecli.py
|
mocks/circlecli.py
|
Python
| 0 |
@@ -0,0 +1,979 @@
+# -*- coding: utf-8 -*-%0A%0A%22%22%22%0AMocks for the CircleCLI API library tests.%0A%0A%22%22%22%0A%0A%0Afrom httmock import response, urlmatch%0A%0ANETLOC = r'(.*%5C.)?circleci%5C.com$'%0AHEADERS = %7B'content-type': 'application/json'%7D%0AGET = 'get'%0A%0A%0Aclass Resource:%0A %22%22%22 A CircleCli resource.%0A%0A :param path: The file path to the resource.%0A%0A %22%22%22%0A%0A def __init__(self, path):%0A self.path = path%0A%0A def get(self):%0A %22%22%22 Perform a GET request on the resource.%0A%0A :rtype: str%0A%0A %22%22%22%0A with open(self.path, 'r') as f:%0A content = f.read()%0A return content%0A%0A%0A@urlmatch(netloc=NETLOC, method=GET)%0Adef resource_get(url, request):%0A file_path = url.netloc + url.path%0A try:%0A content = Resource(file_path).get()%0A except EnvironmentError:%0A # catch any environment errors (i.e. file does not exist) and return a%0A # 404.%0A return response(404, %7B%7D, HEADERS, None, 5, request)%0A return response(200, content, HEADERS, None, 5, request)%0A
|
|
30359b6e9ec105b2938cedd59127e5fa40964396
|
Create setrun.py
|
rect-shelf/setrun.py
|
rect-shelf/setrun.py
|
Python
| 0.000001 |
@@ -0,0 +1 @@
+%0A
|
|
39d2a5eec167e659cd30f5522a9e4e9ca11a620a
|
Create layoutUVPlus.py
|
af_scripts/uv/layoutUVPlus.py
|
af_scripts/uv/layoutUVPlus.py
|
Python
| 0 |
@@ -0,0 +1,374 @@
+import pymel.core as pm%0Aimport math%0Asels = pm.ls(sl=1)%0Agap = 0.003%0Afor i, x in enumerate(sels):%0A%09x=x.getShape()%0A%09pm.select('%7B0%7D.map%5B:%5D'.format(x), r=1)%0A%09buv = pm.polyEvaluate(x,b2=1)%0A%09w = abs(buv%5B0%5D%5B1%5D - buv%5B0%5D%5B0%5D)%0A%09if i==0:%0A%09%09pm.polyEditUV(u=-buv%5B0%5D%5B0%5D+(gap*(i+1)),v=-buv%5B1%5D%5B0%5D+gap)%0A%09else:%0A%09%09pm.polyEditUV(u=-buv%5B0%5D%5B0%5D+(w*i+gap*(i+1)),v=-buv%5B1%5D%5B0%5D+gap)%0Apm.select(sels,r=1)%0A
|
|
eee6b08e07e60a8ec1f3c2fa2e156344e01737d2
|
clean out
|
avatar/admin.py
|
avatar/admin.py
|
from django.contrib import admin
from django.utils.translation import ugettext_lazy as _
from django.utils import six
from django.template.loader import render_to_string
from avatar.models import Avatar
from avatar.signals import avatar_updated
from avatar.util import get_user_model


class AvatarAdmin(admin.ModelAdmin):
list_display = ('get_avatar', 'user', 'primary', "date_uploaded")
list_filter = ('primary',)
search_fields = ('user__%s' % getattr(get_user_model(), 'USERNAME_FIELD', 'username'),)
list_per_page = 50
def get_avatar(self, avatar_in):
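        # Render an 80px avatar preview for the admin changelist using the avatar_tag template.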
        context = {
            'user': avatar_in.user,
            'url': avatar_in.avatar.url,
            'alt': six.text_type(avatar_in.user),
            'size': 80,
        }
return render_to_string('avatar/avatar_tag.html',context)
get_avatar.short_description = _('Avatar')
get_avatar.allow_tags = True
def save_model(self, request, obj, form, change):
super(AvatarAdmin, self).save_model(request, obj, form, change)
avatar_updated.send(sender=Avatar, user=request.user, avatar=obj)


admin.site.register(Avatar, AvatarAdmin)
|
Python
| 0.000002 |
@@ -815,16 +815,17 @@
g.html',
+
context)
|
2d9300aeefc840e007d7c615ce48ad36343038f2
|
Add "--optimize-autoloader" to `composer.phar install` command
|
php-silex/setup.py
|
php-silex/setup.py
|
import subprocess
import sys
import setup_util
from os.path import expanduser
home = expanduser("~")


def start(args):
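  # Rewrite the benchmark config files in place with this host's database address and install paths.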
setup_util.replace_text("php-silex/web/index.php", "192.168.100.102", "" + args.database_host + "")
setup_util.replace_text("php-silex/deploy/php-silex", "\".*\/FrameworkBenchmarks", "\"" + home + "/FrameworkBenchmarks")
setup_util.replace_text("php-silex/deploy/php-silex", "Directory .*\/FrameworkBenchmarks", "Directory " + home + "/FrameworkBenchmarks")
setup_util.replace_text("php-silex/deploy/nginx.conf", "root .*\/FrameworkBenchmarks", "root " + home + "/FrameworkBenchmarks")
try:
#subprocess.check_call("sudo cp cake/deploy/cake /etc/apache2/sites-available/", shell=True)
#subprocess.check_call("sudo a2ensite cake", shell=True)
#subprocess.check_call("sudo chown -R www-data:www-data cake", shell=True)
#subprocess.check_call("sudo /etc/init.d/apache2 start", shell=True)
subprocess.check_call("composer.phar install", shell=True, cwd="php-silex")
subprocess.check_call("sudo php-fpm --fpm-config config/php-fpm.conf -g " + home + "/FrameworkBenchmarks/php-silex/deploy/php-fpm.pid", shell=True)
subprocess.check_call("sudo /usr/local/nginx/sbin/nginx -c " + home + "/FrameworkBenchmarks/php-silex/deploy/nginx.conf", shell=True)
return 0
except subprocess.CalledProcessError:
return 1


def stop():
try:
subprocess.call("sudo /usr/local/nginx/sbin/nginx -s stop", shell=True)
subprocess.call("sudo kill -QUIT $( cat php-silex/deploy/php-fpm.pid )", shell=True)
#subprocess.check_call("sudo a2dissite cake", shell=True)
#subprocess.check_call("sudo /etc/init.d/apache2 stop", shell=True)
#subprocess.check_call("sudo chown -R $USER:$USER cake", shell=True)
return 0
except subprocess.CalledProcessError:
return 1
|
Python
| 0.000004 |
@@ -1,17 +1,16 @@
-%0A
import subproces
@@ -973,16 +973,38 @@
install
+ --optimize-autoloader
%22, shell
|
26011563bf0880206269582a87f9fff61f262c83
|
add a new migration
|
osf/migrations/0056_citationstyle_has_bibliography.py
|
osf/migrations/0056_citationstyle_has_bibliography.py
|
Python
| 0.000001 |
@@ -0,0 +1,465 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.4 on 2017-08-29 14:25%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('osf', '0055_auto_20170823_1648'),%0A %5D%0A%0A operations = %5B%0A migrations.AddField(%0A model_name='citationstyle',%0A name='has_bibliography',%0A field=models.BooleanField(default=False),%0A ),%0A %5D%0A
|
|
d637cbe9c904fb0f0b67fbc10f66db299d153f4e
|
Add basic smoke tests for doc generation
|
tests/functional/test_docs.py
|
tests/functional/test_docs.py
|
Python
| 0 |
@@ -0,0 +1,1453 @@
+# Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22). You%0A# may not use this file except in compliance with the License. A copy of%0A# the License is located at%0A#%0A# http://aws.amazon.com/apache2.0/%0A#%0A# or in the %22license%22 file accompanying this file. This file is%0A# distributed on an %22AS IS%22 BASIS, WITHOUT WARRANTIES OR CONDITIONS OF%0A# ANY KIND, either express or implied. See the License for the specific%0A# language governing permissions and limitations under the License.%0Afrom tests import unittest%0A%0Aimport botocore.session%0Afrom boto3.docs import docs_for%0A%0A%0Aclass TestDocs(unittest.TestCase):%0A def setUp(self):%0A self.session = botocore.session.get_session()%0A%0A def test_resource_docs_generated(self):%0A docs_str = docs_for('s3', self.session)%0A self.assertIn('Service Resource', docs_str)%0A self.assertIn('A resource representing Amazon Simple Storage Service',%0A docs_str)%0A%0A def test_client_docs_generated(self):%0A docs_str = docs_for('s3', self.session)%0A self.assertIn('s3.Client', docs_str)%0A self.assertIn(%0A 'A low-level client representing Amazon Simple Storage Service',%0A docs_str)%0A%0A def test_waiter_docs_generated(self):%0A docs_str = docs_for('s3', self.session)%0A self.assertIn('Waiter', docs_str)%0A self.assertIn('bucket_exists', docs_str)%0A
|
|
8fb94de6b72847bdb618ffa60fa037d16bab443e
|
Add closing tests module
|
tests/plantcv/test_closing.py
|
tests/plantcv/test_closing.py
|
Python
| 0 |
@@ -0,0 +1,868 @@
+import pytest%0Aimport cv2%0Aimport numpy as np%0Afrom plantcv.plantcv import closing%0A%0A%0Adef test_closing(test_data):%0A # Read in test data%0A bin_img = cv2.imread(test_data.small_bin_img, -1)%0A filtered_img = closing(gray_img=bin_img)%0A # Assert that the output image has the dimensions of the input image and is binary%0A assert bin_img.shape == filtered_img.shape and np.array_equal(np.unique(filtered_img), np.array(%5B0, 255%5D))%0A%0A%0Adef test_closing_grayscale(test_data):%0A # Read in test data%0A gray_img = cv2.imread(test_data.small_gray_img, -1)%0A filtered_img = closing(gray_img=gray_img, kernel=np.ones((4, 4), np.uint8))%0A assert np.sum(filtered_img) == 33160632%0A%0A%0Adef test_closing_bad_input(test_data):%0A # Read in test data%0A rgb_img = cv2.imread(test_data.small_rgb_img)%0A with pytest.raises(RuntimeError):%0A _ = closing(gray_img=rgb_img)%0A
|
|
548f4f6512ced9a9c41a074a3c8382f87ccafa66
|
add image resizing to python script and give it a better name
|
xkcd1110_stitch.py
|
xkcd1110_stitch.py
|
Python
| 0.000001 |
@@ -0,0 +1,1673 @@
+#! /usr/bin/env python%0A%22%22%22Combines resized tiles grabbed from xkcd 1110 into one large png%22%22%22%0A%0Aimport Image%0Aimport os%0Aimport sys%0A%0Adef coord(image_x, image_y, tilesize):%0A '''%0A converts x, y coordinates to tile naming format%0A '''%0A%0A image_dir = %22images/%22%0A%0A if image_x %3E 0:%0A #east%0A lng = %22%25se%22 %25 (image_x)%0A else:%0A #west%0A lng = %22%25sw%22 %25 (-image_x+1)%0A%0A if image_y %3E 0:%0A #north%0A lat = %22%25sn%22 %25 (image_y)%0A else:%0A #south%0A lat = %22%25ss%22 %25 (-image_y+1)%0A%0A return_file = image_dir + lat + lng + %22.png%22%0A%0A if os.path.isfile(return_file):%0A return return_file%0A else:%0A # insert black or white tiles in the empty spots%0A if image_y %3E 0:%0A return image_dir + %22white.png%22%0A else:%0A return image_dir + %22black.png%22%0A%0Adef merge_images(xmin, xmax, ymin, ymax, tilesize) :%0A '''%0A combines tiles into one large image%0A '''%0A%0A out = Image.new('RGB', ((xmax-xmin+1) * tilesize, (ymax-ymin+1) * tilesize))%0A%0A imx = 0%0A for image_x in range(xmin, xmax+1) :%0A imy = 0%0A for image_y in range(ymin, ymax+1) :%0A #print image_x, image_y, %22-%3E%22,%0A #print coord(image_x, -image_y, tilesize), %22-%3E%22, imx, imy%0A tile = Image.open(coord(image_x, -image_y, tilesize))%0A resized_tile = tile.resize((tilesize, tilesize))%0A out.paste(tile, (imx, imy))%0A imy += tilesize%0A imx += tilesize%0A%0A out.save(%22xkcd_1110_combined_%25s.png%22 %25 (tilesize))%0A%0Atry:%0A input_arg = int(sys.argv%5B1%5D)%0A if 0 %3C input_arg %3C= 2048:%0A merge_images(-32, 48, -13, 18, input_arg)%0Aexcept ValueError:%0A sys.exit(-1)%0A
|
|
c226835aa56a2d5ba8583e63c4b75765cd24711d
|
add new package (#27971)
|
var/spack/repos/builtin/packages/py-zipfile-deflate64/package.py
|
var/spack/repos/builtin/packages/py-zipfile-deflate64/package.py
|
Python
| 0 |
@@ -0,0 +1,722 @@
+# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other%0A# Spack Project Developers. See the top-level COPYRIGHT file for details.%0A#%0A# SPDX-License-Identifier: (Apache-2.0 OR MIT)%0A%0Afrom spack import *%0A%0A%0Aclass PyZipfileDeflate64(PythonPackage):%0A %22%22%22Extract Deflate64 ZIP archives with Python's zipfile API.%22%22%22%0A%0A homepage = %22https://github.com/brianhelba/zipfile-deflate64%22%0A pypi = %22zipfile-deflate64/zipfile-deflate64-0.2.0.tar.gz%22%0A%0A version('0.2.0', sha256='875a3299de102edf1c17f8cafcc528b1ca80b62dc4814b9cb56867ec59fbfd18')%0A%0A depends_on('[email protected]:', type=('build', 'run'))%0A depends_on('py-setuptools@42:', type='build')%0A depends_on('[email protected]:+toml', type='build')%0A
|
|
8c6646d75ec6f9345e1582c02611984a1d953582
|
add reproducing case
|
tests/trac/test-issue-0092.py
|
tests/trac/test-issue-0092.py
|
Python
| 0.000141 |
@@ -0,0 +1,2173 @@
+# -*- coding: utf-8 -*-%0Afrom __future__ import unicode_literals%0Aimport logging%0Aimport pyxb.binding.generate%0Aimport pyxb.utils.domutils%0Aimport xml.dom.minidom as dom%0A%0A%0Aif __name__ == '__main__':%0A logging.basicConfig()%0A_log = logging.getLogger(__name__)%0A%0Axsd = '''%3C?xml version=%221.0%22 encoding=%22UTF-8%22?%3E%0A%3Cxs:schema xmlns:xs=%22http://www.w3.org/2001/XMLSchema%22%3E%0A %3Cxs:element name=%22HOST%22%3E%0A %3Cxs:complexType%3E%0A %3Cxs:sequence%3E%0A %3Cxs:element name=%22ID%22 type=%22xs:integer%22/%3E%0A %3Cxs:element name=%22TEMPLATE%22 type=%22xs:anyType%22/%3E%0A %3C/xs:sequence%3E%0A %3C/xs:complexType%3E%0A %3C/xs:element%3E%0A%3C/xs:schema%3E%0A'''%0A%0Acode = pyxb.binding.generate.GeneratePython(schema_text=xsd)%0A%0Arv = compile(code, 'test', 'exec')%0Aeval(rv)%0A%0Aimport unittest%0A%0Aclass TestIssue0092 (unittest.TestCase):%0A def testCreateEmptyTemplate (self):%0A xmlt = '%3CHOST%3E%3CID%3E1%3C/ID%3E%3CTEMPLATE/%3E%3C/HOST%3E';%0A xmld = xmlt.encode('utf-8');%0A doc = CreateFromDocument(xmld);%0A self.assertEqual(doc.ID,1)%0A%0A def testCreateToDom (self):%0A xmlt = '%3CHOST%3E%3CID%3E1%3C/ID%3E%3CTEMPLATE%3E%3CNODE%3E1%3C/NODE%3E%3C/TEMPLATE%3E%3C/HOST%3E';%0A xmld = xmlt.encode('utf-8');%0A doc = CreateFromDocument(xmld);%0A templateFragment=doc.TEMPLATE.toDOM()%0A self.assertEqual(templateFragment.toxml(), '''%3C?xml version=%221.0%22 ?%3E%3CTEMPLATE%3E%3CNODE%3E1%3C/NODE%3E%3C/TEMPLATE%3E''')%0A%0A def testCreateWithCDATAToDom (self):%0A xmlt = '%3CHOST%3E%3CID%3E1%3C/ID%3E%3CTEMPLATE%3E%3CNODE%3E%3C!%5BCDATA%5Btext%5D%5D%3E%3C/NODE%3E%3C/TEMPLATE%3E%3C/HOST%3E';%0A xmld = xmlt.encode('utf-8');%0A doc = CreateFromDocument(xmld);%0A templateFragment=doc.TEMPLATE.toDOM()%0A self.assertEqual(templateFragment.toxml(), '''%3C?xml version=%221.0%22 ?%3E%3CTEMPLATE%3E%3CNODE%3Etext%3C/NODE%3E%3C/TEMPLATE%3E''')%0A%0A def testCreateFromDOMWithCDATAToDom (self):%0A xmlt = '%3CHOST%3E%3CID%3E1%3C/ID%3E%3CTEMPLATE%3E%3CNODE%3E%3C!%5BCDATA%5Btext%5D%5D%3E%3C/NODE%3E%3C/TEMPLATE%3E%3C/HOST%3E';%0A xmld = xmlt.encode('utf-8');%0A domDoc=dom.parseString(xmld);%0A doc = CreateFromDOM(domDoc);%0A templateFragment=doc.TEMPLATE.toDOM()%0A self.assertEqual(templateFragment.toxml(), '''%3C?xml version=%221.0%22 ?%3E%3CTEMPLATE%3E%3CNODE%3Etext%3C/NODE%3E%3C/TEMPLATE%3E''')%0A%0A%0Aif __name__ == '__main__':%0A unittest.main()%0A
|
|
86d8f0fd48ccb577a8300362ea9d181e63d2fa5d
|
Add unit tests for bandit.core.issue
|
tests/unit/core/test_issue.py
|
tests/unit/core/test_issue.py
|
Python
| 0.000001 |
@@ -0,0 +1,2288 @@
+# -*- coding:utf-8 -*-%0A#%0A# Copyright 2015 Hewlett-Packard Development Company, L.P.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22); you may%0A# not use this file except in compliance with the License. You may obtain%0A# a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS, WITHOUT%0A# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the%0A# License for the specific language governing permissions and limitations%0A# under the License.%0A%0Aimport testtools%0A%0Aimport bandit%0Afrom bandit.core import issue%0A%0A%0Aclass IssueTests(testtools.TestCase):%0A%0A def test_issue_create(self):%0A new_issue = _get_issue_instance()%0A self.assertIsInstance(new_issue, issue.Issue)%0A%0A def test_issue_str(self):%0A test_issue = _get_issue_instance()%0A self.assertEqual(%0A (%22Issue: 'Test issue' from bandit_plugin: Severity: MEDIUM %22%0A %22Confidence: MEDIUM at code.py:1%22),%0A str(test_issue)%0A )%0A%0A def test_issue_as_dict(self):%0A test_issue = _get_issue_instance()%0A test_issue_dict = test_issue.as_dict(with_code=False)%0A self.assertIsInstance(test_issue_dict, dict)%0A for attr in %5B%0A 'filename', 'test_name', 'issue_severity', 'issue_confidence',%0A 'issue_text', 'line_number', 'line_range'%0A %5D:%0A self.assertIn(attr, test_issue_dict)%0A%0A def test_issue_filter(self):%0A test_issue = _get_issue_instance()%0A result = test_issue.filter(bandit.HIGH, bandit.HIGH)%0A self.assertFalse(result)%0A result = test_issue.filter(bandit.MEDIUM, bandit.MEDIUM)%0A self.assertTrue(result)%0A result = test_issue.filter(bandit.LOW, bandit.LOW)%0A self.assertTrue(result)%0A result = test_issue.filter(bandit.LOW, bandit.HIGH)%0A self.assertFalse(result)%0A result = test_issue.filter(bandit.HIGH, bandit.LOW)%0A self.assertFalse(result)%0A%0Adef _get_issue_instance():%0A new_issue = issue.Issue(bandit.MEDIUM, bandit.MEDIUM, 'Test issue')%0A new_issue.fname = 'code.py'%0A new_issue.test = 'bandit_plugin'%0A new_issue.lineno = 1%0A return new_issue%0A
|
|
3d935fcc7d2d2afb004348a8839f2ec7813fe78c
|
Add unbound performance plugin
|
satori-rules/plugin/unbound/30_unbound.py
|
satori-rules/plugin/unbound/30_unbound.py
|
Python
| 0 |
@@ -0,0 +1,1079 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0Afrom __future__ import absolute_import%0A%0A# -- prioritized --%0Aimport sys%0Aimport os.path%0A# sys.path.append(os.path.join(os.path.dirname(__file__), '../libs'))%0A%0A# -- stdlib --%0Aimport json%0Aimport re%0Aimport socket%0Aimport subprocess%0Aimport time%0A%0A# -- third party --%0A%0A# -- own --%0A%0A# -- code --%0Aendpoint = socket.gethostname()%0Ats = int(time.time())%0A%0Aproc = subprocess.Popen(%5B'/usr/sbin/unbound-control', 'stats'%5D, stdout=subprocess.PIPE)%0Astats = %7B%0A match%5B0%5D: float(match%5B1%5D)%0A for match in re.findall(r'(.*)%5C=(.*)', proc.stdout.read(), re.MULTILINE)%0A%7D%0A%0Arst = %7B%0A 'uptime': stats%5B'time.up'%5D,%0A 'queries.total': stats%5B'total.num.queries'%5D,%0A 'queries.pending': stats%5B'total.requestlist.current.all'%5D,%0A 'queries.hit_rate': (stats%5B'total.num.cachehits'%5D / stats%5B'total.num.queries'%5D) * 100,%0A%7D%0A%0Aprint json.dumps(%5B%0A %7B%0A %22metric%22: %22unbound.%7B%7D%22.format(k),%0A %22endpoint%22: endpoint,%0A %22timestamp%22: ts,%0A %22step%22: 30,%0A %22value%22: int(v),%0A %22tags%22: %7B%22server%22: endpoint%7D,%0A %7D%0A for k, v in rst.items()%0A%5D)%0A
|
|
a0714c8754c769c4fee868f2b449d9dc69d144a9
|
Add Welcome plugin to welcome new members
|
plugins/welcome.py
|
plugins/welcome.py
|
Python
| 0 |
@@ -0,0 +1,1158 @@
+import json%0Afrom plugin import Plugin%0A%0A%0Aclass Welcome(Plugin):%0A %22%22%22%0A Welcomes new members when they join the Slack team%0A %22%22%22%0A%0A def __init__(self):%0A Plugin.__init__(self)%0A self.event_type = 'team_join'%0A%0A def on_event(self, bot, event, response):%0A # Get list of all channels (don't include archived channels)%0A channel_response = bot.sc.api_call('channels.list', **%7B'exclude_archived': 1%7D)%0A # Convert string response to JSON%0A channel_response = json.loads(channel_response)%0A%0A # Find general channel%0A general_channel = None%0A if channel_response.get('ok'):%0A for channel in channel_response%5B'channels'%5D:%0A if channel.get('is_general'):%0A general_channel = channel%5B'id'%5D%0A%0A # Post welcome to general channel if one found%0A if general_channel:%0A user = event%5B'user'%5D%5B'id'%5D%0A response%5B'channel'%5D = general_channel%0A response%5B'link_names'%5D = 1 # Enables linking of names%0A response%5B'text'%5D = 'Welcome to the Slack team %3C@%25s%3E!' %25 user%0A bot.sc.api_call('chat.postMessage', **response)%0A
|
|
3ce048f8c0346c30173b52a691bd18ece1cbc13d
|
Add a TensorFlow Probability sample
|
scripts/stock_price/tough_question_tfp.py
|
scripts/stock_price/tough_question_tfp.py
|
Python
| 0.000184 |
@@ -0,0 +1,1862 @@
+#!/usr/bin/python3%0D%0A# coding: utf-8%0D%0A%0D%0A'''%0D%0AImplementation of the article below with TensorFlow Probability%0D%0A'Bayesian Methods for Hackers'%0D%0Ahttps://github.com/CamDavidsonPilon/Probabilistic-Programming-and-Bayesian-Methods-for-Hackers/blob/master/Chapter2_MorePyMC/Ch2_MorePyMC_PyMC3.ipynb%0D%0A%0D%0ABased on an example of TensorFlow Probability%0D%0Ahttps://github.com/tensorflow/probability/tree/master/tensorflow_probability/python/edward2%0D%0Ahttps://www.hellocybernetics.tech/entry/2018/11/09/231817%0D%0A'''%0D%0A%0D%0Aimport matplotlib.pyplot as plt%0D%0Aimport numpy as np%0D%0Aimport tensorflow as tf%0D%0Aimport tensorflow_probability as tfp%0D%0A## from tensorflow_probability import edward2 as ed%0D%0A%0D%0Atfd = tfp.distributions%0D%0A%0D%0AN = 1000%0D%0AX = 300%0D%0AN_RESULTS = 2000%0D%0AN_BURNIN = 1000%0D%0A%0D%0A## Explanatory variable(s)%0D%0Atrue_prob = tf.random_uniform(%5B%5D, minval=0.0, maxval=1.0)%0D%0A## Observed data%0D%0Aobservations = tf.random.shuffle(tf.concat(%5Btf.ones(X, dtype=tf.int32), tf.zeros(N-X, dtype=tf.int32)%5D, 0))%0D%0A%0D%0Adef target_log_prob_fn(true_prob):%0D%0A log_prob_parts = %5B%0D%0A tfd.Bernoulli(probs=0.5).log_prob(tf.fill(%5BN%5D, 1)) + tfd.Bernoulli(probs=true_prob).log_prob(observations),%0D%0A tfd.Bernoulli(probs=0.5).log_prob(tf.fill(%5BN%5D, 0)) + tfd.Bernoulli(probs=0.5).log_prob(observations)%0D%0A %5D%0D%0A sum_log_prob = tf.reduce_sum(tf.reduce_logsumexp(log_prob_parts, 0))%0D%0A return sum_log_prob%0D%0A%0D%0Ahmc_kernel = tfp.mcmc.HamiltonianMonteCarlo(%0D%0A target_log_prob_fn=target_log_prob_fn,%0D%0A step_size=0.01,%0D%0A num_leapfrog_steps=5)%0D%0A%0D%0Astates, kernels_results = tfp.mcmc.sample_chain(%0D%0A num_results=N_RESULTS,%0D%0A current_state=%5Btrue_prob%5D,%0D%0A kernel=hmc_kernel,%0D%0A num_burnin_steps=N_BURNIN)%0D%0A%0D%0Awith tf.Session() as sess:%0D%0A sess.run(tf.global_variables_initializer())%0D%0A states_, results_ = sess.run(%5Bstates, kernels_results%5D)%0D%0A plt.hist(states_%5B0%5D, bins=50)%0D%0A plt.show()%0D%0A
|
|
e42c115f8a612b3995e30b3606913acb7e7b0f63
|
Create 2-off.py
|
Code/2-off.py
|
Code/2-off.py
|
Python
| 0.000001 |
@@ -0,0 +1,252 @@
+import RPi.GPIO as GPIO%0AGPIO.setmode(GPIO.BCM)%0AGPIO.setwarnings(False)%0AGPIO.setup(18,GPIO.OUT)%0AGPIO.setup(23,GPIO.OUT)%0AGPIO.setup(24,GPIO.OUT)%0Aprint %22Lights off%22%0AGPIO.output(18,GPIO.LOW)%0AGPIO.output(23,GPIO.LOW)%0AGPIO.output(24,GPIO.LOW)%0AGPIO.cleanup()%0A
|
|
a2214039defb1094d47b7ce0abc4e56032136508
|
Add merge migration
|
osf/migrations/0137_merge_20181011_1525.py
|
osf/migrations/0137_merge_20181011_1525.py
|
Python
| 0.000001 |
@@ -0,0 +1,349 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.13 on 2018-10-11 15:25%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('osf', '0136_merge_20181010_2242'),%0A ('osf', '0136_add_ember_auth_register_waffle_flag'),%0A %5D%0A%0A operations = %5B%0A %5D%0A
|
|
1a3b9eec2a947a8f036fdea80a4d7de4b7549211
|
Add prime functions library
|
prime_functions.py
|
prime_functions.py
|
Python
| 0.000005 |
@@ -0,0 +1,770 @@
+import numpy as np%0Afrom math import sqrt%0A%0A%0Adef get_primes_below(n):%0A # http://stackoverflow.com/questions/2068372/fastest-way-to-list-all-primes-below-n-in-python/3035188#3035188%0A %22%22%22 Input n%3E=6, Returns a array of primes, 2 %3C= p %3C n %22%22%22%0A sieve = np.ones(n/3 + (n %25 6 == 2), dtype=np.bool)%0A sieve%5B0%5D = False%0A for i in xrange(int(n**0.5)/3+1):%0A if sieve%5Bi%5D:%0A k = 3*i+1 %7C 1%0A sieve%5B((k*k)/3)::2*k%5D = False%0A sieve%5B(k*k+4*k-2*k*(i & 1))/3::2*k%5D = False%0A return np.r_%5B2, 3, ((3*np.nonzero(sieve)%5B0%5D+1) %7C 1)%5D%0A%0A%0Adef is_prime(n):%0A if n == 2:%0A return True%0A if n %25 2 == 0 or n %3C= 1:%0A return False%0A for i in range(3, int(sqrt(n))+1, 2):%0A if n %25 i == 0:%0A return False%0A return True%0A
|
|
451a65d6b5cbc182418f00703e2da84b7c346a70
|
Create Dictionary._((glas+python?glaskrypt))
|
Dictionary.py
|
Dictionary.py
|
Python
| 0.00002 |
@@ -0,0 +1,803 @@
+#!/usr/bin/env python%0A%0A#%0A# Basic hack%0A#%0A# What?Needed??%0A# Bison like parser for js%0A# _WHY? Because I forget things easily%0A%0Aclass Dict(dict)%0A def __init__(self, keyd, *arguments,**context):%0A self._context = context%0A%0A# Elaborate on that%0Aclass Elaboration(Dict)%0A pass%0A%0A# To bind them together... I have no idea what the hell I am doing, here%0Aclass Dictionary(Elaboration):%0A def __init__(self, *args, **kw):%0A pass%0A%0A#%0A# Read the input(STDIN), and translate it on the output(STDOUT)%0A#%0Aclass py(object):%0A class code(dict):%0A pass%0A%0A#%0A#... Interface !translate%0A#%0Aclass Rune(py.code, dict, Dictionary):%0A pass%0A%0Adef translate(outputs, *runes):%0A rune = next(runes)%0A output.append(%0A translate(next(outputs), rune)%0A )%0A# Dictionary: %60py; ~ translate: %60%60glas.cup%0A
|
|
0ebddf569f291ceca050972fe9cfd3d9e498e87c
|
add timeout decorator
|
pyannote/audio/utils/timeout.py
|
pyannote/audio/utils/timeout.py
|
Python
| 0.000001 |
@@ -0,0 +1,1609 @@
+#!/usr/bin/env python%0A# encoding: utf-8%0A%0A# Shamelessly stolen from%0A# https://gist.github.com/TySkby/143190ad1b88c6115597c45f996b030c%0A%0A%22%22%22Easily put time restrictions on things%0A%0ANote: Requires Python 3.x%0A%0AUsage as a context manager:%0A%60%60%60%0Awith timeout(10):%0A something_that_should_not_exceed_ten_seconds()%0A%60%60%60%0A%0AUsage as a decorator:%0A%60%60%60%0A@timeout(10)%0Adef something_that_should_not_exceed_ten_seconds():%0A do_stuff_with_a_timeout()%0A%60%60%60%0A%0AHandle timeouts:%0A%60%60%60%0Atry:%0A with timeout(10):%0A something_that_should_not_exceed_ten_seconds()%0A except TimeoutError:%0A log('Got a timeout, couldn't finish')%0A%60%60%60%0A%0ASuppress TimeoutError and just die after expiration:%0A%60%60%60%0Awith timeout(10, suppress_timeout_errors=True):%0A something_that_should_not_exceed_ten_seconds()%0A%0Aprint('Maybe exceeded 10 seconds, but finished either way')%0A%60%60%60%0A%22%22%22%0Aimport contextlib%0Aimport errno%0Aimport os%0Aimport signal%0A%0A%0ADEFAULT_TIMEOUT_MESSAGE = os.strerror(errno.ETIME)%0A%0A%0Aclass timeout(contextlib.ContextDecorator):%0A def __init__(self, seconds, *, timeout_message=DEFAULT_TIMEOUT_MESSAGE, suppress_timeout_errors=False):%0A self.seconds = int(seconds)%0A self.timeout_message = timeout_message%0A self.suppress = bool(suppress_timeout_errors)%0A%0A def _timeout_handler(self, signum, frame):%0A raise TimeoutError(self.timeout_message)%0A%0A def __enter__(self):%0A signal.signal(signal.SIGALRM, self._timeout_handler)%0A signal.alarm(self.seconds)%0A%0A def __exit__(self, exc_type, exc_val, exc_tb):%0A signal.alarm(0)%0A if self.suppress and exc_type is TimeoutError:%0A return True%0A
|
|
927a49da0ac7fe633c72f6d08ed93710c1d71630
|
Refactor image alias tests to reduce copypasta
|
pylxd/tests/test_image_alias.py
|
pylxd/tests/test_image_alias.py
|
# Copyright (c) 2015 Canonical Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from ddt import data
from ddt import ddt
import mock
import unittest
from pylxd import api
from pylxd import connection
from pylxd.tests import annotated_data
from pylxd.tests import fake_api
@ddt
class LXDUnitTestAlias(unittest.TestCase):
def setUp(self):
super(LXDUnitTestAlias, self).setUp()
self.lxd = api.API()
def test_alias_list(self):
with mock.patch.object(connection.LXDConnection, 'get_object') as ms:
ms.return_value = ('200', fake_api.fake_alias_list())
self.assertEqual(['ubuntu'], self.lxd.alias_list())
ms.assert_called_once_with('GET', '/1.0/images/aliases')
@data(True, False)
def test_alias_defined(self, expected):
with mock.patch.object(connection.LXDConnection, 'get_status') as ms:
ms.return_value = expected
self.assertEqual(expected, self.lxd.alias_defined('fake'))
ms.assert_called_once_with('GET', '/1.0/images/aliases/fake')
def test_alias_show(self):
with mock.patch.object(connection.LXDConnection, 'get_object') as ms:
ms.return_value = ('200', fake_api.fake_alias())
self.assertEqual(
fake_api.fake_alias(), self.lxd.alias_show('fake')[1])
ms.assert_called_once_with('GET', '/1.0/images/aliases/fake')
@annotated_data(
('create', 'POST', '', ('fake',), ('"fake"',)),
('update', 'PUT', '/test-alias',
('test-alias', 'fake',), ('"fake"',)),
('rename', 'POST', '/test-alias',
('test-alias', 'fake',), ('"fake"',)),
('delete', 'DELETE', '/test-alias', ('test-alias',)),
)
def test_alias_operations(self, method, http, path, args, call_args=()):
with mock.patch.object(connection.LXDConnection, 'get_status') as ms:
ms.return_value = True
self.assertTrue(getattr(self.lxd, 'alias_' + method)(*args))
ms.assert_called_once_with(
http,
'/1.0/images/aliases' + path,
*call_args
)
|
Python
| 0 |
@@ -660,46 +660,8 @@
ock%0A
-import unittest%0A%0Afrom pylxd import api
%0Afro
@@ -763,269 +763,199 @@
api%0A
-%0A%0A@ddt%0Aclass LXDUnitTestAlias(unittest.TestCase):%0A%0A def setUp(self):%0A super(LXDUnitTestAlias, self).setUp()%0A self.lxd = api.API()%0A%0A def test_alias_list(self):%0A with mock.patch.object(connection.LXDConnection, 'get_object') as ms:%0A
+from pylxd.tests import LXDAPITestBase%0A%0A%0A@ddt%[email protected](connection.LXDConnection, 'get_object')%0Aclass LXDAPIImageAliasTestObject(LXDAPITestBase):%0A%0A def test_alias_list(self, ms):%0A
@@ -1012,36 +1012,32 @@
list())%0A
-
-
self.assertEqual
@@ -1068,28 +1068,24 @@
ias_list())%0A
-
ms.a
@@ -1146,147 +1146,37 @@
-@data(True, False)%0A def test_alias_defined(self, expected):%0A with mock.patch.object(connection.LXDConnection, 'get_status') as ms
+def test_alias_show(self, ms)
:%0A
@@ -1173,36 +1173,32 @@
f, ms):%0A
-
ms.return_value
@@ -1203,54 +1203,98 @@
e =
-expected%0A self.assertEqual(expected
+('200', fake_api.fake_alias())%0A self.assertEqual(%0A fake_api.fake_alias()
, se
@@ -1310,23 +1310,20 @@
ias_
-defined
+show
('fake')
)%0A
@@ -1318,22 +1318,21 @@
('fake')
-)%0A
+%5B1%5D)%0A
@@ -1398,52 +1398,15 @@
')%0A%0A
- def test_alias_show(self):%0A with
+%0A@ddt%0A@
mock
@@ -1454,96 +1454,182 @@
get_
-object') as ms
+status')%0Aclass LXDAPIImageAliasTestStatus(LXDAPITestBase)
:%0A
+%0A
- ms.return_value = ('200', fake_api.fake_alias())%0A
+@data(True, False)%0A def test_alias_defined(self, expected, ms):%0A ms.return_value = expected%0A
sel
@@ -1616,33 +1616,32 @@
xpected%0A
-
self.assertEqual
@@ -1645,46 +1645,16 @@
ual(
-%0A fake_api.fake_alias()
+expected
, se
@@ -1662,36 +1662,39 @@
f.lxd.alias_
-show
+defined
('fake')
%5B1%5D)%0A
@@ -1677,33 +1677,26 @@
ined('fake')
-%5B1%5D)%0A
+)%0A
ms.a
@@ -2069,16 +2069,20 @@
alias',)
+, ()
),%0A )
@@ -2157,131 +2157,15 @@
args
-=()):%0A with mock.patch.object(connection.LXDConnection, 'get_status') as ms:%0A ms.return_value = True%0A
+, ms):%0A
@@ -2233,28 +2233,24 @@
s))%0A
-
ms.assert_ca
@@ -2281,22 +2281,14 @@
-
-
http,%0A
-
@@ -2337,20 +2337,16 @@
-
*call_ar
@@ -2348,20 +2348,16 @@
ll_args%0A
-
|
8276553a34c043e316c0ea2c5963c0375336f545
|
fix 'done' is not found bug
|
btc/btclient.py
|
btc/btclient.py
|
import re, os
import json, sys
import argparse
import fileinput
import utils
class BTClientError(Exception):
pass
class BTClient:
def __init__(self, decoder, host='127.0.0.1', port=8080, username='admin', password=''):
self.host = host
self.port = port
self.username = username
self.password = password
self.decoder = decoder
def get_token_argument(self):
response = self.send_command(root='/gui/token.html', token=False)
l = re.findall(r"<html><div id='token' style='display:none;'>(.*)</div></html>", response)
return l[0]
def send_command(self, params='', root='/gui/', token=True,
torrent_file=None, username=None, password=None):
if username is None:
username = self.username
if password is None:
password = self.password
host = '%s:%s' % (self.host, self.port)
if token:
token = self.get_token_argument()
params = 'token=%s&%s' % (token, params)
if params:
url = '%s?%s' % (root, params)
else:
url = root
if torrent_file:
ret = utils.post_multipart(host, url, [('torrent_file', torrent_file)],
[], username, password)
else:
ret = utils.get(host, url, username, password)
try:
ret_json = json.loads(ret)
if 'error' in ret_json:
raise BTClientError(ret_json['error'])
except BTClientError:
raise
except: # Output might not be JSON
pass
return ret
def list_torrents(self):
return self.torrent_list(self.send_command('list=1'))
def add_torrent_url(self, url):
self.send_command('action=add-url&s=%s' % url)
def add_torrent_file(self, torrent_file_path):
torrent_file = open(torrent_file_path, 'rb')
self.send_command('action=add-file', torrent_file=torrent_file.read())
torrent_file.close()
def remove_torrent(self, thash, keep_data=True, keep_torrent=False):
cmd = None
if keep_data and keep_torrent:
cmd = 'action=remove&hash=%s'
elif keep_data and not keep_torrent:
cmd = 'action=removetorrent&hash=%s'
elif keep_torrent and not keep_data:
cmd = 'action=removedata&hash=%s'
elif not keep_torrent and not keep_data:
cmd = 'action=removedatatorrent&hash=%s'
self.send_command(cmd % thash)
def stop_torrent(self, thash):
self.send_command('action=stop&hash=%s' % thash)
def start_torrent(self, thash):
self.send_command('action=start&hash=%s' % thash)
def torrent_files(self, thash, sids={}):
if isinstance(thash, list):
if len(thash) == 0:
return {}
thash = '&hash='.join(thash)
l = self.send_command('action=getfiles&format=json&hash=%s' % thash)
return self.files_dict(l, sids)
def torrent_download_file(self, sid, fileid, name, path='.'):
cmd = 'sid=%s&file=%d&service=DOWNLOAD&qos=0&disposition=inline' % (sid, fileid)
content = self.send_command(cmd, root='/proxy', token=False)
filename = os.path.join(path, name)
f = open(filename, 'w')
f.write(content)
f.close()
def torrent_stream_url(self, sid, fileid):
return 'http://%s:%s@%s:%d/proxy?sid=%s&file=%d&service=DOWNLOAD&qos=0&disposition=inline' % \
(self.username, self.password, self.host, self.port, sid, fileid)
def torrent_list(self, response):
response_dict = self.decoder.decode(response)
response = []
for torrent_response in response_dict['torrents']:
torrent_dict = {}
response.append(torrent_dict)
torrent_dict['hash'] = str(torrent_response[0].upper())
torrent_dict['name'] = torrent_response[2]
torrent_dict['size'] = torrent_response[3]
torrent_dict['downloaded'] = torrent_response[5]
torrent_dict['uploaded'] = torrent_response[6]
torrent_dict['eta'] = torrent_response[10]
torrent_dict['peers_connected'] = torrent_response[12]
torrent_dict['seeds_connected'] = torrent_response[14]
torrent_dict['sid'] = torrent_response[22]
torrent_dict['ul_rate'] = torrent_response[8]
torrent_dict['dl_rate'] = torrent_response[9]
if torrent_dict['size'] != 0:
torrent_dict['progress'] = round(100 * float(torrent_dict['done']) / torrent_dict['size'], 2)
else:
torrent_dict['progress'] = 0
state = torrent_response[21]
state = state.upper()
state = state.replace('[F] ', '')
state = state.replace(' ', '_')
state = re.sub(r'CHECKED.*', 'CHECKING', state)
state = re.sub(r'ERROR.*', 'ERROR', state)
torrent_dict['state'] = state
return response
def files_dict(self, response, sids={}):
response_dict = self.decoder.decode(response)
response = list()
h = None
for e in response_dict['files']:
if isinstance(e, unicode):
h = e.upper()
elif isinstance(e, list):
i = 0
for l in e:
f = dict()
if h in sids:
f['sid'] = sids[h]
f['fileid'] = i
f['hash'] = h.upper()
f['name'] = l[0]
f['size'] = l[1]
f['downloaded'] = l[2]
f['priority'] = l[3]
f['progress'] = round(100 * float(f['downloaded']) / f['size'])
response.append(f)
i += 1
return response
|
Python
| 0.000022 |
@@ -4619,18 +4619,24 @@
dict%5B'do
-ne
+wnloaded
'%5D) / to
|
ca0bec705a6c68c7540c9b7f0a02972e1f26723c
|
Create py-递归设置.py
|
py-递归设置.py
|
py-递归设置.py
|
Python
| 0.000001 |
@@ -0,0 +1,251 @@
+#!/usr/bin/python%0A# -*- encoding:utf-8 -*-%0Aimport sys%0Asys.setrecursionlimit(1500) # set the maximum depth as 1500%0A%0Adef recursion(n): %0A if(n %3C= 0): %0A return %0A print n %0A recursion(n - 1) %0A%0Aif __name__ == %22__main__%22:%0A recursion(1200)%0A
|
|
61419ddc8db5f393bd79d200fc09424721877729
|
Change BG color of None state TIs
|
airflow/utils/state.py
|
airflow/utils/state.py
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import unicode_literals
from builtins import object
class State(object):
"""
Static class with task instance states constants and color method to
avoid hardcoding.
"""
# scheduler
NONE = None
REMOVED = "removed"
SCHEDULED = "scheduled"
# set by the executor (t.b.d.)
# LAUNCHED = "launched"
# set by a task
QUEUED = "queued"
RUNNING = "running"
SUCCESS = "success"
SHUTDOWN = "shutdown" # External request to shut down
FAILED = "failed"
UP_FOR_RETRY = "up_for_retry"
UPSTREAM_FAILED = "upstream_failed"
SKIPPED = "skipped"
task_states = (
SUCCESS,
RUNNING,
FAILED,
UPSTREAM_FAILED,
UP_FOR_RETRY,
QUEUED,
)
dag_states = (
SUCCESS,
RUNNING,
FAILED,
)
state_color = {
QUEUED: 'gray',
RUNNING: 'lime',
SUCCESS: 'green',
SHUTDOWN: 'blue',
FAILED: 'red',
UP_FOR_RETRY: 'gold',
UPSTREAM_FAILED: 'orange',
SKIPPED: 'pink',
REMOVED: 'lightgrey',
SCHEDULED: 'white',
}
@classmethod
def color(cls, state):
if state in cls.state_color:
return cls.state_color[state]
else:
return 'white'
@classmethod
def color_fg(cls, state):
color = cls.color(state)
if color in ['green', 'red']:
return 'white'
else:
return 'black'
@classmethod
def finished(cls):
"""
A list of states indicating that a task started and completed a
run attempt. Note that the attempt could have resulted in failure or
have been interrupted; in any case, it is no longer running.
"""
return [
cls.SUCCESS,
cls.SHUTDOWN,
cls.FAILED,
cls.SKIPPED,
]
@classmethod
def unfinished(cls):
"""
A list of states indicating that a task either has not completed
a run or has not even started.
"""
return [
cls.NONE,
cls.SCHEDULED,
cls.QUEUED,
cls.RUNNING,
cls.UP_FOR_RETRY
]
|
Python
| 0 |
@@ -1313,24 +1313,38 @@
QUEUED,%0A
+ NONE,%0A
)%0A%0A d
@@ -1702,24 +1702,51 @@
D: 'white',%0A
+ NONE: 'lightblue',%0A
%7D%0A%0A @
|
ad7d04f73637d6228b82fbb89d51c13844cb1025
|
Fix styling of the invites icon
|
shell/view/frame/ActivitiesBox.py
|
shell/view/frame/ActivitiesBox.py
|
# Copyright (C) 2006, Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import hippo
import logging
import conf
from sugar.graphics.canvasicon import CanvasIcon
from sugar.presence import PresenceService
from sugar.graphics import style
class ActivityItem(CanvasIcon):
def __init__(self, activity):
icon_name = activity.get_icon()
CanvasIcon.__init__(self, icon_name=icon_name)
style.apply_stylesheet(self, 'frame.ActivityIcon')
self._activity = activity
def get_bundle_id(self):
return self._activity.get_id()
class InviteItem(CanvasIcon):
def __init__(self, invite):
CanvasIcon.__init__(self, icon_name=invite.get_icon(),
color=invite.get_color())
self._invite = invite
def get_activity_id(self):
return self._invite.get_activity_id()
def get_bundle_id(self):
return self._invite.get_bundle_id()
def get_invite(self):
return self._invite
class ActivitiesBox(hippo.CanvasBox):
def __init__(self, shell):
hippo.CanvasBox.__init__(self, orientation=hippo.ORIENTATION_HORIZONTAL)
self._shell = shell
self._invite_to_item = {}
self._invites = self._shell.get_model().get_invites()
registry = conf.get_activity_registry()
for activity in registry.list_activities():
if activity.get_show_launcher():
self.add_activity(activity)
for invite in self._invites:
self.add_invite(invite)
self._invites.connect('invite-added', self._invite_added_cb)
self._invites.connect('invite-removed', self._invite_removed_cb)
def _activity_clicked_cb(self, icon):
self._shell.start_activity(icon.get_bundle_id())
def _invite_clicked_cb(self, icon):
self._invites.remove_invite(icon.get_invite())
self._shell.join_activity(icon.get_bundle_id(),
icon.get_activity_id())
def _invite_added_cb(self, invites, invite):
self.add_invite(invite)
def _invite_removed_cb(self, invites, invite):
self.remove_invite(invite)
def add_activity(self, activity):
item = ActivityItem(activity)
item.connect('activated', self._activity_clicked_cb)
self.append(item, 0)
def add_invite(self, invite):
item = InviteItem(invite)
item.connect('activated', self._invite_clicked_cb)
self.append(item, 0)
self._invite_to_item[invite] = item
def remove_invite(self, invite):
self.remove(self._invite_to_item[invite])
del self._invite_to_item[invite]
|
Python
| 0 |
@@ -1306,26 +1306,85 @@
on()
-,%0A%09%09%09%09%09%09
+)%0A%0A%09%09style.apply_stylesheet(self, 'frame.ActivityIcon')%0A%09%09self.props.
color
-=
+ =
invi
@@ -1397,17 +1397,17 @@
_color()
-)
+%0A
%0A%09%09self.
|
5ade8c78e6ab875047ca29779dc37f9029a9f0d6
|
Create set_auth.py
|
bluemix/set_auth.py
|
bluemix/set_auth.py
|
Python
| 0.000003 |
@@ -0,0 +1,315 @@
+import airflow%0Afrom airflow import models, settings%0Afrom airflow.contrib.auth.backends.password_auth import PasswordUser%0Auser = PasswordUser(models.User())%0Auser.username = 'username'%0Auser.email = '[email protected]'%0Auser.password = 'pwd'%0Asession = settings.Session()%0Asession.add(user)%0Asession.commit()%0Asession.close()%0A
|
|
cb5b85fc4a011f7eb9628b7099311b399f4d033d
|
Create born_on_a_friday.py
|
born_on_a_friday.py
|
born_on_a_friday.py
|
Python
| 0.000048 |
@@ -0,0 +1,511 @@
+#!/usr/bin/env python3%0A%0Afrom datetime import datetime%0Afrom typing import Tuple%0A%0A%0Adef ask_month_day_year(prompt: str = %22Enter your birthday%22) -%3E Tuple%5Bint, int, int%5D:%0A date = input(f%22%7Bprompt%7D in the format: MM/DD/YYYY %22)%0A month, day, year = (int(x.strip()) for x in date.split(%22/%22))%0A return month, day, year%0A%0A%0Adef day_of_the_week(year, month, day):%0A return f%22%7Bdatetime(year, month, day):%25A%7D%22%0A%0A%0Amonth, day, year = ask_month_day_year()%0Aprint(f%22You were born on a %7Bday_of_the_week(year, month, day)%7D.%22)%0A
|
|
576dd7270714ec63beab9ce6af22f94e20dc1dd5
|
Add admin model classes to forum_tracking app
|
machina/apps/forum_tracking/admin.py
|
machina/apps/forum_tracking/admin.py
|
Python
| 0 |
@@ -0,0 +1,768 @@
+# -*- coding: utf-8 -*-%0A%0A# Standard library imports%0Afrom __future__ import unicode_literals%0A%0A# Third party imports%0Afrom django.contrib import admin%0A%0A# Local application / specific library imports%0Afrom machina.core.db.models import get_model%0A%0AForumReadTrack = get_model('forum_tracking', 'ForumReadTrack')%0ATopicReadTrack = get_model('forum_tracking', 'TopicReadTrack')%0A%0A%0Aclass ForumReadTrackAdmin(admin.ModelAdmin):%0A list_display = ('__str__', 'user', 'forum', 'mark_time',)%0A list_filter = ('mark_time',)%0A%0A%0Aclass TopicReadTrackAdmin(admin.ModelAdmin):%0A list_display = ('__str__', 'user', 'topic', 'mark_time',)%0A list_filter = ('mark_time',)%0A%0A%0Aadmin.site.register(ForumReadTrack, ForumReadTrackAdmin)%0Aadmin.site.register(TopicReadTrack, TopicReadTrackAdmin)%0A
|
|
2f7e3cf34e8460565d572507c2f97b98ac653036
|
Allow overriding of DEFAULT_ENV_PREFIX
|
cbs/__init__.py
|
cbs/__init__.py
|
from functools import partial
import importlib
import inspect
import os
from django.utils import six
from .utils import as_bool
DEFAULT_ENV_PREFIX = ''
class env(object):
'''
Decorator to make environ based settings simpler.
@env
def SOMETHING_KEY(self):
return 'default'
You can override the key to use in the env:
@env(key='OTHER_NAME')
def SETTINGS_NAME(self):
...
Or, if you want the env to have a prefix not in settings:
@env(prefix='MY_')
def SETTING(self):
...
``key`` and ``prefix`` can be used together.
You can pass a type caster / validator:
@env(type=int)
def SETTING(self):
'''
def __new__(cls, *args, **kwargs):
if not args:
return partial(env, **kwargs)
return object.__new__(cls)
def __init__(self, getter, key=None, type=None, prefix=DEFAULT_ENV_PREFIX):
self.getter = getter
self.type = type
key = key or getter.__name__
self.key = ''.join([prefix, key])
def __get__(self, obj, type=None):
if obj is None:
return self
try:
value = os.environ[self.key]
except KeyError:
value = self.getter(self)
obj.__dict__[self.getter.__name__] = value
if self.type:
value = self.type(value)
return value
class envbool(env):
'''
A special case of env that returns a boolean.
'''
def __init__(self, *args, **kwargs):
kwargs.setdefault(type=as_bool)
super(envbool, self).__init__(*args, **kwargs)
def apply(name, to):
'''
Apply settings to ``to``, which is expected to be globals().
Place at the end of settings.py / settings/__init__.py to apply a given
settings class.
Pass a settings class:
cbs.apply(MySettings, globals())
Pass a class name:
cbs.apply('MySettings', globals())
Pass an import path:
cbs.apply('settings.my.MySettings', globals())
'''
if isinstance(name, six.string_types):
if '.' in name:
module, obj_name = name.rsplit('.', 1)
module = importlib.import_module(module)
obj = getattr(module, obj_name)
else:
obj = to.get(name)
else:
obj = name
if obj is None:
raise ValueError('Could not find settings class: %r', name)
settings = obj()
def resolve_callable(value):
if callable(value):
return value()
return value
to.update({
key: resolve_callable(getattr(settings, key))
for key in dir(settings)
if key == key.upper()
})
from django import VERSION
base = importlib.import_module('cbs.base.django{}{}'.format(*VERSION[:2]))
BaseSettings = getattr(base, 'Base{}{}Settings'.format(*VERSION[:2]))
|
Python
| 0.00027 |
@@ -881,34 +881,20 @@
prefix=
-DEFAULT_ENV_PREFIX
+None
):%0A
@@ -979,16 +979,83 @@
_name__%0A
+ if prefix is None:%0A prefix = DEFAULT_ENV_PREFIX%0A
|
a59f86ea4905534237f9a1e055bce6a3a3d5fb81
|
add migration so edit_messaging is set to True when the role has edit_data set to True
|
corehq/apps/users/migrations/0037_add_edit_messaging_permission.py
|
corehq/apps/users/migrations/0037_add_edit_messaging_permission.py
|
Python
| 0.012809 |
@@ -0,0 +1,1008 @@
+%0Afrom django.db import migrations%0A%0Afrom corehq.apps.users.models_role import SQLPermission, UserRole%0Afrom corehq.util.django_migrations import skip_on_fresh_install%0A%0A%0A@skip_on_fresh_install%0Adef migrate_edit_migrations_permissions(apps, schema_editor):%0A permission, created = SQLPermission.objects.get_or_create(value='edit_messaging')%0A edit_data_permission = SQLPermission.objects.get(value='edit_data')%0A role_ids_with_edit_data = set(UserRole.objects.filter(rolepermission__permission_fk_id=edit_data_permission.id)%0A .values_list(%22id%22, flat=True))%0A for role in UserRole.objects.filter(id__in=role_ids_with_edit_data):%0A role.rolepermission_set.get_or_create(permission_fk=permission, defaults=%7B%22allow_all%22: True%7D)%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('users', '0036_reset_user_history_records'),%0A %5D%0A%0A operations = %5B%0A migrations.RunPython(migrate_edit_migrations_permissions, migrations.RunPython.noop)%0A %5D%0A
|
|
72467acd590ae5a3494e5059ce2ba99cf656baaa
|
Add IsAdminOrReadOnly permission class
|
registries/permissions.py
|
registries/permissions.py
|
Python
| 0.000002 |
@@ -0,0 +1,387 @@
+from rest_framework.permissions import IsAdminUser, SAFE_METHODS%0A%0Aclass IsAdminOrReadOnly(IsAdminUser):%0A %22%22%22%0A Allows read-only access to all users (including anonymous users) and write access to admin users only%0A %22%22%22%0A%0A def has_permission(self, request, view):%0A is_admin = super().has_permission(request, view)%0A return is_admin or request.method in SAFE_METHODS%0A
|