commit
stringlengths 40
40
| subject
stringlengths 1
3.25k
| old_file
stringlengths 4
311
| new_file
stringlengths 4
311
| old_contents
stringlengths 0
26.3k
| lang
stringclasses 3
values | proba
float64 0
1
| diff
stringlengths 0
7.82k
|
---|---|---|---|---|---|---|---|
5e7f29a66e440e440b1f7a4848d17bb7ae01139b
|
Update from template.
|
ci/bootstrap.py
|
ci/bootstrap.py
|
Python
| 0 |
@@ -0,0 +1,2189 @@
+#!/usr/bin/env python%0A# -*- coding: utf-8 -*-%0Afrom __future__ import absolute_import, print_function, unicode_literals%0A%0Aimport os%0Aimport sys%0Afrom os.path import exists%0Afrom os.path import join%0A%0A%0Aif __name__ == %22__main__%22:%0A base_path = join(%22.tox%22, %22configure%22)%0A if sys.platform == %22win32%22:%0A bin_path = join(base_path, %22Scripts%22)%0A else:%0A bin_path = join(base_path, %22bin%22)%0A if not exists(base_path):%0A import subprocess%0A print(%22Bootstrapping ...%22)%0A try:%0A subprocess.check_call(%5B%22virtualenv%22, base_path%5D)%0A except Exception:%0A subprocess.check_call(%5Bsys.executable, %22-m%22, %22virtualenv%22, base_path%5D)%0A print(%22Installing %60jinja2%60 and %60matrix%60 into bootstrap environment ...%22)%0A subprocess.check_call(%5Bjoin(bin_path, %22pip%22), %22install%22, %22jinja2%22, %22matrix%22%5D)%0A activate = join(bin_path, %22activate_this.py%22)%0A exec(compile(open(activate, %22rb%22).read(), activate, %22exec%22), dict(__file__=activate))%0A%0A import jinja2%0A import matrix%0A%0A jinja = jinja2.Environment(%0A loader=jinja2.FileSystemLoader(join(%22ci%22, %22templates%22)),%0A trim_blocks=True,%0A lstrip_blocks=True,%0A keep_trailing_newline=True%0A )%0A tox_environments = %7B%7D%0A for (alias, conf) in matrix.from_file(%22setup.cfg%22).items():%0A python = conf%5B%22python_versions%22%5D%0A deps = conf%5B%22dependencies%22%5D%0A if %22coverage_flags%22 in conf:%0A cover = %7B%22false%22: False, %22true%22: True%7D%5Bconf%5B%22coverage_flags%22%5D.lower()%5D%0A if %22environment_variables%22 in conf:%0A env_vars = conf%5B%22environment_variables%22%5D%0A%0A tox_environments%5Balias%5D = %7B%0A %22python%22: %22python%22 + python if %22py%22 not in python else python,%0A %22deps%22: deps.split(),%0A %7D%0A if %22coverage_flags%22 in conf:%0A tox_environments%5Balias%5D.update(cover=cover)%0A if %22environment_variables%22 in conf:%0A 
tox_environments%5Balias%5D.update(env_vars=env_vars.split())%0A%0A for name in os.listdir(join(%22ci%22, %22templates%22)):%0A with open(name, %22w%22) as fh:%0A fh.write(jinja.get_template(name).render(tox_environments=tox_environments))%0A print(%22Wrote %7B%7D%22.format(name))%0A print(%22DONE.%22)%0A
|
|
db76495b4f41021e2613d79b6f5cb30c96fb4290
|
Add PriorityStore and PriorityItem
|
desmod/prioritystore.py
|
desmod/prioritystore.py
|
Python
| 0 |
@@ -0,0 +1,517 @@
+from collections import namedtuple%0Afrom heapq import heappush, heappop%0A%0Afrom simpy import Store%0A%0A%0Aclass PriorityItem(namedtuple('PriorityItem', 'priority item')):%0A def __lt__(self, other):%0A return self.priority %3C other.priority%0A%0A%0Aclass PriorityStore(Store):%0A def _do_put(self, event):%0A if len(self.items) %3C self._capacity:%0A heappush(self.items, event.item)%0A event.succeed()%0A%0A def _do_get(self, event):%0A if self.items:%0A event.succeed(heappop(self.items))%0A
|
|
c74170968b3a200f17af083f027fe3b657cf6041
|
Add giveup function (#12)
|
singer/requests.py
|
singer/requests.py
|
Python
| 0 |
@@ -0,0 +1,210 @@
+def giveup_on_http_4xx_except_429(error):%0A response = error.response%0A if response is None:%0A return False%0A return not (response.status_code == 429 or%0A response.status_code %3E= 500)%0A
|
|
b59745e72a3c0a98da517f00e95fbafcff0cee3d
|
Remove unnecessary import.
|
tests/test_charts_snep_input.py
|
tests/test_charts_snep_input.py
|
Python
| 0 |
@@ -0,0 +1,421 @@
+from __future__ import unicode_literals, division, absolute_import%0A%0Afrom tests import FlexGetBase, use_vcr%0A%0Aclass TestChartsSnepInput(FlexGetBase):%0A __yaml__ = %22%22%22%0A tasks:%0A test:%0A charts_snep_input: radio%0A %22%22%22%0A%0A @use_vcr%0A def test_input(self):%0A self.execute_task('test')%0A assert len(self.task.entries) == 60, 'Produces %25i entries, expected 60' %25 len(self.task.entries)
|
|
ce01cc61ea62c717503d991826e6b9915b23900b
|
Fix usage print
|
check-bugs.py
|
check-bugs.py
|
#!/usr/bin/python
import sys
import re
import pprint
import collections
Head = collections.namedtuple("Head", "file line")
def parse(pn):
ans = collections.defaultdict(str)
head = None
for l in open(pn):
# ignore comments
if l.startswith("#"):
continue
# found a header
m = re.match("^\[(\S+):(\d+)\]+.*", l)
if m:
head = Head._make(m.groups())
continue
# collect descriptions
if head:
ans[head] += l
# chomp
return dict((h, d.strip()) for (h, d) in ans.items())
def say_pass(reason):
print "\033[1;32mPASS\033[m", reason
def say_fail(reason):
print "\033[1;31mFAIL\033[m", reason
def stat_summary(ans):
print("Summary:")
for (h, d) in ans.items():
desc = d.split("\n")[0]
print(" %-8s %+4s | %-30s .." % (h.file, h.line, desc))
if len(ans) >= 5:
say_pass("found enough bugs")
else:
say_fail("found %s bugs, but need at least 5" % len(ans))
if __name__ == "__main__":
if len(sys.argv) != 2:
print("usage: %s [bugs.txt]", sys.argv[0])
exit(1)
ans = parse(sys.argv[1])
stat_summary(ans)
|
Python
| 0.000255 |
@@ -1120,17 +1120,18 @@
gs.txt%5D%22
-,
+ %25
sys.arg
|
c1695c6ddfb6889b193b1d076e839488549e671b
|
Add note option to tack on personal notes to top of emails. typically Sorry...
|
dj/scripts/email_ab.py
|
dj/scripts/email_ab.py
|
#!/usr/bin/python
# email_ab.py
# abstract email class
from django.core.mail import get_connection, EmailMessage
from django.template import Context, Template
from process import process
from django.conf import settings
import random
class email_ab(process):
subject_template = "stub testing:{{ep.name}}"
body_header = """
Hi,
This is Veyepar, the automated video processing system.
{% if ep.reviewers %}
Note to {{ep.reviewers}}: You get to be in on this because presenters have plenty to do and we don't want to burn them out, so please help out and look things over for them. What is expected of a reviewer: Hurry up and watch the video. https://github.com/CarlFK/veyepar/wiki/Reviewer Thanks!
{% endif %}
"""
body_body = "stub testing:{{ep.description}}"
body_footer = """
Email generated by https://github.com/CarlFK/veyepar/blob/master/dj/scripts/{{py_name}}
but replies go to real people.
Reference: https://veyepar.nextdayvideo.com/main/E/{{ep.id}}/
"""
def context(self, ep):
# collect values to be used by the templates.
ctx = { 'ep':ep,
'py_name': "email_ab.py",
# 'MEDIA_URL':settings.MEDIA_URL,
}
return ctx
def mk_body(self, ep):
context = self.context(ep)
body_template = \
self.body_header + self.body_body + self.body_footer
if not ep.emails:
body_alert = """
Hello show organizer(s)!
This item does not have an email address, so it is getting sent to you.
Please review and forward it on to the presenter.
In case it isn't clear what this item is about, here is some context:
name: {{ep.name}}
authors: {{ep.authors}}
reviewers: {{ep.reviewers}}
released: {{ep.released}}
conf_url: {{ep.conf_url}}
conf_key: {{ep.conf_key}}
room: {{ep.location}}
start: {{ep.start}}
What follows is what was intended to be sent to the presenter:
"""
body_template = body_alert + body_template\
body = Template(
body_template
).render(Context(context, autoescape=False))
return body
def process_ep(self, ep):
# if there is no email, use the client's.
# like for lightning talks.
emails = ep.emails or ep.show.client.contacts
if self.options.verbose: print(emails)
if emails:
tos = [e.strip() for e in emails.split(',')]
subject = Template(self.subject_template).render(
Context({'ep':ep}, autoescape=False))
body = self.mk_body(ep)
sender = settings.EMAIL_SENDER
ccs = [cc.strip() for cc in settings.EMAIL_CC.split(',')]
ccs.extend([cc.strip() for cc in ep.reviewers.split(',')])
ccs = list(set([a.strip() for a in ccs if a]))
# make a list of addresses:
# [a for a if a] is to get rid of the empty CC.
# set to get rid of dupes
# .strip() do remove the spaces from the front of things.
reply_tos = set([a.strip() for a in
[sender,] \
+ ep.show.client.contacts.split(',') \
+ ccs \
if a] )
# headers={Reply-To... needs to be a string of comma seperated
print(1, reply_tos)
reply_to = ','.join( reply_tos )
print(2, reply_to)
print(3, ccs)
headers = {
'Reply-To': reply_to,
'X-veyepar': ep.show.slug,
}
if self.options.test:
print("tos:", tos)
print("ccs:", ccs)
print("subject:", subject)
print("headers:", headers)
# print("context:", context)
print("body:", body)
ret = False
else:
email = EmailMessage(
subject, body, sender, tos,
headers=headers, cc=ccs )
connection = get_connection()
ret = connection.send_messages([email])
print("subject:", subject)
print("tos:", tos)
ret = True # need to figure out what .send_messages returns
else:
print("no emails!")
ret = False
return ret
if __name__ == '__main__':
p=email_ab()
p.main()
|
Python
| 0 |
@@ -222,23 +222,8 @@
gs%0A%0A
-import random%0A%0A
clas
@@ -1326,38 +1326,112 @@
elf.
-body_header + self.body_body +
+options.note + %5C%0A self.body_header + %5C%0A self.body_body + %5C%0A
sel
@@ -3389,40 +3389,8 @@
ted%0A
- print(1, reply_tos)%0A
@@ -3434,65 +3434,8 @@
s )%0A
- print(2, reply_to)%0A print(3, ccs)%0A
@@ -4352,32 +4352,32 @@
ret = False%0A%0A
-
return r
@@ -4380,16 +4380,166 @@
rn ret%0A%0A
+ def add_more_options(self, parser):%0A parser.add_option('--note',%0A default=%22%22,%0A help=%22Prepend a note above the Hi.%22)%0A%0A
if __nam
|
d6fa7713556582bab54efc3ba53d27b411d8e23c
|
update import statements
|
tamil/__init__.py
|
tamil/__init__.py
|
# -*- coding: utf-8 -*-
#
# (C) 2013 Muthiah Annamalai <[email protected]>
# Library provides various encoding services for Tamil libraries
#
from . import utf8
from . import tscii
from . import txt2unicode
from . import txt2ipa
def printchar( letters ):
for c in letters:
print(c, u"\\u%04x"%ord(c))
P = lambda x: u" ".join(x)
|
Python
| 0 |
@@ -140,23 +140,16 @@
ies%0A# %0A%0A
-from .
import u
@@ -152,23 +152,16 @@
rt utf8%0A
-from .
import t
@@ -165,23 +165,16 @@
t tscii%0A
-from .
import t
@@ -188,15 +188,8 @@
ode%0A
-from .
impo
|
97a8c8c2baffdeaf6b710cf875d2f8641b999338
|
Create adapter_16mers.py
|
select_random_subset/adapter_16mers.py
|
select_random_subset/adapter_16mers.py
|
Python
| 0.000001 |
@@ -0,0 +1,1493 @@
+%0A%0A#!/usr/bin/env python2%0A# -*- coding: utf-8 -*-%0A%22%22%22%0ACreated on Sat Oct 21 14:15:18 2017%0A%0A@author: nikka.keivanfar%0A%22%22%22%0A%0A#see also: adapters.fa%0A%0AP5 = 'AATGATACGGCGACCACCGA'%0AP7 = 'CAAGCAGAAGACGGCATACGAGAT'%0Aread1 = 'GATCTACACTCTTTCCCTACACGACGCTC'%0Aread2 = 'GTGACTGGAGTTCAGACGTGT'%0A%0Aadapters = %5BP5, P7, read1, read2%5D #to do: streamline loops for all adapters combined%0A%0A%0AP5_kmers = %7B%7D%0AP7_kmers = %7B%7D%0Aread1_kmers = %7B%7D%0Aread2_kmers = %7B%7D%0Ak = 16%0A%0A#P5 16mers%0A%0Afor i in range(len(P5) - k + 1):%0A kmer = P5%5Bi:i+k%5D%0A if P5_kmers.has_key(kmer):%0A P5_kmers%5Bkmer%5D += 1%0A else:%0A P5_kmers%5Bkmer%5D = 1%0A%0Afor kmer, count in P5_kmers.items():%0A print kmer + %22%5Ct%22 + str(count)%0A P5mers = set(kmer)%0A %0A#P7 16mers%0A %0Afor i in range(len(P7) - k + 1):%0A kmer = P7%5Bi:i+k%5D%0A if P7_kmers.has_key(kmer):%0A P7_kmers%5Bkmer%5D += 1%0A else:%0A P7_kmers%5Bkmer%5D = 1%0A%0Afor kmer, count in P7_kmers.items():%0A print kmer + %22%5Ct%22 + str(count)%0A P7mers = set(kmer)%0A %0A#read1 16mers%0A%0Afor i in range(len(read1) - k + 1):%0A kmer = read1%5Bi:i+k%5D%0A if read1_kmers.has_key(kmer):%0A read1_kmers%5Bkmer%5D += 1%0A else:%0A read1_kmers%5Bkmer%5D = 1%0A%0Afor kmer, count in read1_kmers.items():%0A print kmer + %22%5Ct%22 + str(count)%0A read1mers = set(kmer)%0A %0A#read2 16mers%0A%0Afor i in range(len(read2) - k + 1):%0A kmer = read2%5Bi:i+k%5D%0A if read2_kmers.has_key(kmer):%0A read2_kmers%5Bkmer%5D += 1%0A else:%0A read2_kmers%5Bkmer%5D = 1%0A%0Afor kmer, count in read2_kmers.items():%0A print kmer + %22%5Ct%22 + str(count)%0A read2mers = set(kmer)%0A
|
|
8a4931dd810079c1f78858c5058df74b0e696f72
|
Correct IrFilters_eval_domain when the third item starts with 'object.'
|
smile_action_rule/models/ir_filters.py
|
smile_action_rule/models/ir_filters.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Smile (<http://www.smile.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp import api, models, fields
from ..tools import unquote
class ActionFilter(models.Model):
_inherit = 'ir.filters'
@api.one
@api.depends('domain')
def _get_action_rule(self):
localdict = {'object': unquote('object'), 'time': time,
'active_id': unquote("active_id"), 'uid': self._uid}
eval_domain = eval(self.domain.replace(' ', ''), localdict)
self.action_rule = ',object.' in eval_domain
action_rule = fields.Boolean('Only for action rules', compute='_get_action_rule', store=True)
def get_filters(self, cr, uid, model, action_id=None):
action_domain = self._get_action_domain(cr, uid, action_id)
filter_ids = self.search(cr, uid, action_domain + [
('model_id', '=', model),
('user_id', 'in', (uid, False)),
('action_rule', '=', False),
])
my_filters = self.read(cr, uid, filter_ids, ['name', 'is_default', 'domain', 'context', 'user_id'])
return my_filters
@api.multi
def _eval_domain(self, record_ids=None):
self.ensure_one()
domain = []
eval_domain = eval(self.domain, {'object': unquote('object')})
for cond in eval_domain:
if isinstance(cond, tuple) and 'object' in cond[2]:
subdomain = []
records = self.env[self.model_id].browse(record_ids)
for record in records:
new_cond = (cond[0], cond[1], eval(cond[2], {'object': record}))
subdomain.append(new_cond)
subdomain = list(set(subdomain))
subdomain = ['|'] * (len(subdomain) - 1) + subdomain
domain.extend(subdomain)
else:
domain.append(cond)
return domain
|
Python
| 0 |
@@ -1020,16 +1020,101 @@
, fields
+%0Afrom openerp.tools import ustr%0Afrom openerp.tools.safe_eval import safe_eval as eval
%0A%0Afrom .
@@ -1452,25 +1452,8 @@
main
-.replace(' ', '')
, lo
@@ -1490,16 +1490,17 @@
ule = ',
+
object.'
@@ -1499,24 +1499,29 @@
object.' in
+repr(
eval_domain%0A
@@ -1519,16 +1519,17 @@
l_domain
+)
%0A%0A ac
@@ -1760,20 +1760,14 @@
-filter_ids =
+return
sel
@@ -1774,16 +1774,21 @@
f.search
+_read
(cr, uid
@@ -1944,60 +1944,8 @@
%5D
-)%0A my_filters = self.read(cr, uid, filter_ids
, %5B'
@@ -2001,34 +2001,8 @@
d'%5D)
-%0A return my_filters
%0A%0A
@@ -2467,23 +2467,29 @@
%5D, eval(
+ustr(
cond%5B2%5D
+)
, %7B'obje
|
ca043c2d3fe5fbdd37f372e8a3ad8bfd1b501c89
|
Correct Workitem._execute monkeypatch
|
smile_action_rule/workflow/workitem.py
|
smile_action_rule/workflow/workitem.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2014 Smile (<http://www.smile.fr>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import registry
from openerp.workflow.workitem import WorkflowItem
native_execute = WorkflowItem._execute
def new_execute(self, activity, stack):
cr, uid, ids = self.session.cr, self.session.uid, [self.record.id]
# Retrieve the action rules to possibly execute
rule_obj = registry(self.session.cr.dbname)['base.action.rule']
rules = rule_obj._get_action_rules_on_wkf(cr, uid, activity['id'])
# Check preconditions
pre_ids = {}
for rule in rules:
if rule.kind not in ('on_create', 'on_create_or_write'):
pre_ids[rule] = rule_obj._filter(cr, uid, rule, rule.filter_pre_id, ids)
# Call original method
result = native_execute(self, activity, stack)
# Check postconditions, and execute actions on the records that satisfy them
for rule in rules:
if rule.kind != 'on_unlink':
post_ids = rule_obj._filter(cr, uid, rule, rule.filter_id, pre_ids[rule])
else:
post_ids = pre_ids[rule]
if post_ids:
rule_obj._process(cr, uid, rule, post_ids)
return result
WorkflowItem._execute = new_execute
|
Python
| 0.000004 |
@@ -1131,16 +1131,139 @@
tack):%0A%0A
+ if not registry(self.session.cr.dbname).get('base.action.rule'):%0A return native_execute(self, activity, stack)%0A%0A
cr,
|
107e33ab3982f3f7fb56a1a2ac2b0eec0b67091b
|
Use universal newlines in gyp_helper.
|
build/gyp_helper.py
|
build/gyp_helper.py
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file helps gyp_chromium and landmines correctly set up the gyp
# environment from chromium.gyp_env on disk
import os
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
CHROME_SRC = os.path.dirname(SCRIPT_DIR)
def apply_gyp_environment_from_file(file_path):
"""Reads in a *.gyp_env file and applies the valid keys to os.environ."""
if not os.path.exists(file_path):
return
with open(file_path) as f:
file_contents = f.read()
try:
file_data = eval(file_contents, {'__builtins__': None}, None)
except SyntaxError, e:
e.filename = os.path.abspath(file_path)
raise
supported_vars = (
'CC',
'CHROMIUM_GYP_FILE',
'CHROMIUM_GYP_SYNTAX_CHECK',
'CXX',
'GYP_DEFINES',
'GYP_GENERATOR_FLAGS',
'GYP_GENERATOR_OUTPUT',
'GYP_GENERATORS',
)
for var in supported_vars:
file_val = file_data.get(var)
if file_val:
if var in os.environ:
print 'INFO: Environment value for "%s" overrides value in %s.' % (
var, os.path.abspath(file_path)
)
else:
os.environ[var] = file_val
def apply_chromium_gyp_env():
if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
# Update the environment based on chromium.gyp_env
path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
apply_gyp_environment_from_file(path)
|
Python
| 0.000007 |
@@ -578,16 +578,22 @@
ile_path
+, 'rU'
) as f:%0A
|
fbbaa3fc5b99eed88e039c232f129aaeab0a6f54
|
Bring table test coverage to 100%
|
tests/test_table.py
|
tests/test_table.py
|
Python
| 0 |
@@ -0,0 +1,2604 @@
+#!/usr/bin/env python3%0A%0Aimport nose.tools as nose%0Afrom table import Table%0A%0A%0Adef test_init_default():%0A %22%22%22should initialize table with required parameters and default values%22%22%22%0A table = Table(num_cols=5, width=78)%0A nose.assert_equal(table.num_cols, 5)%0A nose.assert_equal(table.width, 78)%0A nose.assert_equal(table.alignment, 'left')%0A nose.assert_equal(table.title, None)%0A nose.assert_equal(table.header, %5B%5D)%0A nose.assert_equal(table.rows, %5B%5D)%0A%0A%0Adef test_init_optional():%0A %22%22%22should initialize table with optional parameters if supplied%22%22%22%0A table = Table(num_cols=5, width=78, alignment='right', title='Cache')%0A nose.assert_equal(table.num_cols, 5)%0A nose.assert_equal(table.width, 78)%0A nose.assert_equal(table.alignment, 'right')%0A nose.assert_equal(table.title, 'Cache')%0A%0A%0Adef test_get_separator():%0A %22%22%22should return the correct ASCII separator string%22%22%22%0A table = Table(num_cols=5, width=78)%0A nose.assert_equal(table.get_separator(), '-' * 78)%0A%0A%0Adef test_str_title():%0A %22%22%22should correctly display title%22%22%22%0A table = Table(num_cols=5, width=12, title='Cache')%0A nose.assert_regexp_matches(%0A ''.join(('Cache'.center(12), '%5Cn', ('-' * 12))), str(table))%0A%0A%0Adef test_str_no_title():%0A %22%22%22should not display title if not originally supplied%22%22%22%0A table = Table(num_cols=5, width=12)%0A nose.assert_equal(str(table).strip(), '')%0A%0A%0Aclass TestAlignment(object):%0A%0A def _test_str_align(self, alignment, just):%0A table_width = 16%0A num_cols = 2%0A col_width = table_width // num_cols%0A table = Table(%0A num_cols=num_cols, width=table_width, alignment=alignment)%0A table.header = %5B'First', 'Last'%5D%0A table.rows.append(%5B'Bob', 'Smith'%5D)%0A table.rows.append(%5B'John', 'Earl'%5D)%0A nose.assert_equal(str(table), '%7B%7D%7B%7D%5Cn%7B%7D%5Cn%7B%7D%7B%7D%5Cn%7B%7D%7B%7D'.format(%0A just('First', col_width), just('Last', col_width),%0A '-' * 
table_width,%0A just('Bob', col_width), just('Smith', col_width),%0A just('John', col_width), just('Earl', col_width)))%0A%0A def test_str_align_left(self):%0A %22%22%22should correctly display table when left-aligned%22%22%22%0A self._test_str_align(%0A alignment='left', just=str.ljust)%0A%0A def test_str_align_center(self):%0A %22%22%22should correctly display table when center-aligned%22%22%22%0A self._test_str_align(%0A alignment='center', just=str.center)%0A%0A def test_str_align_right(self):%0A %22%22%22should correctly display table when right-aligned%22%22%22%0A self._test_str_align(%0A alignment='right', just=str.rjust)%0A
|
|
f392a90ae12a5f9aab04b22e82d493d0f93db9fd
|
Add first test
|
tests/test_utils.py
|
tests/test_utils.py
|
Python
| 0.000029 |
@@ -0,0 +1,31 @@
+def test_ok():%0A assert True%0A
|
|
e56a9781f4e7e8042c29c9e54966659c87c5c05c
|
Add a test for our more general views.
|
tests/test_views.py
|
tests/test_views.py
|
Python
| 0 |
@@ -0,0 +1,268 @@
+import pytest%0A%0Afrom django.core.urlresolvers import reverse%0A%0A%0Adef test_site_view(client):%0A response = client.get(reverse('site-home'))%0A assert response.status_code == 200%0A assert 'landings/home_site.html' in %5Btemplate.name for template in response.templates%5D%0A
|
|
675b7fc917b5f99120ca4d6dcb79b3e821dbe72a
|
add Olin specific script
|
downloadOlin.py
|
downloadOlin.py
|
Python
| 0 |
@@ -0,0 +1,502 @@
+import os%0Afrom downloadMailmanArchives import main%0Aclass Struct:%0A def __init__(self, **entries): %0A self.__dict__.update(entries)%0A%0Aif __name__ == '__main__':%0A args = %7B%0A 'archive_root_url': %5B'https://lists.olin.edu/mailman/private/carpediem/', 'https://lists.olin.edu/mailman/private/helpme/'%5D,%0A 'password' : os.environ.get('ARCHIVE_LOGIN', %22fail%22),%0A 'username' : os.environ.get('ARCHIVE_PASS', %22fail%22),%0A 'force' : True,%0A 'dest' : './archives'%0A %7D%0A main(Struct(**args))%0A%0A
|
|
a46f0a709747dbe90f0495e7e6b12c7b511baa7f
|
Delete duplicate wapp images
|
dupe_deleter.py
|
dupe_deleter.py
|
Python
| 0.000001 |
@@ -0,0 +1,1045 @@
+%22%22%22%0A# Install QPython3 for android%0A# https://github.com/qpython-android/qpython3/releases%0A%0A# Execute the below script in QPython3%0A%22%22%22%0A%0Aimport os, hashlib%0Afrom operator import itemgetter%0Afrom itertools import groupby%0Aimage_list = %5B%5D%0Afolder_list = %5Br'/storage/emulated/0/whatsapp/media/whatsapp images/',%0A r'/storage/emulated/0/whatsapp/media/whatsapp images/Sent'%5D%0Afor folder in folder_list:%0A file_list = os.listdir(folder)%0A for img_file in file_list:%0A file_path = os.path.join(folder, img_file)%0A if os.path.isfile(file_path):%0A try:%0A image_list.append(%5Bfile_path, hashlib.sha1(open(file_path, 'rb').read()).hexdigest()%5D)%0A except IOError:%0A raise Exception('Error reading the file')%0Aimage_list.sort(key=itemgetter(1))%0Agroups = groupby(image_list, itemgetter(1))%0Afor (img_hash, img_list_same_hash) in groups:%0A z = %5Bimg for img in img_list_same_hash%5D%0A i = 1%0A while i %3C len(z):%0A os.remove(z%5Bi%5D%5B0%5D)%0A print('Deleted ' + z%5Bi%5D%5B0%5D)%0A i += 1%0A
|
|
9f4452983e38d002e141ed0d2a9c865656a553ce
|
add todo stuff
|
main.py
|
main.py
|
Python
| 0 |
@@ -0,0 +1,146 @@
+%EF%BB%BF#!/usr/bin/env python%0A# __author__ = 'Dmitry Shihaleev'%0A# __version__= '0.2'%0A# __email__ = '[email protected]'%0A# __license__ = 'MIT License'%0A%0A
|
|
ab7c0d05a6bcf8be83409ccd96f2ed4a6fe65a73
|
Create main.py
|
main.py
|
main.py
|
Python
| 0 |
@@ -0,0 +1,1684 @@
+#!/usr/bin/env python3%0A%0Aimport sys%0Aimport socket%0Aimport string%0A%0AHOST = %22chat.freenode.net%22 # You can change this to whatever you want%0APORT = 6667%0A%0ANICK = %22Your Nick Name%22%0AIDENT = %22Your Identity%22%0AREALNAME = %22Your REAL Name%22%0AMASTER = %22The Master of this particular Slave%22%0A%0ACHANNEL = %22The Channel To join%22%0A%0Areadbuffer = %22%22%0A%0As = socket.socket()%0As.connect((HOST, PORT))%0A# sets the nickname inside IRC channel%0As.send(bytes(%22NICK %25s%5Cr%5Cn%22 %25 NICK, %22UTF-8%22))%0A# Connects to the server using the provided inforamtion above. The 'bla' is irrelevent%0As.send(bytes(%22USER %25s %25s bla :%25s%5Cr%5Cn%22 %25 (IDENT, HOST, REALNAME), %22UTF-8%22))%0A# Joins the Channel%0As.send(bytes(%22JOIN #%25s %5Cr%5Cn%22 %25 (CHANNEL), %22UTF-8%22))%0A# starts a conversation with the 'master' when joining%0As.send(bytes(%22PRIVMSG %25s :Hello Master%5Cr%5Cn%22 %25 MASTER, %22UTF-8%22))%0A%0Awhile True:%0A readbuffer = readbuffer+s.recv(1024).decode(%22UTF-8%22)%0A temp = str.split(readbuffer, %22%5Cn%22)%0A readbuffer = temp.pop()%0A%0A for line in temp:%0A line = str.rstrip(line)%0A line = str.split(line)%0A%0A if(line%5B0%5D == %22PING%22):%0A s.send(bytes(%22PONG %25s%5Cr%5Cn%22 %25 line%5B1%5D, %22UTF-8%22))%0A if(line%5B1%5D == %22PRIVMSG%22):%0A sender = %22%22%0A for char in line%5B0%5D:%0A if(char == %22!%22):%0A break%0A if(char != %22:%22):%0A sender += char%0A size = len(line)%0A i = 3%0A message = %22%22%0A while(i %3C size):%0A message += line%5Bi%5D + %22 %22%0A i = i + 1%0A message.lstrip(%22:%22)%0A s.send(bytes(%22PRIVMSG %25s %25s %5Cr%5Cn%22 %25 (sender, message), %22UTF-8%22))%0A for index, i in enumerate(line):%0A print(line%5Bindex%5D)%0A
|
|
12490fa92e54becca77c70d124d807b19d71afa1
|
Create main.py
|
main.py
|
main.py
|
Python
| 0.000001 |
@@ -0,0 +1,3018 @@
+# waypointviewer.py Waypoint Viewer Google Maps/Google AppEngine application%0A# Copyright (C) 2011 Tom Payne%0A#%0A# This program is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as published by%0A# the Free Software Foundation, either version 3 of the License, or%0A# (at your option) any later version.%0A#%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A%0A%0Aimport json%0Afrom google.appengine.api.urlfetch import fetch%0Afrom google.appengine.ext import webapp%0Afrom google.appengine.ext.webapp import template%0Afrom google.appengine.ext.webapp.util import run_wsgi_app%0Aimport os.path%0A%0Aimport waypoint%0A%0A%0Aclass MainPage(webapp.RequestHandler):%0A%0A def get(self):%0A template_values = dict((key, self.request.get(key)) for key in ('kml', 'logo', 'tsk', 'title', 'wpt'))%0A path = os.path.join(os.path.dirname(__file__), 'templates', 'index.html')%0A self.response.out.write(template.render(path, template_values))%0A%0A%0Aclass WaypointviewerJs(webapp.RequestHandler):%0A%0A def get(self):%0A template_values = dict((key, self.request.get(key)) for key in ('kml', 'logo', 'tsk', 'wpt'))%0A path = os.path.join(os.path.dirname(__file__), 'templates', 'waypointviewer.js')%0A self.response.headers%5B'content-type'%5D = 'text/javascript'%0A self.response.out.write(template.render(path, template_values))%0A%0A%0Aclass Wpt2json(webapp.RequestHandler):%0A%0A def get(self):%0A debug = self.request.get('debug')%0A wpt = self.request.get('wpt')%0A response = fetch(wpt)%0A content = response.content.decode('latin_1')%0A feature_collection = 
waypoint.feature_collection(content.splitlines(), debug=debug)%0A if debug:%0A feature_collection_properties%5B'content'%5D = content%0A feature_collection_properties%5B'content_was_truncated'%5D = response.content_was_truncated%0A feature_collection_properties%5B'final_url'%5D = response.final_url%0A headers = dict((key, response.headers%5Bkey%5D) for key in response.headers)%0A feature_collection_properties%5B'headers'%5D = headers%0A feature_collection_properties%5B'status_code'%5D = response.status_code%0A keywords = %7B'indent': 4, 'sort_keys': True%7D%0A else:%0A keywords = %7B%7D%0A self.response.headers%5B'content-type'%5D = 'application/json'%0A self.response.out.write(json.dumps(feature_collection, **keywords))%0A%0A%0Aapp = webapp.WSGIApplication(%5B('/', MainPage), ('/waypointviewer.js', WaypointviewerJs), ('/wpt2json.json', Wpt2json)%5D, debug=True)%0A%0A%0Adef main():%0A run_wsgi_app(app)%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
4f118c457c738a1f65001e7c8d02349923545634
|
put units in brackets
|
web/Regulatory.py
|
web/Regulatory.py
|
# -*- coding: iso-8859-1 -*-
"""
Regulatory Database
@copyright: 2008 Johannes Berg
@license: GNU GPL, see COPYING for details.
"""
import codecs, math
from dbparse import DBParser, flag_definitions
Dependencies = ["time"]
def _country(macro, countries, code):
result = []
f = macro.formatter
result.extend([
f.heading(1, 1),
f.text('Regulatory definition for %s' % _get_iso_code(code)),
f.heading(0, 1),
])
try:
country = countries[code]
except:
result.append(f.text('No information available'))
return ''.join(result)
if country.comments:
result.extend([
f.preformatted(1),
f.text('\n'.join(country.comments)),
f.preformatted(0),
])
result.append(f.table(1))
result.extend([
f.table_row(1),
f.table_cell(1), f.strong(1),
f.text('Band (MHz)'),
f.strong(0), f.table_cell(0),
f.table_cell(1), f.strong(1),
f.text('Max BW (MHz)'),
f.strong(0), f.table_cell(0),
f.table_cell(1), f.strong(1),
f.text('Flags'),
f.strong(0), f.table_cell(0),
f.table_cell(1), f.strong(1),
f.text('Max antenna gain (dBi)'),
f.strong(0), f.table_cell(0),
f.table_cell(1), f.strong(1),
f.text('Max IR (dBm'),
f.hardspace,
f.text('(mW))'),
f.strong(0), f.table_cell(0),
f.table_cell(1), f.strong(1),
f.text('Max EIRP (dBm'),
f.hardspace,
f.text('(mW))'),
f.strong(0), f.table_cell(0),
f.table_row(0),
])
for perm in country.permissions:
def str_or_na(val, dBm=False):
if val and not dBm:
return '%.3f' % val
elif val:
return '%.3f (%.2f)' % (val, math.pow(10, val/10.0))
return 'N/A'
result.extend([
f.table_row(1),
f.table_cell(1),
f.text('%.3f - %.3f' % (perm.freqband.start, perm.freqband.end)),
f.table_cell(0),
f.table_cell(1),
f.text('%.3f' % (perm.freqband.maxbw,)),
f.table_cell(0),
f.table_cell(1),
f.text(', '.join(perm.textflags)),
f.table_cell(0),
f.table_cell(1),
f.text(str_or_na(perm.power.max_ant_gain)),
f.table_cell(0),
f.table_cell(1),
f.text(str_or_na(perm.power.max_ir, dBm=True)),
f.table_cell(0),
f.table_cell(1),
f.text(str_or_na(perm.power.max_eirp, dBm=True)),
f.table_cell(0),
f.table_row(0),
])
result.append(f.table(0))
result.append(f.linebreak(0))
result.append(f.linebreak(0))
result.append(macro.request.page.link_to(macro.request, 'return to country list'))
return ''.join(result)
# Lazily-populated cache: ISO 3166 alpha-2 code -> country name.
_iso_list = {}


def _get_iso_code(code):
    """Return the country name for ISO 3166 alpha-2 *code*.

    The iso-codes table is read once on first use and cached in
    ``_iso_list``; unknown codes fall back to ``'Unknown (<code>)'``.
    """
    if not _iso_list:
        # Use a context manager: the original leaked the file handle,
        # leaving it open until garbage collection.
        with codecs.open('/usr/share/iso-codes/iso_3166.tab',
                         encoding='utf-8') as tab:
            for line in tab:
                line = line.strip()
                c, name = line.split('\t')
                _iso_list[c] = name
    return _iso_list.get(code, 'Unknown (%s)' % code)
def macro_Regulatory(macro):
    """Entry point for the Regulatory wiki macro.

    Depending on request parameters this renders one of:
      * the raw database file (``?raw=1``),
      * a single country page (``?alpha2=CC``), or
      * the default country list plus any parser warnings.

    Returns the rendered fragment as a single string.
    """
    _ = macro.request.getText
    request = macro.request
    f = macro.formatter
    country = request.form.get('alpha2', [None])[0]
    # Database location: default, overridable via wiki configuration.
    dbpath = '/tmp/db.txt'
    if hasattr(request.cfg, 'regdb_path'):
        dbpath = request.cfg.regdb_path
    result = []

    if request.form.get('raw', [None])[0]:
        # Raw view: dump the database file verbatim into a code area.
        result.append(f.code_area(1, 'db-raw', show=1, start=1, step=1))
        # 'with' closes the handle deterministically; the original leaked it.
        with open(dbpath) as dbfile:
            for line in dbfile:
                result.extend([
                    f.code_line(1),
                    f.text(line.rstrip()),
                    f.code_line(0),
                ])
        result.append(f.code_area(0, 'db-raw'))
        result.append(macro.request.page.link_to(macro.request, 'return to country list'))
        return ''.join(result)

    warnings = []
    # NOTE(review): assumes DBParser.parse() consumes the file eagerly so it
    # is safe to close the handle on exit — confirm against DBParser.
    with open(dbpath) as dbfile:
        countries = DBParser(warn=lambda x: warnings.append(x)).parse(dbfile)
    if country:
        return _country(macro, countries, country)

    # Default view: alphabetical list of all countries in the database.
    countries = countries.keys()
    countries = [(_get_iso_code(code), code) for code in countries]
    countries.sort()
    result.extend([
        f.heading(1, 1),
        f.text('Countries'),
        f.heading(0, 1),
    ])
    result.append(f.bullet_list(1))
    for name, code in countries:
        result.extend([
            f.listitem(1),
            request.page.link_to(request, name, querystr={'alpha2': code}),
            f.listitem(0),
        ])
    result.append(f.bullet_list(0))

    if warnings:
        result.append(f.heading(1, 2))
        result.append(f.text("Warnings"))
        result.append(f.heading(0, 2))
        result.append(f.preformatted(1))
        result.extend(warnings)
        result.append(f.preformatted(0))
    result.append(request.page.link_to(request, 'view raw database', querystr={'raw': 1}))
    return ''.join(result)
|
Python
| 0.000011 |
@@ -921,21 +921,21 @@
t('Band
-(MHz)
+%5BMHz%5D
'),%0A
@@ -1041,13 +1041,13 @@
BW
-(MHz)
+%5BMHz%5D
'),%0A
@@ -1276,13 +1276,13 @@
ain
-(dBi)
+%5BdBi%5D
'),%0A
@@ -1388,17 +1388,17 @@
'Max IR
-(
+%5B
dBm'),%0A
@@ -1437,33 +1437,33 @@
f.text('(mW)
-)
+%5D
'),%0A f.
@@ -1559,17 +1559,17 @@
ax EIRP
-(
+%5B
dBm'),%0A
@@ -1616,17 +1616,17 @@
xt('(mW)
-)
+%5D
'),%0A
|
34847fbe0e04a2da8957a0ba5de92856ca73c8cc
|
Add missing migration
|
osf/migrations/0054_auto_20170823_1555.py
|
osf/migrations/0054_auto_20170823_1555.py
|
Python
| 0.0002 |
@@ -0,0 +1,611 @@
+# -*- coding: utf-8 -*-%0A# Generated by Django 1.11.4 on 2017-08-23 20:55%0Afrom __future__ import unicode_literals%0A%0Afrom django.db import migrations, models%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('osf', '0053_add_quickfiles'),%0A %5D%0A%0A operations = %5B%0A migrations.AlterField(%0A model_name='abstractnode',%0A name='type',%0A field=models.CharField(choices=%5B('osf.node', 'node'), ('osf.collection', 'collection'), ('osf.registration', 'registration'), ('osf.quickfilesnode', 'quick files node')%5D, db_index=True, max_length=255),%0A ),%0A %5D%0A
|
|
4102ab8fc24265aaee1ecbf673bec260b3b3e5df
|
add max sub arr impl
|
bioinformatics/dynamic_programming/max_sum_sub_arr1.py
|
bioinformatics/dynamic_programming/max_sum_sub_arr1.py
|
Python
| 0.000001 |
@@ -0,0 +1,647 @@
+def max_sum_sub_arr(arr):%0A score_vector = %5B0 for _ in range(len(arr))%5D%0A%0A def max_sum_sub_arr_detail(beg_idx):%0A if beg_idx %3E= len(arr):%0A return 0%0A elif arr%5Bbeg_idx%5D %3E= 0:%0A score_vector%5Bbeg_idx%5D = arr%5Bbeg_idx%5D + max_sum_sub_arr_detail(beg_idx + 1)%0A return score_vector%5Bbeg_idx%5D%0A else:%0A score_vector%5Bbeg_idx%5D = max(0, arr%5Bbeg_idx%5D + max_sum_sub_arr_detail(beg_idx + 1))%0A return score_vector%5Bbeg_idx%5D%0A%0A max_sum_sub_arr_detail(0)%0A print score_vector%0A return max(score_vector)%0A%0A%0Aif __name__ == '__main__':%0A print max_sum_sub_arr(%5B1, -2, 3, 10, -4, 7, 2, -5%5D)%0A
|
|
1043acdfe324e02bc2a8629ef8a47d6ae9befd7c
|
Add python script to get ECC608 Public Key
|
src/aiy/_drivers/_ecc608_pubkey.py
|
src/aiy/_drivers/_ecc608_pubkey.py
|
Python
| 0.000001 |
@@ -0,0 +1,2142 @@
+#!/usr/bin/env python3%0Aimport base64%0Aimport ctypes%0Aimport sys%0A%0ACRYPTO_ADDRESS_DICT = %7B%0A 'Vision Bonnet': 0x60,%0A 'Voice Bonnet': 0x62,%0A%7D%0A%0A%0Aclass AtcaIfaceCfgLong(ctypes.Structure):%0A _fields_ = (%0A ('iface_type', ctypes.c_ulong),%0A ('devtype', ctypes.c_ulong),%0A ('slave_address', ctypes.c_ubyte),%0A ('bus', ctypes.c_ubyte),%0A ('baud', ctypes.c_ulong)%0A )%0A%0A%0Adef main():%0A try:%0A cryptolib = ctypes.cdll.LoadLibrary('libcryptoauth.so')%0A except Exception:%0A print('Unable to load crypto library, SW authentication required')%0A sys.exit()%0A%0A try:%0A for name, addr in CRYPTO_ADDRESS_DICT.items():%0A cfg = AtcaIfaceCfgLong.in_dll(cryptolib, 'cfg_ateccx08a_i2c_default')%0A cfg.slave_address = addr %3C%3C 1%0A cfg.bus = 1 # ARM I2C%0A cfg.devtype = 3 # ECC608%0A status = cryptolib.atcab_init(cryptolib.cfg_ateccx08a_i2c_default)%0A if status == 0:%0A # Found a valid crypto chip.%0A break%0A else:%0A cryptolib.atcab_release()%0A%0A if status:%0A raise Exception%0A%0A serial = ctypes.create_string_buffer(9)%0A status = cryptolib.atcab_read_serial_number(ctypes.byref(serial))%0A if status:%0A raise Exception%0A%0A serial = ''.join('%2502X' %25 x for x in serial.raw)%0A print('Serial Number: %25s%5Cn' %25 serial, file=sys.stderr)%0A%0A pubkey = ctypes.create_string_buffer(64)%0A status = cryptolib.atcab_genkey_base(0, 0, None, ctypes.byref(pubkey))%0A if status:%0A raise Exception%0A%0A public_key = bytearray.fromhex(%0A '3059301306072A8648CE3D020106082A8648CE3D03010703420004') + bytes(pubkey.raw)%0A public_key = '-----BEGIN PUBLIC KEY-----%5Cn' + %5C%0A base64.b64encode(public_key).decode('ascii') + '%5Cn-----END PUBLIC KEY-----'%0A print(public_key)%0A%0A status = cryptolib.atcab_release()%0A if status:%0A raise Exception%0A except Exception:%0A print('Unable to communicate with crypto, SW authentication required')%0A%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
b04503bddfa3b0d737308ac8ecb7f06ac866e6eb
|
Create __init__.py
|
__init__.py
|
__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
0b891e401bf0e671d3bc6f0347a456f1cc5b07b3
|
add __init__.py for root package
|
__init__.py
|
__init__.py
|
Python
| 0.000018 |
@@ -0,0 +1,14 @@
+import sysdic%0A
|
|
14f175c294ec6b5dcd75887a031386c1c9d7060d
|
add __main__
|
__main__.py
|
__main__.py
|
Python
| 0.000247 |
@@ -0,0 +1,606 @@
+from . import parser%0Aimport sys%0A%0Aif len(sys.argv) == 1:%0A print(%22compile yarh to html%22)%0A print(%22usage: yarh %5BYARH_FILE%5D...%22)%0A print(%22 yarh -- %5BYARH_STRING%5D%22)%0A print(%22 yarh %5BYARH_FILE%5D... -- %5BYARH_STRING%5D%22)%0A sys.exit(1)%0A%0Afromfile = True%0A%0Afor arg in sys.argv%5B1:%5D:%0A if arg == %22--%22:%0A fromfile = False%0A continue%0A if fromfile:%0A f = open(arg, %22r%22)%0A print(parser.parseyarh(f.read()).html())%0A else:%0A print(parser.parseyarh(arg).html())%0A fromfile = True%0A break%0A%0Aif not fromfile:%0A print(parser.parseyarh(sys.stdin.read()).html())%0A
|
|
e8bd9c6867e5eb5f8c244b5190411ef3c613962b
|
Add TinyMCE to EventForm.
|
events/forms.py
|
events/forms.py
|
# encoding: utf-8
# Copyright 2011 Tree.io Limited
# This file is part of Treeio.
# License www.tree.io/license
"""
Events module forms
"""
from django import forms
from django.utils.translation import ugettext as _
from django.core.urlresolvers import reverse
from treeio.events.models import Event
from treeio.core.models import Object, Location
from treeio.core.decorators import preprocess_form
import datetime
preprocess_form()
class MassActionForm(forms.Form):
    """Mass action form for Reports.

    Lets the user delete a report outright or move it to the trash.
    """
    delete = forms.ChoiceField(label=_("Delete"), choices=(('', '-----'), ('delete', _('Delete Completely')),
                                                           ('trash', _('Move to Trash'))), required=False)
    instance = None

    def __init__(self, user, *args, **kwargs):
        # Pull the target object out of kwargs before handing the rest
        # to the base Form constructor.
        self.instance = kwargs.pop('instance', None)
        super(MassActionForm, self).__init__(*args, **kwargs)

    def save(self, *args, **kwargs):
        "Process form"
        # Guard clauses: nothing to do without a target and valid data.
        if not self.instance or not self.is_valid():
            return
        action = self.cleaned_data['delete']
        if action == 'delete':
            self.instance.delete()
        elif action == 'trash':
            self.instance.trash = True
            self.instance.save()
class EventForm(forms.ModelForm):
    """Event form.

    Creation/edit form for Event objects: configures labels, the
    location autocomplete, datetime pickers and an optional initial
    end-date derived from ``date``/``hour`` request parameters.
    """

    def _set_initial(self, field, value):
        # Intentionally a no-op placeholder; kept for interface
        # compatibility with existing callers.
        "Sets initial value"

    def __init__(self, user=None, date=None, hour=None, *args, **kwargs):
        super(EventForm, self).__init__(*args, **kwargs)
        self.fields['name'].label = _('Title')
        self.fields['name'].widget = forms.TextInput(attrs={'size': '30'})
        # Restrict locations to those the user may access ('x' mode).
        self.fields['location'].queryset = Object.filter_permitted(
            user, Location.objects, mode='x')
        self.fields['location'].widget.attrs.update(
            {'popuplink': reverse('identities_location_add')})
        self.fields['location'].widget.attrs.update({'class': 'autocomplete',
                                                     'callback': reverse('identities_ajax_location_lookup')})
        self.fields['location'].label = _("Location")
        self.fields['start'].label = _("Start")
        self.fields['end'].label = _("End")
        self.fields['details'].label = _("Details")
        if date:
            # Pre-fill the end date from the request; a malformed date is
            # deliberately ignored (best effort).
            try:
                rdate = datetime.datetime.strptime(date, "%Y-%m-%d")
                if hour:
                    hour = int(hour)
                else:
                    hour = 12  # default to midday when no hour is given
                rdate = datetime.datetime(year=rdate.year,
                                          month=rdate.month,
                                          day=rdate.day,
                                          hour=hour)
                self.fields['end'].initial = rdate
            except ValueError:
                pass
        # Set datepicker widgets; 'initial' carries the epoch timestamp.
        self.fields['start'].widget.attrs.update({'class': 'datetimepicker'})
        self.fields['end'].widget.attrs.update({'class': 'datetimepicker'})
        if self.fields['start'].initial:
            self.fields['start'].widget.attrs.update(
                {'initial': self.fields['start'].initial.strftime('%s')})
        if self.fields['end'].initial:
            self.fields['end'].widget.attrs.update(
                {'initial': self.fields['end'].initial.strftime('%s')})
        if 'instance' in kwargs:
            instance = kwargs['instance']
            if instance.start:
                self.fields['start'].widget.attrs.update(
                    {'initial': instance.start.strftime('%s')})
            if instance.end:
                self.fields['end'].widget.attrs.update(
                    {'initial': instance.end.strftime('%s')})

    def clean_end(self):
        "Make sure end date is greater than start date, when specified"
        # BUGFIX: the original wrapped the whole check in a bare
        # try/except that caught the ValidationError it had just raised,
        # so the validation could never fire.  Use .get() instead: a
        # field absent from cleaned_data simply skips the comparison.
        start = self.cleaned_data.get('start')
        end = self.cleaned_data.get('end')
        if start and end and end < start:
            raise forms.ValidationError(
                _("End date can not be before the start date"))
        return self.cleaned_data['end']

    class Meta:
        "Event"
        model = Event
        fields = ('name', 'location', 'start', 'end', 'details')
class GoToDateForm(forms.Form):
    """Go to date form definition."""

    def __init__(self, date, *args, **kwargs):
        # *date* is accepted for interface compatibility but not used here.
        super(GoToDateForm, self).__init__(*args, **kwargs)
        goto_field = forms.DateField(label=_("Go to date"), required=False)
        goto_field.widget.attrs.update({'class': 'datepicker'})
        self.fields['goto'] = goto_field
class FilterForm(forms.Form):
    """Filters for Events: a from/to date range."""

    def __init__(self, *args, **kwargs):
        super(FilterForm, self).__init__(*args, **kwargs)
        self.fields['datefrom'] = forms.DateField(label=_("Date From"))
        self.fields['datefrom'].widget.attrs.update({'class': 'datepicker'})
        self.fields['dateto'] = forms.DateField(label=_("Date To"))
        self.fields['dateto'].widget.attrs.update({'class': 'datepicker'})

    def clean_dateto(self):
        "Clean date_to"
        # Use .get(): a field that failed its own validation is absent
        # from cleaned_data, and indexing it would raise KeyError.
        datefrom = self.cleaned_data.get('datefrom')
        dateto = self.cleaned_data.get('dateto')
        if datefrom and dateto and not dateto >= datefrom:
            raise forms.ValidationError(
                "From date can not be greater than To date.")
        # BUGFIX: Django requires clean_<field>() to return the cleaned
        # value; the original fell through and returned None, silently
        # wiping out 'dateto'.
        return dateto
|
Python
| 0 |
@@ -2411,16 +2411,93 @@
etails%22)
+%0A self.fields%5B'details'%5D.widget.attrs.update(%7B'class': 'full-editor'%7D)
%0A%0A
|
e29c8e2d55464ac765db60a5cc213bb943b60742
|
add [[Portal:Beer]] and [[Portal:Wine]] tagging request script
|
trunk/portalbeer.py
|
trunk/portalbeer.py
|
Python
| 0.000001 |
@@ -0,0 +1,1137 @@
+#!usr/bin/python%0Aimport sys, os, re%0Asys.path.append(os.environ%5B'HOME'%5D + '/stuffs/pywiki/pywikipedia')%0A%0Aimport wikipedia as wiki%0A%0A%0Asite = wiki.getSite()%0Apage = 'Portal:Beer/Selected picture/'%0A%0A#get page list%0Apages = %5B%5D%0Anum = 0%0Acontent = %22%22%22%5C%0A%7B%7BSelected picture%0A%7C image = %0A%7C size = %0A%7C caption = %0A%7C text = %0A%7C credit = %0A%7C link = %0A%7D%7D%0A%22%22%22%0Awhile num %3C=50:%0A%09num +=1%0A%09pages.append(wiki.Page(site,page + str(num)))%0A#for page in pages:%0A#%09print page%0A#%09wiki.showDiff(page.get(), content)%0A#%09page.put(content, 'Updating per %5B%5BWP:BOTREQ%5D%5D')%0A%0Araw_content = %22%22%22%5C%0A%7B%7BSelected picture%0A%7C image = %0A%7C size = %0A%7C caption = %0A%7C text = %0A%7C credit = %0A%7C link = %0A%7D%7D%0A%0A%5B%5BCategory:Wine Portal%5D%5D%0A%22%22%22%0A%0A%0Apagewine = 'Portal:Wine/Selected picture/'%0Apages1 = %5B%5D%0Anum = 0%0Awhile num %3C50:%0A%09num +=1%0A%09pages1.append(wiki.Page(site,pagewine + str(num)))%0Afor page in pages1:%0A%09print page%0A%09try:%0A%09%09wikitext = page.get()%0A%09%09newtext = re.sub('Portal:Wine/Selected picture/Layout','Selected picture', wikitext)%0A%09%09wiki.showDiff(wikitext, newtext)%0A%09%09page.put(newtext, 'Updating per %5B%5BWP:BOTREQ%5D%5D')%0A%09except wiki.NoPage:%0A%09%09page.put(raw_content, 'Updating per %5B%5BWP:BOTREQ%5D%5D')
|
|
525a6d214d3f6f9731c16bbf10ed150d1fa24021
|
Create betweenness.py
|
src/betweenness.py
|
src/betweenness.py
|
Python
| 0.000139 |
@@ -0,0 +1,667 @@
+'''%0ACreated on Feb 20, 2019%0A%0A@author: Victor%0A'''%0A%0Afrom neo4j.v1 import GraphDatabase, basic_auth%0Adriver = GraphDatabase.driver(%22bolt://localhost%22)%0Asession = driver.session()%0A%0Aquery = '''CALL algo.betweenness.sampled.stream(null, null,%0A %7Bstrategy:'random', probability:1.0, maxDepth:1, direction: 'both'%7D)%0A%0AYIELD nodeId, centrality%0A%0AMATCH (actor) WHERE id(actor) = nodeId and exists(actor.name)%0ARETURN actor.name AS actor,centrality%0AORDER BY centrality DESC LIMIT 10;'''%0A%0Aresult = session.run(query)%0Aprint(%22Top Central Nodes%22)%0Aprint(%22-----------------%22)%0Afor record in result:%0A print(%22%25s %25s%22 %25 (record%5B%22actor%22%5D.ljust(25, %22 %22), record%5B%22centrality%22%5D))%0A%0Asession.close()%0A
|
|
2e467774d83e14baf6fb2fec1fa4e0f6c1f8f88d
|
Disable webrtc benchmark on Android
|
tools/perf/benchmarks/webrtc.py
|
tools/perf/benchmarks/webrtc.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from measurements import webrtc
import page_sets
from telemetry import benchmark
class WebRTC(benchmark.Benchmark):
  """Obtains WebRTC metrics for real-time video tests."""
  # Telemetry measurement class that drives the benchmark run.
  test = webrtc.WebRTC
  # Page set exercised by the measurement (WebRTC test cases).
  page_set = page_sets.WebrtcCasesPageSet
|
Python
| 0.000005 |
@@ -239,16 +239,67 @@
hmark%0A%0A%0A
[email protected]('android') # crbug.com/390233%0A
class We
|
d171d316eb45bdd6ce9d3a80c2ca91ae8b3cf1b2
|
Clarify how long a year is
|
tapiriik/auth/__init__.py
|
tapiriik/auth/__init__.py
|
from .payment import *
from .totp import *
from tapiriik.database import db
from tapiriik.sync import Sync
from datetime import datetime, timedelta
from bson.objectid import ObjectId
class User:
    """Account helpers backed by the ``db.users`` Mongo collection.

    Methods are written without ``self``/``@staticmethod`` and invoked as
    ``User.Method(...)`` (Python 3 plain-function semantics), matching
    existing callers.
    """

    def Get(id):
        """Fetch a user document by its ObjectId string, or None."""
        return db.users.find_one({"_id": ObjectId(id)})

    def Ensure(req):
        """Return the request's user, creating and logging in a new one if absent."""
        if req.user == None:
            req.user = User.Create()
            User.Login(req.user, req)
        return req.user

    def Login(user, req):
        """Bind *user* to the session and to the current request."""
        req.session["userid"] = str(user["_id"])
        req.user = user

    def Create():
        """Insert a fresh user document and return it."""
        uid = db.users.insert({"Created": datetime.utcnow()})  # will mongodb insert an almost empty doc, i.e. _id?
        return db.users.find_one({"_id": uid})

    def GetConnectionRecordsByUser(user):
        """Fetch the service-connection documents referenced by *user*."""
        return db.connections.find({"_id": {"$in": [x["ID"] for x in user["ConnectedServices"]]}})

    def AssociatePayment(user, payment):
        """Attach *payment* to *user*, detaching it from any other account."""
        db.users.update({"_id": {'$ne': ObjectId(user["_id"])}}, {"$pull": {"Payments": payment}}, multi=True)  # deassociate payment ids from other accounts that may be using them
        db.users.update({"_id": ObjectId(user["_id"])}, {"$addToSet": {"Payments": payment}})

    def HasActivePayment(user):
        """Return True if *user* has a payment less than one year old.

        BUGFIX: ``timedelta`` has no ``years`` argument, so the original
        ``timedelta(years=1)`` raised TypeError on every call.  One year
        is expressed as 365.25 days instead.
        """
        if "Payments" not in user:
            return False
        for payment in user["Payments"]:
            if payment["Timestamp"] > (datetime.utcnow() - timedelta(days=365.25)):
                return True
        return False

    def ConnectService(user, serviceRecord):
        """Associate *serviceRecord* with *user*, merging a duplicate account if found."""
        existingUser = db.users.find_one({"_id": {'$ne': ObjectId(user["_id"])}, "ConnectedServices.ID": ObjectId(serviceRecord["_id"])})
        if "ConnectedServices" not in user:
            user["ConnectedServices"] = []
        delta = False
        if existingUser is not None:
            # merge merge merge
            user["ConnectedServices"] += existingUser["ConnectedServices"]
            # Not every account carries a Payments list; guard both sides
            # so the merge cannot raise KeyError.
            user["Payments"] = user.get("Payments", []) + existingUser.get("Payments", [])
            delta = True
            db.users.remove({"_id": existingUser["_id"]})
        else:
            if serviceRecord["_id"] not in [x["ID"] for x in user["ConnectedServices"]]:
                user["ConnectedServices"].append({"Service": serviceRecord["Service"], "ID": serviceRecord["_id"]})
                delta = True
        db.users.update({"_id": user["_id"]}, {"$set": {"ConnectedServices": user["ConnectedServices"]}})
        if delta or ("SyncErrors" in serviceRecord and len(serviceRecord["SyncErrors"]) > 0):  # also schedule an immediate sync if there is an outstanding error (i.e. user reconnected)
            Sync.ScheduleImmediateSync(user, True)  # exhaustive, so it'll pick up activities from newly added services / ones lost during an error

    def DisconnectService(serviceRecord):
        """Remove *serviceRecord* from every user; delete users left with no services."""
        # not that >1 user should have this connection
        activeUsers = list(db.users.find({"ConnectedServices.ID": serviceRecord["_id"]}))
        if len(activeUsers) == 0:
            raise Exception("No users found with service " + serviceRecord["_id"])
        db.users.update({}, {"$pull": {"ConnectedServices": {"ID": serviceRecord["_id"]}}}, multi=True)
        for user in activeUsers:
            if len(user["ConnectedServices"]) - 1 == 0:
                # I guess we're done here?
                db.users.remove({"_id": user["_id"]})

    def AuthByService(serviceRecord):
        """Return the user owning *serviceRecord*, or None."""
        return db.users.find_one({"ConnectedServices.ID": serviceRecord["_id"]})
class SessionAuth:
    """Request middleware: attaches the session's user document to the request."""

    def process_request(self, req):
        # Resolve the session's user id to a full document; anonymous
        # sessions get req.user = None.
        user_id = req.session.get("userid")
        if user_id is None:
            req.user = None
        else:
            req.user = db.users.find_one({"_id": ObjectId(user_id)})
|
Python
| 0.99958 |
@@ -1359,15 +1359,19 @@
lta(
-years=1
+days=365.25
)):%0A
|
d9e11e2c5f14cee0ead87ced9afe85bdd299ab35
|
Add python script to extract
|
extract_text.py
|
extract_text.py
|
Python
| 0.000002 |
@@ -0,0 +1,203 @@
+import json%0Af=open('raw.json')%0Ag=open('extracted1','a')%0Ai=1%0Afor s in f:%0A j=json.loads(s)%0A j=j%5B'text'%5D%0A h=json.dumps(j)%0A number=str(i) + ':' + ' '%0A g.write(h)%0A g.write('%5Cn%5Cn')%0A i=i+1%0A
|
|
370d3a122fa0bfc4c6f57a5cd6e518968205611a
|
add another linechart example showing new features
|
examples/lineChartXY.py
|
examples/lineChartXY.py
|
Python
| 0 |
@@ -0,0 +1,894 @@
+#!/usr/bin/python%0A# -*- coding: utf-8 -*-%0A%0A%22%22%22%0AExamples for Python-nvd3 is a Python wrapper for NVD3 graph library.%0ANVD3 is an attempt to build re-usable charts and chart components%0Afor d3.js without taking away the power that d3.js gives you.%0A%0AProject location : https://github.com/areski/python-nvd3%0A%22%22%22%0A%0Afrom nvd3 import lineChart%0Aimport math%0Afrom numpy import sin,pi,linspace%0A%0Aoutput_file = open('test_lineChartXY.html', 'w')%0A%0Achart = lineChart(name=%22lineChart%22, date=False, x_format=%22f%22,y_format=%22f%22, width=500, height=500, show_legend=False)%0A%0A#lissajous parameters of a/b%0Aa = %5B1,3,5,3%5D%0Ab = %5B1,5,7,4%5D%0Adelta = pi/2%0At = linspace(-pi,pi,300)%0A%0Afor i in range(0,4):%0A x = sin(a%5Bi%5D * t + delta)%0A y = sin(b%5Bi%5D * t)%0A %0A chart.add_serie(y=y, x=x, name='lissajous-n%25d' %25 i, color='red' if i == 0 else 'black')%0A%0Achart.buildhtml()%0A%0Aoutput_file.write(chart.htmlcontent)%0A%0Aoutput_file.close()%0A
|
|
aa4a3011775c12c19d690bbca91a07df4e033b1f
|
add urls for admin, zinnia, and serving static files
|
src/phyton/urls.py
|
src/phyton/urls.py
|
Python
| 0 |
@@ -0,0 +1,541 @@
+from django.conf.urls import patterns, include, url%0Afrom django.contrib import admin%0Afrom django.conf import settings%0Afrom django.conf.urls.static import static%0Aadmin.autodiscover()%0A%0Aurlpatterns = patterns('',%0A%0A # Admin URLs%0A url(r'%5Eadmin/doc/', include('django.contrib.admindocs.urls')),%0A url(r'%5Eadmin/', include(admin.site.urls)),%0A%0A # Zinnia's URLs%0A url(r'%5Eweblog/', include('zinnia.urls')),%0A url(r'%5Ecomments/', include('django.contrib.comments.urls')),%0A%0A) + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)%0A
|
|
b31def01a04a6ddb90e780985e43e8ad8e57e457
|
Create uber.py
|
modules/uber.py
|
modules/uber.py
|
Python
| 0.000036 |
@@ -0,0 +1,43 @@
+def uber(self):%0A self.send_chan(%22Moi%22)%0A %0A
|
|
27ab8b0436d784f44220512a91e699006b735d82
|
test mpi
|
tests/test_mpi.py
|
tests/test_mpi.py
|
Python
| 0.000001 |
@@ -0,0 +1,291 @@
+# coding: utf-8%0A%0Aierr = mpi_init()%0Acomm = mpi_comm_world%0Aprint(%22mpi_comm = %22, comm)%0A%0Asize, ierr = mpi_comm_size(comm)%0Aprint(%22mpi_size = %22, size)%0A%0Arank, ierr = mpi_comm_rank(comm)%0Aprint(%22mpi_rank = %22, rank)%0A%0A#abort, ierr = mpi_abort(comm)%0A#print(%22mpi_abort = %22, abort)%0A%0Aierr = mpi_finalize()%0A
|
|
2ad40dc0e7f61e37ab768bedd53572959a088bb0
|
Make app package
|
app/__init__.py
|
app/__init__.py
|
Python
| 0 |
@@ -0,0 +1,64 @@
+from flask import Flask%0A%0A%0Adef create_app(config_name):%0A pass%0A
|
|
4981a1fa0d94020e20a8e7714af62a075f7d7874
|
delete customers
|
delete_customers.py
|
delete_customers.py
|
Python
| 0 |
@@ -0,0 +1,264 @@
[email protected]('/customers/%3Cint:id%3E', methods=%5B'DELETE'%5D)%0Adef delete_customers(id):%0A index = %5Bi for i, customer in enumerate(customers) if customer%5B'id'%5D == id%5D%0A if len(index) %3E 0:%0A del customers%5Bindex%5B0%5D%5D%0A return make_response('', HTTP_204_NO_CONTENT)
|
|
bc34aa231a3838ad7686541ed4bce58374a40b19
|
Create __init__.py
|
physt/__init__.py
|
physt/__init__.py
|
Python
| 0.000429 |
@@ -0,0 +1 @@
+%0A
|
|
e93bbc1b5091f9b6d583437aea05aa59c8233d2d
|
add audiotsmcli
|
examples/audiotsmcli.py
|
examples/audiotsmcli.py
|
Python
| 0.000001 |
@@ -0,0 +1,2638 @@
+#!/usr/bin/env python3%0A# -*- coding: utf-8 -*-%0A%0A%22%22%22%0Aaudiotsmcli%0A~~~~~~~~~~~%0A%0AChange the speed of an audio file without changing its pitch.%0A%22%22%22%0A%0Aimport argparse%0Aimport os%0A%0Afrom audiotsm.ola import ola%0Afrom audiotsm.io.wav import WavReader, WavWriter%0A%0A%0Adef main():%0A %22%22%22Change the speed of an audio file without changing its pitch.%22%22%22%0A%0A # Parse command line arguments%0A parser = argparse.ArgumentParser(description=(%0A %22Change the speed of an audio file without changing its pitch.%22))%0A parser.add_argument('-s', '--speed', metavar=%22S%22, type=float, default=1.,%0A help=(%22Set the speed ratio (e.g 0.5 to play at half %22%0A %22speed)%22))%0A parser.add_argument('-l', '--frame-length', metavar='N', type=int,%0A default=None, help=(%22Set the frame length to N.%22))%0A parser.add_argument('-a', '--analysis-hop', metavar='N', type=int,%0A default=None, help=(%22Set the analysis hop to N.%22))%0A parser.add_argument('--synthesis-hop', metavar='N', type=int, default=None,%0A help=(%22Set the synthesis hop to N.%22))%0A parser.add_argument('input_filename', metavar='INPUT_FILENAME', type=str,%0A help=(%22The audio input file%22))%0A parser.add_argument('output_filename', metavar='OUTPUT_FILENAME', type=str,%0A help=(%22The audio output file%22))%0A%0A args = parser.parse_args()%0A%0A if not os.path.isfile(args.input_filename):%0A parser.error(%0A 'The input file %22%7B%7D%22 does not exist.'.format(args.input_filename))%0A%0A # Get TSM method parameters%0A parameters = %7B%7D%0A if args.speed is not None:%0A parameters%5B'speed'%5D = args.speed%0A if args.frame_length is not None:%0A parameters%5B'frame_length'%5D = args.frame_length%0A if args.analysis_hop is not None:%0A parameters%5B'analysis_hop'%5D = args.analysis_hop%0A if args.speed is not None:%0A parameters%5B'speed'%5D = args.speed%0A%0A # Get input and output files%0A input_filename = args.input_filename%0A output_filename = 
args.output_filename%0A%0A # Run the TSM procedure%0A with WavReader(input_filename) as reader:%0A channels = reader.channels%0A with WavWriter(output_filename, channels, reader.samplerate) as writer:%0A tsm = ola(channels, **parameters)%0A%0A finished = False%0A while not (finished and reader.empty):%0A tsm.read_from(reader)%0A _, finished = tsm.write_to(writer)%0A%0A finished = False%0A while not finished:%0A _, finished = tsm.flush_to(writer)%0A%0A%0Aif __name__ == %22__main__%22:%0A main()%0A
|
|
6dd52499de049d76a1bea5914f47dc5b6aae23d7
|
Add gspread example
|
xl.py
|
xl.py
|
Python
| 0.000001 |
@@ -0,0 +1,2005 @@
+#!/usr/bin/python%0A%0A#csv upload to gsheet%0A%0Aimport logging%0Aimport json%0Aimport gspread%0Aimport time%0Aimport re%0Afrom oauth2client.client import SignedJwtAssertionCredentials%0Afrom Naked.toolshed.shell import muterun_rb%0A%0Alogging.basicConfig(filename='/var/log/gspread.log',format='%25(asctime)s %25(levelname)s:%25(message)s',level=logging.INFO)%0A%0Afilename = '%3Cgoogle sheet name%3E'%0A%0A#OAuth login%0Ajson_key = json.load(open('oauth.json'))%0A%22%22%22%0A JSON in the form:%0A %7B%0A %22private_key_id%22: %22%22,%0A %22private_key%22: %22%22,%0A %22client_email%22: %22%22,%0A %22client_id%22: %22%22,%0A %22type%22: %22service_account%22%0A %7D%0A%22%22%22%0Ascope = %5B'https://spreadsheets.google.com/feeds'%5D%0Acredentials = SignedJwtAssertionCredentials(json_key%5B'client_email'%5D, json_key%5B'private_key'%5D, scope)%0Agc = gspread.authorize(credentials)%0Aif gc:%0A logging.info('OAuth succeeded')%0Aelse:%0A logging.warn('Oauth failed')%0A%0Anow = time.strftime(%22%25c%22)%0A%0A# get data from ruby script%0Aresponse = muterun_rb('script')%0Aif response:%0A logging.info('Data collected')%0Aelse:%0A logging.warn('Could not collect data')%0A%0Acsv = response.stdout%0Acsv = re.sub('/%7C%22%7C,%5B0-9%5D%5B0-9%5D%5B0-9%5DZ%7CZ', '', csv)%0Acsv_lines = csv.split('%5Cn')%0A%0A#get columns and rows for cell list%0Acolumn = len(csv_lines%5B0%5D.split(%22,%22))%0Arow = 1%0Afor line in csv_lines:%0A row += 1%0A%0A#create cell range%0Acolumnletter = chr((column - 1) + ord('A'))%0Acell_range = 'A1:%25s%25s' %25 (columnletter, row)%0A%0A#open the worksheet and create a new sheet%0Awks = gc.open(filename)%0Aif wks:%0A logging.info('%25s file opened for writing', filename)%0Aelse:%0A logging.warn('%25s file could not be opened', filename)%0A%0Asheet = wks.add_worksheet(title=now, rows=(row + 2), cols=(column + 2))%0Acell_list = sheet.range(cell_range)%0A%0A#create values list%0Acsv = re.split(%22%5Cn%7C,%22, csv)%0Afor item, cell in zip(csv, cell_list):%0A 
cell.value = item%0A%0A# Update in batch%0Aif sheet.update_cells(cell_list):%0A logging.info('upload to %25s sheet in %25s file done', now, filename)%0Aelse:%0A logging.warn('upload to %25s sheet in %25s file failed', now, filename)%0A
|
|
3e10a9fcb15e06699aa90016917b4ec5ec857faa
|
Solve task #506
|
506.py
|
506.py
|
Python
| 0.999999 |
@@ -0,0 +1,622 @@
+class Solution(object):%0A def findRelativeRanks(self, nums):%0A %22%22%22%0A :type nums: List%5Bint%5D%0A :rtype: List%5Bstr%5D%0A %22%22%22%0A def reverse_numeric(x, y):%0A return y - x%0A kek = sorted(nums, cmp=reverse_numeric)%0A l = len(nums)%0A if l %3E 0:%0A nums%5Bnums.index(kek%5B0%5D)%5D = %22Gold Medal%22%0A if l %3E 1:%0A nums%5Bnums.index(kek%5B1%5D)%5D = %22Silver Medal%22%0A if l %3E 2:%0A nums%5Bnums.index(kek%5B2%5D)%5D = %22Bronze Medal%22%0A if l %3E 3:%0A for i in range(3, l):%0A nums%5Bnums.index(kek%5Bi%5D)%5D = str(i + 1)%0A return nums%0A %0A
|
|
6c3867275693d4a771b9ff8df55aab18818344cd
|
add first app
|
app.py
|
app.py
|
Python
| 0.000001 |
@@ -0,0 +1,346 @@
+import tornado.ioloop%0Aimport tornado.web%0A%0Aclass MainHandler(tornado.web.RequestHandler):%0A def get(self):%0A self.write(%22Hello, world%22)%0A%0Adef make_app():%0A return tornado.web.Application(%5B%0A (r%22/%22, MainHandler),%0A %5D)%0A%0Aif __name__ == %22__main__%22:%0A app = make_app()%0A app.listen(8888)%0A tornado.ioloop.IOLoop.current().start()
|
|
8080153c65f4aa1d875a495caeee290fb1945081
|
Add migration
|
media_management_api/media_auth/migrations/0005_delete_token.py
|
media_management_api/media_auth/migrations/0005_delete_token.py
|
Python
| 0.000002 |
@@ -0,0 +1,297 @@
+# Generated by Django 3.2.7 on 2021-09-15 20:00%0A%0Afrom django.db import migrations%0A%0A%0Aclass Migration(migrations.Migration):%0A%0A dependencies = %5B%0A ('media_auth', '0004_auto_20160209_1902'),%0A %5D%0A%0A operations = %5B%0A migrations.DeleteModel(%0A name='Token',%0A ),%0A %5D%0A
|
|
40b0f1cd33a053be5ab528b4a50bda404f0756dc
|
Add managment command Add_images_to_sections
|
gem/management/commands/Add_images_to_sections.py
|
gem/management/commands/Add_images_to_sections.py
|
Python
| 0.000003 |
@@ -0,0 +1,3341 @@
+from __future__ import absolute_import, unicode_literals%0A%0Aimport csv%0Afrom babel import Locale%0Afrom django.core.management.base import BaseCommand%0Afrom wagtail.wagtailimages.tests.utils import Image%0Afrom molo.core.models import Languages, SectionPage, Main, SectionIndexPage%0A%0A%0Aclass Command(BaseCommand):%0A def add_arguments(self, parser):%0A parser.add_argument('csv_name', type=str)%0A parser.add_argument('locale', type=str)%0A%0A def handle(self, *args, **options):%0A csv_name = options.get('csv_name', None)%0A locale_code = options.get('locale', None)%0A mains = Main.objects.all()%0A sections = %7B%7D%0A with open(csv_name) as sections_images:%0A reader = csv.reader(sections_images)%0A if mains:%0A for row in reader:%0A key = row%5B0%5D%0A sections%5Bkey%5D = row%5B1:%5D%0A%0A for main in mains:%0A section_index = SectionIndexPage.objects.child_of(main).first()%0A main_lang = Languages.for_site(main.get_site()).languages.filter(%0A is_active=True, is_main_language=True).first()%0A translated_sections = SectionPage.objects.descendant_of(%0A section_index).filter(%0A languages__language__is_main_language=False).live()%0A for translated_section in translated_sections:%0A translated_section.image = None%0A translated_section.save_revision().publish()%0A%0A if section_index and main_lang:%0A if main_lang.locale == locale_code:%0A for section_slug in sections:%0A section = SectionPage.objects.descendant_of(%0A section_index).filter(slug=section_slug).first()%0A if section:%0A for image_title in sections.get(section_slug):%0A image = Image.objects.filter(%0A title=image_title + %22.jpg%22).first()%0A if image:%0A section.image = image%0A section.extra_style_hints = section.slug%0A section.save_revision().publish()%0A%0A else:%0A self.stdout.write(self.style.NOTICE(%0A 'Image %22%25s%22 does not exist in %22%25s%22'%0A %25 (image_title, main)))%0A else:%0A self.stdout.write(self.style.ERROR(%0A 'section %22%25s%22 does not exist in %22%25s%22'%0A %25 
(section_slug, main.get_site())))%0A else:%0A self.stdout.write(self.style.NOTICE(%0A 'Main language of %22%25s%22 is not %22%25s%22.'%0A ' The main language is %22%25s%22'%0A %25 (main.get_site(), locale_code, main_lang)))%0A else:%0A if not section_index:%0A self.stdout.write(self.style.NOTICE(%0A 'Section Index Page does not exist in %22%25s%22' %25 main))%0A if not main_lang:%0A self.stdout.write(self.style.NOTICE(%0A 'Main language does not exist in %22%25s%22' %25 main))%0A
|
|
69b900580e614ce494b9d1be0bee61464470cef7
|
Create 6kyu_digital_root.py
|
Solutions/6kyu_digital_root.py
|
Solutions/6kyu_digital_root.py
|
Python
| 0.000013 |
@@ -0,0 +1,142 @@
+from functools import reduce%0A%0Adef digital_root(n):%0A while n%3E10:%0A n = reduce(lambda x,y: x+y, %5Bint(d) for d in str(n)%5D)%0A return n%0A
|
|
6c08f3d3441cf660de910b0f3c49c3385f4469f4
|
Add "Secret" channel/emoji example
|
examples/secret.py
|
examples/secret.py
|
Python
| 0.000003 |
@@ -0,0 +1,3240 @@
+import typing%0A%0Aimport discord%0Afrom discord.ext import commands%0A%0Abot = commands.Bot(command_prefix=commands.when_mentioned, description=%22Nothing to see here!%22)%0A%0A# the %60hidden%60 keyword argument hides it from the help command. %[email protected](hidden=True)%0Aasync def secret(ctx: commands.Context):%0A %22%22%22What is this %22secret%22 you speak of?%22%22%22%0A if ctx.invoked_subcommand is None:%0A await ctx.send('Shh!', delete_after=5)%0A%0Adef create_overwrites(ctx, *objects):%0A %22%22%22This is just a helper function that creates the overwrites for the %0A voice/text channels.%0A%0A A %60discord.PermissionOverwrite%60 allows you to determine the permissions%0A of an object, whether it be a %60discord.Role%60 or a %60discord.Member%60.%0A%0A In this case, the %60view_channel%60 permission is being used to hide the channel%0A from being viewed by whoever does not meet the criteria, thus creating a%0A secret channel.%0A %22%22%22%0A%0A # a dict comprehension is being utilised here to set the same permission overwrites%0A # for each %60discord.Role%60 or %60discord.Member%60.%0A overwrites = %7B%0A obj: discord.PermissionOverwrite(view_channel=True)%0A for obj in objects%0A %7D%0A%0A # prevents the default role (@everyone) from viewing the channel%0A # if it isn't already allowed to view the channel.%0A overwrites.setdefault(ctx.guild.default_role, discord.PermissionOverwrite(view_channel=False))%0A%0A # makes sure the client is always allowed to view the channel.%0A overwrites%5Bctx.guild.me%5D = discord.PermissionOverwrite(view_channel=True)%0A%0A return overwrites%0A%0A# since these commands rely on guild related features,%0A# it is best to lock it to be guild-only.%[email protected]()%[email protected]_only()%0Aasync def text(ctx: commands.Context, name: str, *objects: typing.Union%5Bdiscord.Role, discord.Member%5D):%0A %22%22%22This makes a text channel with a specified name %0A that is only visible to roles or members that are 
specified.%0A %22%22%22%0A %0A overwrites = create_overwrites(ctx, *objects)%0A%0A await ctx.guild.create_text_channel(%0A name,%0A overwrites=overwrites,%0A topic='Top secret text channel. Any leakage of this channel may result in serious trouble.',%0A reason='Very secret business.',%0A )%0A%[email protected]()%[email protected]_only()%0Aasync def voice(ctx: commands.Context, name: str, *objects: typing.Union%5Bdiscord.Role, discord.Member%5D):%0A %22%22%22This does the same thing as the %60text%60 subcommand%0A but instead creates a voice channel.%0A %22%22%22%0A%0A overwrites = create_overwrites(ctx, *objects)%0A%0A await ctx.guild.create_voice_channel(%0A name,%0A overwrites=overwrites,%0A reason='Very secret business.'%0A )%0A%[email protected]()%[email protected]_only()%0Aasync def emoji(ctx: commands.Context, emoji: discord.PartialEmoji, *roles: discord.Role):%0A %22%22%22This clones a specified emoji that only specified roles%0A are allowed to use.%0A %22%22%22%0A%0A # fetch the emoji asset and read it as bytes.%0A emoji_bytes = await emoji.url.read()%0A%0A # the key parameter here is %60roles%60, which controls%0A # what roles are able to use the emoji.%0A await ctx.guild.create_custom_emoji(%0A name=emoji.name,%0A image=emoji_bytes,%0A roles=roles,%0A reason='Very secret business.'%0A )%0A%0A%0Abot.run('token')%0A
|
|
e0338d39f611b2ca3f202151c49fc6a4b35bd580
|
Add WallBuilder
|
exercise/WallBuilder.py
|
exercise/WallBuilder.py
|
Python
| 0.000001 |
@@ -0,0 +1,881 @@
+#!/usr/bin/env python3%0D%0A%0D%0A%0D%0Aclass Block(object):%0D%0A def __init__(self, width, height, **attr):%0D%0A self.__width = width%0D%0A self.__height = height%0D%0A%0D%0A def __eq__(self, another):%0D%0A return (self.__width == another.__width) and %5C%0D%0A (self.__height == another.__height)%0D%0A%0D%0A%0D%0Aclass Brick(Block):%0D%0A pass%0D%0A%0D%0A%0D%0Aclass Layer(Block):%0D%0A def build(self, brick, **more):%0D%0A%0D%0A pass%0D%0A%0D%0A%0D%0Aclass Wall(Block):%0D%0A pass%0D%0A%0D%0A%0D%0A%0D%0A%0D%0Aclass WallBuilder(object):%0D%0A def __init__(self, brick, *more):%0D%0A self.__bricks = %5Bbrick%5D%0D%0A for i in more:%0D%0A if i not in self.__bricks:%0D%0A self.__bricks.append(i)%0D%0A%0D%0A%0D%0Adef get(x, y, z):%0D%0A m1 = (z//x)%0D%0A m2 = (z//y)%0D%0A return %5B(i, j) for i in range(0, m1+1) for j in range(0, m2+1) if (x*i + y*j) == z%5D%0D%0A%0D%0Ab1 = Brick(2, 1)%0D%0Ab2 = Brick(3, 1)%0D%0A%0D%0Ac = WallBuilder(b1, b2)%0D%0A%0D%0Apass%0D%0A
|
|
8a30bcc511647ed0c994cb2103dd5bed8d4671a8
|
Create B_Temperature.py
|
Cas_4/Temperature/B_Temperature.py
|
Cas_4/Temperature/B_Temperature.py
|
Python
| 0.998596 |
@@ -0,0 +1,1156 @@
+import numpy as np %0Aimport matplotlib.pyplot as plt%0Aimport matplotlib.colors as colors%0Aimport cartopy.crs as ccrs%0A%0Afrom xmitgcm import open_mdsdataset %0Afrom cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER%0Aplt.ion()%0A%0Adir0 = '/homedata/bderembl/runmit/test_southatlgyre3'%0A%0Ads0 = open_mdsdataset(dir0,prefix=%5B'T'%5D)%0A%0Ant = 0%0Anz = 0%0Awhile (nt %3C 150) :%0A nt = nt+1%0A print(nt)%0A plt.figure(1)%0A ax = plt.subplot(projection=ccrs.PlateCarree());%0A ds0%5B'T'%5D%5Bnt,nz,:,:%5D.plot.pcolormesh('XC', 'YC',ax=ax,vmin=-10,vmax=35,cmap='ocean')%0A plt.title('Case 4 : Temperature ')%0A plt.text(5,5,nt,ha='center',wrap=True)%0A ax.coastlines()%0A gl = ax.gridlines(draw_labels=True, alpha = 0.5, linestyle='--');%0A gl.xlabels_top = False%0A gl.ylabels_right = False%0A gl.xformatter = LONGITUDE_FORMATTER%0A gl.yformatter = LATITUDE_FORMATTER%0A if (nt %3C 10):%0A plt.savefig('Temperature_cas4-'+'00'+str(nt)+'.png')%0A plt.clf()%0A elif (nt %3E 9) and (nt %3C 100):%0A plt.savefig('Temperature_cas4-'+'0'+str(nt)+'.png')%0A plt.clf()%0A else:%0A plt.savefig('Temperature_cas4-'+str(nt)+'.png')%0A plt.clf()%0A
|
|
a70490e52bde05d2afc6ea59416a50e11119d060
|
Add migration for Comment schema upgrade. ...
|
raggregate/rg_migrations/versions/002_Add_metadata_to_Comment_to_allow_it_to_masquerade_as_epistle.py
|
raggregate/rg_migrations/versions/002_Add_metadata_to_Comment_to_allow_it_to_masquerade_as_epistle.py
|
Python
| 0 |
@@ -0,0 +1,636 @@
+from sqlalchemy import *%0Afrom migrate import *%0Afrom raggregate.guid_recipe import GUID%0A%0Adef upgrade(migrate_engine):%0A meta = MetaData(bind=migrate_engine)%0A comments = Table('comments', meta, autoload=True)%0A unreadc = Column('unread', Boolean, default=True)%0A in_reply_toc = Column('in_reply_to', GUID, nullable=True)%0A unreadc.create(comments)%0A in_reply_toc.create(comments)%0A%0Adef downgrade(migrate_engine):%0A # Operations to reverse the above upgrade go here.%0A meta = MetaData(bind=migrate_engine)%0A comments = Table('comments', meta, autoload=True)%0A comments.c.unread.drop()%0A comments.c.in_reply_to.drop()%0A
|
|
88d480f2c97bf7779afea34798c6c082f127f3a6
|
Add missing client.py (#703)
|
client/client.py
|
client/client.py
|
Python
| 0 |
@@ -0,0 +1,325 @@
+import webapp2%0Aimport re%0A%0Aclass RedirectResource(webapp2.RequestHandler):%0A def get(self, path):%0A path = re.sub(r'/$', '', path)%0A self.redirect('/community/%25s' %25 path, permanent=True)%0A%0A# pylint: disable=invalid-name%0Aapp = webapp2.WSGIApplication(%5B%0A webapp2.Route(r'/%3C:.*%3E', handler=RedirectResource),%0A%5D, debug=True)%0A
|
|
38cf2a9f0c964c69df084d80ded6cf161ba7eb16
|
Add elf read elf file.
|
elf.py
|
elf.py
|
Python
| 0 |
@@ -0,0 +1,814 @@
+import sys%0Afrom elftools.elf.elffile import ELFFile%0Afrom elftools.common.exceptions import ELFError%0Afrom elftools.elf.segments import NoteSegment%0A%0Aclass ReadELF(object):%0A%0A def __init__(self, file):%0A self.elffile = ELFFile(file)%0A%0A def get_build(self):%0A for segment in self.elffile.iter_segments():%0A if isinstance(segment, NoteSegment):%0A for note in segment.iter_notes():%0A print note%0A%0Adef main():%0A if(len(sys.argv) %3C 2):%0A print %22Missing argument%22%0A sys.exit(1)%0A%0A with open(sys.argv%5B1%5D, 'rb') as file:%0A try:%0A readelf = ReadELF(file)%0A readelf.get_build()%0A%0A except ELFError as err:%0A sys.stderr.write('ELF error: %25s%5Cn' %25 err)%0A sys.exit(1)%0A%0Aif __name__ == '__main__':%0A main()%0A
|
|
d2f13fb17d3f9998af1a175dfd4e2bea4544fb3d
|
add example to just serialize matrix
|
examples/undocumented/python_modular/serialization_matrix_modular.py
|
examples/undocumented/python_modular/serialization_matrix_modular.py
|
Python
| 0 |
@@ -0,0 +1,535 @@
+from modshogun import *%0Afrom numpy import array%0A%0Aparameter_list=%5B%5B%5B%5B1.0,2,3%5D,%5B4,5,6%5D%5D%5D%5D%0A%0Adef serialization_matrix_modular(m):%0A%09feats=RealFeatures(array(m))%0A%09#feats.io.set_loglevel(0)%0A%09fstream = SerializableAsciiFile(%22foo.asc%22, %22w%22)%0A%09feats.save_serializable(fstream)%0A%0A%09l=Labels(array(%5B1.0,2,3%5D))%0A%09fstream = SerializableAsciiFile(%22foo2.asc%22, %22w%22)%0A%09l.save_serializable(fstream)%0A%0A%09os.unlink(%22foo.asc%22)%0A%09os.unlink(%22foo2.asc%22)%0A%0Aif __name__=='__main__':%0A%09print 'Serialization Matrix Modular'%0A%09serialization_matrix_modular(*parameter_list%5B0%5D)%0A
|
|
e7f1439cae37facaedce9c33244b58584e219869
|
Initialize P01_sendingEmail
|
books/AutomateTheBoringStuffWithPython/Chapter16/P01_sendingEmail.py
|
books/AutomateTheBoringStuffWithPython/Chapter16/P01_sendingEmail.py
|
Python
| 0.000286 |
@@ -0,0 +1,309 @@
+# This program uses the smtplib module to send emails%0A%0A# Connecting to an SMTP Server%0Aimport smtplib%0A%0Awith open('smtp_info') as config:%0A # smtp_cfg = %5Bemail, password, smtp server, port%5D%0A smtp_cfg = config.read().splitlines()%0A%0Asmtp_obj = smtplib.SMTP_SSL(smtp_cfg%5B2%5D, smtp_cfg%5B3%5D)%0Aprint(type(smtp_obj))%0A
|
|
e86047546693290556494bf00b493aa4ae770482
|
add binding.gyp for node-gyp
|
binding.gyp
|
binding.gyp
|
Python
| 0.000001 |
@@ -0,0 +1,638 @@
+%7B%0A %22targets%22: %5B%0A %7B%0A %22target_name%22: %22rawhash%22,%0A %22sources%22: %5B%0A %22src/rawhash.cpp%22,%0A %22src/MurmurHash3.h%22,%0A %22src/MurmurHash3.cpp%22%0A %5D,%0A 'cflags': %5B '%3C!@(pkg-config --cflags libsparsehash)' %5D,%0A 'conditions': %5B%0A %5B 'OS==%22linux%22 or OS==%22freebsd%22 or OS==%22openbsd%22 or OS==%22solaris%22', %7B%0A 'cflags_cc!': %5B'-fno-rtti', '-fno-exceptions'%5D,%0A 'cflags_cc+': %5B'-frtti', '-fexceptions'%5D,%0A %7D%5D,%0A %5B'OS==%22mac%22', %7B%0A 'xcode_settings': %7B%0A 'GCC_ENABLE_CPP_RTTI': 'YES',%0A 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES'%0A %7D%0A %7D%5D%0A %5D%0A %7D%0A %5D%0A%7D
|
|
888b27db4d91ebba91eb935532f961943453b7c8
|
add update command to new certificate data model
|
paralapraca/management/commands/update_certificate_templates.py
|
paralapraca/management/commands/update_certificate_templates.py
|
Python
| 0.000001 |
@@ -0,0 +1,2094 @@
+# -*- coding: utf-8 -*-%0Afrom django.core.management.base import BaseCommand%0Afrom django.core.files import File%0A%0Afrom core.models import CertificateTemplate%0Afrom timtec.settings import STATIC_ROOT%0Afrom paralapraca.models import CertificateData, Contract%0A%0Aimport os%0A%0Aclass Command(BaseCommand):%0A help = 'Create certificate and receipt templates data for CertificateTemplate existent'%0A%0A receipt_text = '%3Cp%3Einscrita no cadastro de pessoa física sob o número %7BCPF%7D %3C/p%3E%5C%0A%3Cp%3Eparticipou do %3Cem%3E%7BMODULO%7D%3C/em%3E%3C/p%3E%5C%0A%3Cp%3Eno Ambiente Virtual de Aprendizagem do Programa Paralapracá.%3C/p%3E'%0A%0A certificate_text = '%3Cp style=%22text-align: center;%22%3Einscrita no cadastro de pessoa física sob o número %7BCPF%7D%3C/p%3E%5C%0A%3Cp style=%22text-align: center;%22%3Econcluiu o %3Cstrong%3E%7BMODULO%7D%3C/strong%3E,%3C/p%3E%5C%0A%3Cp style=%22text-align: center;%22%3Ecom carga horária total de 40 horas, no %3C/p%3E%5C%0A%3Cp style=%22text-align: center;%22%3EAmbiente Virtual de Aprendizagem do Programa Paralapracá.%3C/p%3E'%0A%0A%0A def handle(self, *files, **options):%0A types = CertificateData.TYPES%0A%0A cts = CertificateTemplate.objects.all()%0A plpc = Contract.objects.first()%0A%0A plpc_path = os.path.join(STATIC_ROOT, 'img/site-logo-orange.svg')%0A avante_path = os.path.join(STATIC_ROOT, 'img/logo-avante.png')%0A plpc_logo = File(open(plpc_path, 'r'))%0A avante_logo = File(open(avante_path, 'r'))%0A%0A for ct in cts:%0A ct.base_logo = avante_logo%0A ct.save()%0A cdr = CertificateData(contract=plpc, type=types%5B0%5D%5B0%5D,%0A certificate_template=ct,%0A site_logo=plpc_logo,%0A text=self.receipt_text)%0A cdr.save()%0A%0A ct.pk = None%0A ct.save()%0A%0A cdc = CertificateData(contract=plpc, type=types%5B1%5D%5B0%5D,%0A certificate_template=ct,%0A site_logo=plpc_logo,%0A text=self.certificate_text)%0A cdc.save()%0A
|
|
a0d3ae80a2f4f9ae76aaa4d672be460ce3a657d4
|
add command to populate change messages
|
corehq/apps/users/management/commands/add_location_change_message.py
|
corehq/apps/users/management/commands/add_location_change_message.py
|
Python
| 0.000001 |
@@ -0,0 +1,2016 @@
+from django.core.management.base import BaseCommand%0Afrom django.db.models import Q%0A%0Afrom corehq.apps.users.audit.change_messages import UserChangeMessage%0Afrom corehq.apps.users.models import UserHistory%0A%0A%0Aclass Command(BaseCommand):%0A help = %22Add locations removed change messages on commcare user's User History records %22 %5C%0A %22for https://github.com/dimagi/commcare-hq/pull/30253/commits/76996b5a129be4e95f5c5bedd0aba74c50088d15%22%0A%0A def add_arguments(self, parser):%0A parser.add_argument(%0A '--save',%0A action='store_true',%0A dest='save',%0A default=False,%0A help=%22actually update records else just log%22,%0A )%0A%0A def handle(self, *args, **options):%0A save = options%5B'save'%5D%0A # since we need locations removed, filter for update logs%0A records = UserHistory.objects.filter(%0A Q(changes__has_key='location_id') %7C Q(changes__has_key='assigned_location_ids'),%0A user_type='CommCareUser',%0A action=UserHistory.UPDATE,%0A )%0A with open(%22add_location_change_message.csv%22, %22w%22) as _file:%0A for record in records:%0A updated = False%0A if 'location_id' in record.changes and record.changes%5B'location_id'%5D is None:%0A if 'location' not in record.change_messages:%0A record.change_messages.update(UserChangeMessage.primary_location_removed())%0A updated = True%0A if record.changes.get('assigned_location_ids') == %5B%5D:%0A if 'assigned_locations' not in record.change_messages:%0A record.change_messages.update(UserChangeMessage.assigned_locations_info(%5B%5D))%0A updated = True%0A if updated:%0A _file.write(%0A f%22%7Brecord.pk%7D,%7Brecord.user_id%7D,%7Brecord.changes%7D,%7Brecord.change_messages%7D%5Cn%22%0A )%0A if save:%0A record.save()%0A
|
|
2a55c12d5f4e0bddd5fcb3ba7cc5ddf9bbd17764
|
Update forward compatibility horizon to 2021-01-27
|
tensorflow/python/compat/compat.py
|
tensorflow/python/compat/compat.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2021, 1, 26)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
def _date_to_date_number(year, month, day):
return (year << 9) | (month << 5) | day
def _update_forward_compatibility_date_number(date_to_override=None):
"""Update the base date to compare in forward_compatible function."""
global _FORWARD_COMPATIBILITY_DATE_NUMBER
if date_to_override:
date = date_to_override
else:
date = _FORWARD_COMPATIBILITY_HORIZON
delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
if delta_days:
date += datetime.timedelta(days=int(delta_days))
if date < _FORWARD_COMPATIBILITY_HORIZON:
logging.warning("Trying to set the forward compatibility date to the past"
" date %s. This will be ignored by TensorFlow." % (date))
return
_FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
date.year, date.month, date.day)
_update_forward_compatibility_date_number()
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
"""Return true if the forward compatibility window has expired.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
Forward-compatibility refers to scenarios where the producer of a TensorFlow
model (a GraphDef or SavedModel) is compiled against a version of the
TensorFlow library newer than what the consumer was compiled against. The
"producer" is typically a Python program that constructs and trains a model
while the "consumer" is typically another program that loads and serves the
model.
TensorFlow has been supporting a 3 week forward-compatibility window for
programs compiled from source at HEAD.
For example, consider the case where a new operation `MyNewAwesomeAdd` is
created with the intent of replacing the implementation of an existing Python
wrapper - `tf.add`. The Python wrapper implementation should change from
something like:
```python
def add(inputs, name=None):
return gen_math_ops.add(inputs, name)
```
to:
```python
from tensorflow.python.compat import compat
def add(inputs, name=None):
if compat.forward_compatible(year, month, day):
# Can use the awesome new implementation.
return gen_math_ops.my_new_awesome_add(inputs, name)
# To maintain forward compatibility, use the old implementation.
return gen_math_ops.add(inputs, name)
```
Where `year`, `month`, and `day` specify the date beyond which binaries
that consume a model are expected to have been updated to include the
new operations. This date is typically at least 3 weeks beyond the date
the code that adds the new operation is committed.
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Returns:
True if the caller can expect that serialized TensorFlow graphs produced
can be consumed by programs that are compiled with the TensorFlow library
source code after (year, month, day).
"""
return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number(
year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
"""Context manager for testing forward compatibility of generated graphs.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
To ensure forward compatibility of generated graphs (see `forward_compatible`)
with older binaries, new features can be gated with:
```python
if compat.forward_compatible(year=2018, month=08, date=01):
generate_graph_with_new_features()
else:
generate_graph_so_older_binaries_can_consume_it()
```
However, when adding new features, one may want to unittest it before
the forward compatibility window expires. This context manager enables
such tests. For example:
```python
from tensorflow.python.compat import compat
def testMyNewFeature(self):
with compat.forward_compatibility_horizon(2018, 08, 02):
# Test that generate_graph_with_new_features() has an effect
```
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Yields:
Nothing.
"""
try:
_update_forward_compatibility_date_number(datetime.date(year, month, day))
yield
finally:
_update_forward_compatibility_date_number()
|
Python
| 0 |
@@ -1449,9 +1449,9 @@
1, 2
-6
+7
)%0A_F
|
a35e7746844af3443f3fa6c19bd32e3f3c64da82
|
Update compat.py
|
tensorflow/python/compat/compat.py
|
tensorflow/python/compat/compat.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
import datetime
import os
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2022, 7, 8)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
def _date_to_date_number(year, month, day):
return (year << 9) | (month << 5) | day
def _update_forward_compatibility_date_number(date_to_override=None):
"""Update the base date to compare in forward_compatible function."""
global _FORWARD_COMPATIBILITY_DATE_NUMBER
if date_to_override:
date = date_to_override
else:
date = _FORWARD_COMPATIBILITY_HORIZON
delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
if delta_days:
date += datetime.timedelta(days=int(delta_days))
if date < _FORWARD_COMPATIBILITY_HORIZON:
logging.warning("Trying to set the forward compatibility date to the past"
" date %s. This will be ignored by TensorFlow." % (date))
return
_FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
date.year, date.month, date.day)
_update_forward_compatibility_date_number()
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
"""Return true if the forward compatibility window has expired.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
Forward-compatibility refers to scenarios where the producer of a TensorFlow
model (a GraphDef or SavedModel) is compiled against a version of the
TensorFlow library newer than what the consumer was compiled against. The
"producer" is typically a Python program that constructs and trains a model
while the "consumer" is typically another program that loads and serves the
model.
TensorFlow has been supporting a 3 week forward-compatibility window for
programs compiled from source at HEAD.
For example, consider the case where a new operation `MyNewAwesomeAdd` is
created with the intent of replacing the implementation of an existing Python
wrapper - `tf.add`. The Python wrapper implementation should change from
something like:
```python
def add(inputs, name=None):
return gen_math_ops.add(inputs, name)
```
to:
```python
from tensorflow.python.compat import compat
def add(inputs, name=None):
if compat.forward_compatible(year, month, day):
# Can use the awesome new implementation.
return gen_math_ops.my_new_awesome_add(inputs, name)
# To maintain forward compatibility, use the old implementation.
return gen_math_ops.add(inputs, name)
```
Where `year`, `month`, and `day` specify the date beyond which binaries
that consume a model are expected to have been updated to include the
new operations. This date is typically at least 3 weeks beyond the date
the code that adds the new operation is committed.
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Returns:
True if the caller can expect that serialized TensorFlow graphs produced
can be consumed by programs that are compiled with the TensorFlow library
source code after (year, month, day).
"""
return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number(
year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
"""Context manager for testing forward compatibility of generated graphs.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
To ensure forward compatibility of generated graphs (see `forward_compatible`)
with older binaries, new features can be gated with:
```python
if compat.forward_compatible(year=2018, month=08, date=01):
generate_graph_with_new_features()
else:
generate_graph_so_older_binaries_can_consume_it()
```
However, when adding new features, one may want to unittest it before
the forward compatibility window expires. This context manager enables
such tests. For example:
```python
from tensorflow.python.compat import compat
def testMyNewFeature(self):
with compat.forward_compatibility_horizon(2018, 08, 02):
# Test that generate_graph_with_new_features() has an effect
```
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Yields:
Nothing.
"""
try:
_update_forward_compatibility_date_number(datetime.date(year, month, day))
yield
finally:
_update_forward_compatibility_date_number()
|
Python
| 0 |
@@ -4855,32 +4855,36 @@
bility%5D(https://
+www.
tensorflow.org/g
@@ -4899,32 +4899,9 @@
sion
-_compat#backward_forward
+s
).%0A%0A
|
545fa4ec537ba9615fb2a14fd1de24f4348f0d6c
|
Update forward compatibility horizon to 2020-04-21
|
tensorflow/python/compat/compat.py
|
tensorflow/python/compat/compat.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2020, 4, 20)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
def _date_to_date_number(year, month, day):
return (year << 9) | (month << 5) | day
def _update_forward_compatibility_date_number(date_to_override=None):
"""Update the base date to compare in forward_compatible function."""
global _FORWARD_COMPATIBILITY_DATE_NUMBER
if date_to_override:
date = date_to_override
else:
date = _FORWARD_COMPATIBILITY_HORIZON
delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
if delta_days:
date += datetime.timedelta(days=int(delta_days))
_FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
date.year, date.month, date.day)
_update_forward_compatibility_date_number()
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
"""Return true if the forward compatibility window has expired.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
Forward-compatibility refers to scenarios where the producer of a TensorFlow
model (a GraphDef or SavedModel) is compiled against a version of the
TensorFlow library newer than what the consumer was compiled against. The
"producer" is typically a Python program that constructs and trains a model
while the "consumer" is typically another program that loads and serves the
model.
TensorFlow has been supporting a 3 week forward-compatibility window for
programs compiled from source at HEAD.
For example, consider the case where a new operation `MyNewAwesomeAdd` is
created with the intent of replacing the implementation of an existing Python
wrapper - `tf.add`. The Python wrapper implementation should change from
something like:
```python
def add(inputs, name=None):
return gen_math_ops.add(inputs, name)
```
to:
```python
from tensorflow.python.compat import compat
def add(inputs, name=None):
if compat.forward_compatible(year, month, day):
# Can use the awesome new implementation.
return gen_math_ops.my_new_awesome_add(inputs, name)
# To maintain forward compatibility, use the old implementation.
return gen_math_ops.add(inputs, name)
```
Where `year`, `month`, and `day` specify the date beyond which binaries
that consume a model are expected to have been updated to include the
new operations. This date is typically at least 3 weeks beyond the date
the code that adds the new operation is committed.
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Returns:
True if the caller can expect that serialized TensorFlow graphs produced
can be consumed by programs that are compiled with the TensorFlow library
source code after (year, month, day).
"""
return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number(
year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
"""Context manager for testing forward compatibility of generated graphs.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
To ensure forward compatibility of generated graphs (see `forward_compatible`)
with older binaries, new features can be gated with:
```python
if compat.forward_compatible(year=2018, month=08, date=01):
generate_graph_with_new_features()
else:
generate_graph_so_older_binaries_can_consume_it()
```
However, when adding new features, one may want to unittest it before
the forward compatibility window expires. This context manager enables
such tests. For example:
```python
from tensorflow.python.compat import compat
def testMyNewFeature(self):
with compat.forward_compatibility_horizon(2018, 08, 02):
# Test that generate_graph_with_new_features() has an effect
```
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Yields:
Nothing.
"""
try:
_update_forward_compatibility_date_number(datetime.date(year, month, day))
yield
finally:
_update_forward_compatibility_date_number()
|
Python
| 0 |
@@ -1383,17 +1383,17 @@
20, 4, 2
-0
+1
)%0A_FORWA
|
22b5de61e0ea458ae8a786886a5bdff4545b2f76
|
Update forward compatibility horizon to 2018-11-19
|
tensorflow/python/compat/compat.py
|
tensorflow/python/compat/compat.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2018, 11, 18)
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
"""Return true if the forward compatibility window has expired.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
Forward-compatibility refers to scenarios where the producer of a TensorFlow
model (a GraphDef or SavedModel) is compiled against a version of the
TensorFlow library newer than what the consumer was compiled against. The
"producer" is typically a Python program that constructs and trains a model
while the "consumer" is typically another program that loads and serves the
model.
TensorFlow has been supporting a 3 week forward-compatibility window for
programs compiled from source at HEAD.
For example, consider the case where a new operation `MyNewAwesomeAdd` is
created with the intent of replacing the implementation of an existing Python
wrapper - `tf.add`. The Python wrapper implementation should change from
something like:
```python
def add(inputs, name=None):
return gen_math_ops.add(inputs, name)
```
to:
```python
from tensorflow.python.compat import compat
def add(inputs, name=None):
if compat.forward_compatible(year, month, day):
# Can use the awesome new implementation.
return gen_math_ops.my_new_awesome_add(inputs, name)
# To maintain forward compatibiltiy, use the old implementation.
return gen_math_ops.add(inputs, name)
```
Where `year`, `month`, and `day` specify the date beyond which binaries
that consume a model are expected to have been updated to include the
new operations. This date is typically at least 3 weeks beyond the date
the code that adds the new operation is committed.
Args:
year: A year (e.g., 2018).
month: A month (1 <= month <= 12) in year.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month.
Returns:
True if the caller can expect that serialized TensorFlow graphs produced
can be consumed by programs that are compiled with the TensorFlow library
source code after (year, month, day).
"""
return _FORWARD_COMPATIBILITY_HORIZON > datetime.date(year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
"""Context manager for testing forward compatibility of generated graphs.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
To ensure forward compatibility of generated graphs (see `forward_compatible`)
with older binaries, new features can be gated with:
```python
if compat.forward_compatible(year=2018, month=08, date=01):
generate_graph_with_new_features()
else:
generate_graph_so_older_binaries_can_consume_it()
```
However, when adding new features, one may want to unittest it before
the forward compatibility window expires. This context manager enables
such tests. For example:
```python
from tensorflow.python.compat import compat
def testMyNewFeature(self):
with compat.forward_compatibility_horizon(2018, 08, 02):
# Test that generate_graph_with_new_features() has an effect
```
Args :
year: A year (e.g. 2018).
month: A month (1 <= month <= 12) in year.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month.
Yields:
Nothing.
"""
global _FORWARD_COMPATIBILITY_HORIZON
try:
old_compat_date = _FORWARD_COMPATIBILITY_HORIZON
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(year, month, day)
yield
finally:
_FORWARD_COMPATIBILITY_HORIZON = old_compat_date
|
Python
| 0 |
@@ -1139,17 +1139,17 @@
8, 11, 1
-8
+9
)%0A%0A%0A@tf_
|
942e71215ffb5baf5f23d6dd532f99832b161d0f
|
Update forward compatibility horizon to 2020-11-02
|
tensorflow/python/compat/compat.py
|
tensorflow/python/compat/compat.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2020, 11, 1)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
def _date_to_date_number(year, month, day):
return (year << 9) | (month << 5) | day
def _update_forward_compatibility_date_number(date_to_override=None):
  """Update the base date to compare in forward_compatible function.

  Recomputes the module-level `_FORWARD_COMPATIBILITY_DATE_NUMBER` from either
  `date_to_override` (used by `forward_compatibility_horizon`) or the baked-in
  `_FORWARD_COMPATIBILITY_HORIZON`, the latter optionally shifted forward by
  the TF_FORWARD_COMPATIBILITY_DELTA_DAYS environment variable.

  Args:
    date_to_override: Optional `datetime.date` replacing the default horizon.
      When None, the compiled-in horizon plus the environment delta is used.
  """
  global _FORWARD_COMPATIBILITY_DATE_NUMBER

  if date_to_override:
    date = date_to_override
  else:
    date = _FORWARD_COMPATIBILITY_HORIZON
    # The env-var delta only applies to the default horizon, not to explicit
    # overrides from the testing context manager.
    delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
    if delta_days:
      date += datetime.timedelta(days=int(delta_days))

  if date < _FORWARD_COMPATIBILITY_HORIZON:
    # Moving the horizon into the past would silently disable newly-gated
    # features; warn and keep the previously computed date number.
    logging.warning("Trying to set the forward compatibility date to the past"
                    " date %s. This will be ignored by TensorFlow." % (date))
    return
  _FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
      date.year, date.month, date.day)
_update_forward_compatibility_date_number()
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
"""Return true if the forward compatibility window has expired.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
Forward-compatibility refers to scenarios where the producer of a TensorFlow
model (a GraphDef or SavedModel) is compiled against a version of the
TensorFlow library newer than what the consumer was compiled against. The
"producer" is typically a Python program that constructs and trains a model
while the "consumer" is typically another program that loads and serves the
model.
TensorFlow has been supporting a 3 week forward-compatibility window for
programs compiled from source at HEAD.
For example, consider the case where a new operation `MyNewAwesomeAdd` is
created with the intent of replacing the implementation of an existing Python
wrapper - `tf.add`. The Python wrapper implementation should change from
something like:
```python
def add(inputs, name=None):
return gen_math_ops.add(inputs, name)
```
to:
```python
from tensorflow.python.compat import compat
def add(inputs, name=None):
if compat.forward_compatible(year, month, day):
# Can use the awesome new implementation.
return gen_math_ops.my_new_awesome_add(inputs, name)
# To maintain forward compatibility, use the old implementation.
return gen_math_ops.add(inputs, name)
```
Where `year`, `month`, and `day` specify the date beyond which binaries
that consume a model are expected to have been updated to include the
new operations. This date is typically at least 3 weeks beyond the date
the code that adds the new operation is committed.
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Returns:
True if the caller can expect that serialized TensorFlow graphs produced
can be consumed by programs that are compiled with the TensorFlow library
source code after (year, month, day).
"""
return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number(
year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
"""Context manager for testing forward compatibility of generated graphs.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
To ensure forward compatibility of generated graphs (see `forward_compatible`)
with older binaries, new features can be gated with:
```python
if compat.forward_compatible(year=2018, month=08, date=01):
generate_graph_with_new_features()
else:
generate_graph_so_older_binaries_can_consume_it()
```
However, when adding new features, one may want to unittest it before
the forward compatibility window expires. This context manager enables
such tests. For example:
```python
from tensorflow.python.compat import compat
def testMyNewFeature(self):
with compat.forward_compatibility_horizon(2018, 08, 02):
# Test that generate_graph_with_new_features() has an effect
```
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Yields:
Nothing.
"""
try:
_update_forward_compatibility_date_number(datetime.date(year, month, day))
yield
finally:
_update_forward_compatibility_date_number()
|
Python
| 0 |
@@ -1445,17 +1445,17 @@
20, 11,
-1
+2
)%0A_FORWA
|
da9706d518adf45b5d2dff480d80e78be12575ca
|
Update forward compatibility horizon to 2020-08-25
|
tensorflow/python/compat/compat.py
|
tensorflow/python/compat/compat.py
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for API compatibility between TensorFlow release versions.
See [Version
Compatibility](https://tensorflow.org/guide/version_compat#backward_forward)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import datetime
import os
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util.tf_export import tf_export
# This value changes every day with an automatic CL. It can be modified in code
# via `forward_compatibility_horizon()` or with the environment variable
# TF_FORWARD_COMPATIBILITY_DELTA_DAYS, which is added to the compatibility date.
_FORWARD_COMPATIBILITY_HORIZON = datetime.date(2020, 8, 24)
_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME = "TF_FORWARD_COMPATIBILITY_DELTA_DAYS"
_FORWARD_COMPATIBILITY_DATE_NUMBER = None
def _date_to_date_number(year, month, day):
return (year << 9) | (month << 5) | day
def _update_forward_compatibility_date_number(date_to_override=None):
"""Update the base date to compare in forward_compatible function."""
global _FORWARD_COMPATIBILITY_DATE_NUMBER
if date_to_override:
date = date_to_override
else:
date = _FORWARD_COMPATIBILITY_HORIZON
delta_days = os.getenv(_FORWARD_COMPATIBILITY_DELTA_DAYS_VAR_NAME)
if delta_days:
date += datetime.timedelta(days=int(delta_days))
if date < _FORWARD_COMPATIBILITY_HORIZON:
logging.warning("Trying to set the forward compatibility date to the past"
" date %s. This will be ignored by TensorFlow." % (date))
return
_FORWARD_COMPATIBILITY_DATE_NUMBER = _date_to_date_number(
date.year, date.month, date.day)
_update_forward_compatibility_date_number()
@tf_export("compat.forward_compatible")
def forward_compatible(year, month, day):
"""Return true if the forward compatibility window has expired.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
Forward-compatibility refers to scenarios where the producer of a TensorFlow
model (a GraphDef or SavedModel) is compiled against a version of the
TensorFlow library newer than what the consumer was compiled against. The
"producer" is typically a Python program that constructs and trains a model
while the "consumer" is typically another program that loads and serves the
model.
TensorFlow has been supporting a 3 week forward-compatibility window for
programs compiled from source at HEAD.
For example, consider the case where a new operation `MyNewAwesomeAdd` is
created with the intent of replacing the implementation of an existing Python
wrapper - `tf.add`. The Python wrapper implementation should change from
something like:
```python
def add(inputs, name=None):
return gen_math_ops.add(inputs, name)
```
to:
```python
from tensorflow.python.compat import compat
def add(inputs, name=None):
if compat.forward_compatible(year, month, day):
# Can use the awesome new implementation.
return gen_math_ops.my_new_awesome_add(inputs, name)
# To maintain forward compatibility, use the old implementation.
return gen_math_ops.add(inputs, name)
```
Where `year`, `month`, and `day` specify the date beyond which binaries
that consume a model are expected to have been updated to include the
new operations. This date is typically at least 3 weeks beyond the date
the code that adds the new operation is committed.
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Returns:
True if the caller can expect that serialized TensorFlow graphs produced
can be consumed by programs that are compiled with the TensorFlow library
source code after (year, month, day).
"""
return _FORWARD_COMPATIBILITY_DATE_NUMBER > _date_to_date_number(
year, month, day)
@tf_export("compat.forward_compatibility_horizon")
@tf_contextlib.contextmanager
def forward_compatibility_horizon(year, month, day):
"""Context manager for testing forward compatibility of generated graphs.
See [Version
compatibility](https://tensorflow.org/guide/version_compat#backward_forward).
To ensure forward compatibility of generated graphs (see `forward_compatible`)
with older binaries, new features can be gated with:
```python
if compat.forward_compatible(year=2018, month=08, date=01):
generate_graph_with_new_features()
else:
generate_graph_so_older_binaries_can_consume_it()
```
However, when adding new features, one may want to unittest it before
the forward compatibility window expires. This context manager enables
such tests. For example:
```python
from tensorflow.python.compat import compat
def testMyNewFeature(self):
with compat.forward_compatibility_horizon(2018, 08, 02):
# Test that generate_graph_with_new_features() has an effect
```
Args:
year: A year (e.g., 2018). Must be an `int`.
month: A month (1 <= month <= 12) in year. Must be an `int`.
day: A day (1 <= day <= 31, or 30, or 29, or 28) in month. Must be an
`int`.
Yields:
Nothing.
"""
try:
_update_forward_compatibility_date_number(datetime.date(year, month, day))
yield
finally:
_update_forward_compatibility_date_number()
|
Python
| 0 |
@@ -1449,9 +1449,9 @@
8, 2
-4
+5
)%0A_F
|
fb133e260722fd02cb6f14ede15dbdb1fdf91af7
|
Add gtk dependencies tests
|
test/test_dependencies.py
|
test/test_dependencies.py
|
Python
| 0 |
@@ -0,0 +1,1725 @@
+%22%22%22%0ACopyright (c) 2017, Michael Sonntag ([email protected])%0A%0AAll rights reserved.%0A%0ARedistribution and use in source and binary forms, with or without%0Amodification, are permitted under the terms of the BSD License. See%0ALICENSE file in the root of the project.%0A%22%22%22%0A%0Aimport unittest%0A%0A%0Aclass DependencyTest(unittest.TestCase):%0A %22%22%22%0A This class checks for non python gtk3 dependencies.%0A%0A This Class will be removed, it is testing how travis and conda%0A can play nice with gtk3.%0A %22%22%22%0A def test_gi_dependency(self):%0A has_error = False%0A try:%0A import gi%0A except (ImportError, ValueError) as _:%0A has_error = True%0A%0A self.assertFalse(has_error)%0A%0A def test_pygtkcompat(self):%0A has_error = False%0A try:%0A import gi%0A import pygtkcompat%0A pygtkcompat.enable()%0A pygtkcompat.enable_gtk(version='3.0')%0A except (ImportError, ValueError) as _:%0A has_error = True%0A%0A self.assertFalse(has_error)%0A%0A def test_gtk(self):%0A has_error = False%0A try:%0A import gi%0A import pygtkcompat%0A pygtkcompat.enable()%0A pygtkcompat.enable_gtk(version='3.0')%0A import gtk%0A except (ImportError, ValueError) as _:%0A has_error = True%0A%0A self.assertFalse(has_error)%0A%0A def test_gobject(self):%0A has_error = False%0A try:%0A import gi%0A import pygtkcompat%0A pygtkcompat.enable()%0A pygtkcompat.enable_gtk(version='3.0')%0A import gtk%0A import gobject%0A except (ImportError, ValueError) as _:%0A has_error = True%0A%0A self.assertFalse(has_error)%0A
|
|
c09356487360ec373c98ec50800a450a3966a60f
|
define prompt function
|
lib.py
|
lib.py
|
Python
| 0.000196 |
@@ -0,0 +1,221 @@
+# weather-app%0A# lib.py%0A# Classes and functions for weather-app.%0A%0A%0A# Function definitions.%0A# ---------------------%0A%0A# Prompt for user input. Accepts a prompt message we want to show.%0Adef prompt(msg):%0A return input(msg)%0A
|
|
e51185a5538ab20250d94c4fe5e71bcfcfed0e1e
|
trying to get the hash 1dbd981fe6985776b644b173a4d0385ddc1aa2a829688d1e0000000000000000
|
btcrelay.py
|
btcrelay.py
|
# Stored variables:
#
# Last known block
# 10: version
# 11: hashPrevBlock
# 12: hashMerkleRoot
# 13: time
# 14: bits
# 15: nonce
# 16: blockHash / lastKnownBlock
# 17: score
#
def shared():
TWO_POW_24 = 2 ^ 24
def init():
self.storage[16] = 0x00000000000000000cfdd50d917943949fa708829ab70108c98cdb9f7d62339d
def code():
return(-1)
def storeBlockHeader(version, hashPrevBlock, hashMerkleRoot, time, bits, nonce):
exp = bits / TWO_POW_24
mant = bits & 0xffffff
target = mant * slt(1, (8*(exp - 3)))
def flipBytes(n):
    # Reverse the byte order of a 256-bit (32-byte) word.
    # NOTE(review): despite the .py extension this file appears to target
    # Serpent (early Ethereum contract language): there `^` is exponentiation
    # and `/` integer division, which makes the per-byte shifts below exact.
    # Under CPython `^` would be bitwise XOR and this routine would be wrong —
    # confirm the intended toolchain before reusing.
    numByte = 32
    mask = 0xff      # selects byte i of n; walks upward one byte per step
    result = 0
    i = 0
    while i < numByte:
        b = n & mask                  # isolate byte i (still at its position)
        b /= 2^(i*8)                  # shift it down to the low byte
        b *= 2^((numByte-i-1)*8)      # shift it up to the mirrored position
        mask *= 256
        result = result | b
        i += 1
    return(result)
# shift left
def slt(n, x):
return(n * 2^x)
|
Python
| 0.999988 |
@@ -321,24 +321,48 @@
def code():%0A
+ ret = self.slt(2,4)%0A
return(-
@@ -360,18 +360,19 @@
return(
--1
+ret
)%0A%0Adef s
@@ -545,16 +545,17 @@
- 3)))%0A%0A
+%0A
def flip
@@ -833,16 +833,16 @@
(n, x):%0A
-
retu
@@ -853,8 +853,301 @@
* 2%5Ex)%0A
+%0A%0Adef test():%0A b1 = 0x0100000081cd02ab7e569e8bcd9317e2%0A b2 = 0xfe99f2de44d49ab2b8851ba4a308000000000000e320b6c2fffc8d750423db8b%0A b3 = 0x1eb942ae710e951ed797f7affc8892b0f1fc122bc7f5d74df2b9441a42a14695%0A hash1 = sha256(%5Bb1,b2,b3%5D, 3)%0A hash2 = sha256(%5Bhash1%5D, 1)%0A return(hash2)%0A
|
e6bd5bbb3a46413b1ad164e0ef6ab66e89d9c95f
|
Add buildbot.py
|
buildbot.py
|
buildbot.py
|
Python
| 0.000001 |
@@ -0,0 +1,240 @@
+%EF%BB%BF#!/usr/bin/env python%0A# encoding: utf-8%0A%0Aproject_name = 'stub'%0A%0Adef configure(options):%0A pass%0A%0Adef build(options):%0A pass%0A%0Adef run_tests(options):%0A pass%0A%0Adef coverage_settings(options):%0A options%5B'required_line_coverage'%5D = 80.0%0A
|
|
399cd799ae993412a6ad2455b8e11f4019aa9509
|
Add models admin
|
td_biblio/admin.py
|
td_biblio/admin.py
|
Python
| 0 |
@@ -0,0 +1,946 @@
+# -*- coding: utf-8 -*-%0Afrom django.contrib import admin%0A%0Afrom .models import Author, Editor, Journal, Publisher, Entry, Collection%0A%0A%0Aclass AbstractHumanAdmin(admin.ModelAdmin):%0A list_display = ('last_name', 'first_name')%0A%0A%0Aclass AuthorAdmin(AbstractHumanAdmin):%0A pass%0A%0A%0Aclass EditorAdmin(AbstractHumanAdmin):%0A pass%0A%0A%0Aclass JournalAdmin(admin.ModelAdmin):%0A pass%0A%0A%0Aclass PublisherAdmin(admin.ModelAdmin):%0A pass%0A%0A%0Aclass EntryAdmin(admin.ModelAdmin):%0A list_display = ('title', 'type', 'publication_date', 'journal')%0A list_filter = ('publication_date', 'journal', 'authors')%0A date_hierarchy = 'publication_date'%0A%0A%0Aclass CollectionAdmin(admin.ModelAdmin):%0A pass%0A%0A%0Aadmin.site.register(Author, AuthorAdmin)%0Aadmin.site.register(Editor, EditorAdmin)%0Aadmin.site.register(Journal, JournalAdmin)%0Aadmin.site.register(Publisher, PublisherAdmin)%0Aadmin.site.register(Entry, EntryAdmin)%0Aadmin.site.register(Collection, CollectionAdmin)%0A
|
|
6d3f6951d846c50fcc1ff011f9129a4e1e3f7de1
|
Add unit tests for BMI version of storm
|
testing/test_storm_bmi.py
|
testing/test_storm_bmi.py
|
Python
| 0 |
@@ -0,0 +1,1589 @@
+#! /usr/bin/env python%0A#%0A# Tests for the BMI version of %60storm%60.%0A#%0A# Call with:%0A# $ nosetests -sv%0A#%0A# Mark Piper ([email protected])%0A%0Afrom nose.tools import *%0Aimport os%0Aimport shutil%0Afrom subprocess import call%0A%0A# Global variables%0Astart_dir = os.getcwd()%0Adata_dir = os.path.join(start_dir, 'testing', 'data')%0Ainput_file1 = os.path.join(data_dir, 'test1.in')%0Ainput_file2 = os.path.join(data_dir, 'test2.in')%0Abuild_dir = os.path.join(start_dir, 'build')%0Aexe = './bmi/storm'%0A%0A# Fixtures -------------------------------------------------------------%0A%0Adef setup_module():%0A '''%0A Called before any tests are performed.%0A '''%0A print('*** BMI tests')%0A os.mkdir(build_dir)%0A os.chdir(build_dir)%0A%0Adef teardown_module():%0A '''%0A Called after all tests have completed.%0A '''%0A os.chdir(start_dir)%0A shutil.rmtree(build_dir)%0A%0A# Tests ----------------------------------------------------------------%0A%0Adef test_configure():%0A '''%0A Test whether CMake executes successfully%0A '''%0A call(%5B'cmake', '..'%5D)%0A%0Adef test_compile():%0A '''%0A Test whether %60storm%60 compiles%0A '''%0A call(%5B'make'%5D)%0A%0Adef test_without_input_file():%0A '''%0A Check that storm runs without an input file%0A '''%0A r = call(%5Bexe%5D)%0A assert_equal(r, 0)%0A%0Adef test_with_singlestep_input_file():%0A '''%0A Check that storm runs with a one-step input file%0A '''%0A r = call(%5Bexe, input_file1%5D)%0A assert_equal(r, 0)%0A%0Adef test_with_multistep_input_file():%0A '''%0A Check that storm runs with a multi-step input file%0A '''%0A r = call(%5Bexe, input_file2%5D)%0A assert_equal(r, 0)%0A
|
|
9ef0e5e6dc50af7d5ccc27cc4d41abce72b51456
|
Create runcount.py
|
bin/runcount.py
|
bin/runcount.py
|
Python
| 0.000002 |
@@ -0,0 +1,18 @@
+#!/usr/bin/python%0A
|
|
c3a281c9271163ed1b68da52c6c43af50c18d4e8
|
fix mempool_persist.py dump issue, missing sync with validation interface.
|
test/functional/mempool_persist.py
|
test/functional/mempool_persist.py
|
#!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or https://www.opensource.org/licenses/mit-license.php.
"""Test mempool persistence.
By default, bitcoind will dump mempool on shutdown and
then reload it on startup. This can be overridden with
the -persistmempool=false command line option.
Test is as follows:
- start node0, node1 and node2. node1 has -persistmempool=false
- create 5 transactions on node2 to its own address. Note that these
are not sent to node0 or node1 addresses because we don't want
them to be saved in the wallet.
- check that node0 and node1 have 5 transactions in their mempools
- shutdown all nodes.
- startup node0. Verify that it still has 5 transactions
in its mempool. Shutdown node0. This tests that by default the
mempool is persistent.
- startup node1. Verify that its mempool is empty. Shutdown node1.
This tests that with -persistmempool=false, the mempool is not
dumped to disk when the node is shut down.
- Restart node0 with -persistmempool=false. Verify that its mempool is
empty. Shutdown node0. This tests that with -persistmempool=false,
the mempool is not loaded from disk on start up.
- Restart node0 with -persistmempool=true. Verify that it has 5
transactions in its mempool. This tests that -persistmempool=false
does not overwrite a previously valid mempool stored on disk.
"""
from test_framework.test_framework import PivxTestFramework
from test_framework.util import *
class MempoolPersistTest(PivxTestFramework):
    """Check that the mempool survives node restarts unless -persistmempool=0.

    See the module docstring for the full test plan: node1 runs with
    -persistmempool=0 and must come back empty; node0/node2 must reload
    their 5 transactions from mempool.dat.
    """

    def set_test_params(self):
        # node0/node2 keep the default persistent mempool; node1 disables it.
        self.num_nodes = 3
        self.extra_args = [[], ["-persistmempool=0"], []]

    def run_test(self):
        chain_height = self.nodes[0].getblockcount()
        assert_equal(chain_height, 200)

        self.log.debug("Mine a single block to get out of IBD")
        self.nodes[0].generate(1)
        self.sync_all()

        self.log.debug("Send 5 transactions from node2 (to its own address)")
        for i in range(5):
            self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
        node2_balance = self.nodes[2].getbalance()
        self.sync_all()

        self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
        assert_equal(len(self.nodes[0].getrawmempool()), 5)
        assert_equal(len(self.nodes[1].getrawmempool()), 5)

        self.log.debug("Stop-start node0 and node1. Verify that node0 has the transactions in its mempool and node1 does not.")
        self.stop_nodes()
        self.start_node(1)  # Give this one a head-start, so we can be "extra-sure" that it didn't load anything later
        self.start_node(0)
        self.start_node(2)
        # Give pivxd a second to reload the mempool
        wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5, timeout=1)
        wait_until(lambda: len(self.nodes[2].getrawmempool()) == 5, timeout=1)
        # The others loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
        assert_equal(len(self.nodes[1].getrawmempool()), 0)

        # Verify accounting of mempool transactions after restart is correct.
        # BUGFIX: the validation-interface queue must be drained so the wallet
        # has processed the reloaded mempool transactions before comparing
        # balances; without this flush the getbalance() check is racy.
        self.nodes[2].syncwithvalidationinterfacequeue()  # Flush mempool to wallet
        assert_equal(node2_balance, self.nodes[2].getbalance())

        self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
        self.stop_nodes()
        self.start_node(0, extra_args=["-persistmempool=0"])
        # Give bitcoind a second to reload the mempool
        time.sleep(1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)

        self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
        self.stop_nodes()
        self.start_node(0)
        wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)

        # Following code is ahead of our current repository state. Future back port.
        # mempooldat0 = os.path.join(self.options.tmpdir, 'node0', 'regtest', 'mempool.dat')
        # mempooldat1 = os.path.join(self.options.tmpdir, 'node1', 'regtest', 'mempool.dat')
        # self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
        # os.remove(mempooldat0)
        # self.nodes[0].savemempool()
        # assert os.path.isfile(mempooldat0)
        #
        # self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
        # os.rename(mempooldat0, mempooldat1)
        # self.stop_nodes()
        # self.start_node(1, extra_args=[])
        # wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5)
        #
        # self.log.debug("Prevent bitcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
        # # to test the exception we are setting bad permissions on a tmp file called mempool.dat.new
        # # which is an implementation detail that could change and break this test
        # mempooldotnew1 = mempooldat1 + '.new'
        # with os.fdopen(os.open(mempooldotnew1, os.O_CREAT, 0o000), 'w'):
        #     pass
        # assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
        # os.remove(mempooldotnew1)
if __name__ == '__main__':
MempoolPersistTest().main()
|
Python
| 0 |
@@ -3252,17 +3252,16 @@
-#
self.nod
|
5c5bf274c72ef67a3a2a2e5d6713df910026dcdb
|
Add hash plugin
|
plugins/hash.py
|
plugins/hash.py
|
Python
| 0.000001 |
@@ -0,0 +1,524 @@
+import hashlib%0Aimport sys%0A%0A%0Aclass Plugin:%0A def on_command(self, bot, msg):%0A if len(sys.argv) %3E= 2:%0A algorithm = sys.argv%5B1%5D%0A contents = %22 %22.join(sys.argv%5B2:%5D)%0A if not contents:%0A contents = sys.stdin.read().strip()%0A%0A h = hashlib.new(algorithm)%0A h.update(bytes(contents, %22utf-8%22))%0A print(h.hexdigest())%0A else:%0A print(self.on_help(bot))%0A%0A def on_help(self, bot):%0A return %22Usage: hash %3Calgorithm%3E %3Ccontents%3E%22%0A
|
|
086371f56748da9fb68acc4aaa10094b6cf24fcb
|
Revert "Remove pgjsonb returner unit tests"
|
tests/unit/returners/test_pgjsonb.py
|
tests/unit/returners/test_pgjsonb.py
|
Python
| 0 |
@@ -0,0 +1,1706 @@
+# -*- coding: utf-8 -*-%0A'''%0Atests.unit.returners.pgjsonb_test%0A~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~%0A%0AUnit tests for the PGJsonb returner (pgjsonb).%0A'''%0A%0A# Import Python libs%0Afrom __future__ import absolute_import, print_function, unicode_literals%0Aimport logging%0A%0A# Import Salt Testing libs%0Afrom tests.support.mixins import LoaderModuleMockMixin%0Afrom tests.support.unit import TestCase, skipIf%0Afrom tests.support.mock import (%0A MagicMock,%0A NO_MOCK,%0A NO_MOCK_REASON,%0A patch%0A)%0A%0A# Import Salt libs%0Aimport salt.returners.pgjsonb as pgjsonb%0A%0Alog = logging.getLogger(__name__)%0A%0A%0A@skipIf(NO_MOCK, NO_MOCK_REASON)%0Aclass PGJsonbCleanOldJobsTestCase(TestCase, LoaderModuleMockMixin):%0A '''%0A Tests for the local_cache.clean_old_jobs function.%0A '''%0A def setup_loader_modules(self):%0A return %7Bpgjsonb: %7B'__opts__': %7B'keep_jobs': 1, 'archive_jobs': 0%7D%7D%7D%0A%0A def test_clean_old_jobs_purge(self):%0A '''%0A Tests that the function returns None when no jid_root is found.%0A '''%0A connect_mock = MagicMock()%0A with patch.object(pgjsonb, '_get_serv', connect_mock):%0A with patch.dict(pgjsonb.__salt__, %7B'config.option': MagicMock()%7D):%0A self.assertEqual(pgjsonb.clean_old_jobs(), None)%0A%0A def test_clean_old_jobs_archive(self):%0A '''%0A Tests that the function returns None when no jid_root is found.%0A '''%0A connect_mock = MagicMock()%0A with patch.object(pgjsonb, '_get_serv', connect_mock):%0A with patch.dict(pgjsonb.__salt__, %7B'config.option': MagicMock()%7D):%0A with patch.dict(pgjsonb.__opts__, %7B'archive_jobs': 1%7D):%0A self.assertEqual(pgjsonb.clean_old_jobs(), None)%0A
|
|
077607b1b7fe705992c9f59f7dc94f2386aef4bb
|
add memcached
|
testutils/servers/memcache_server.py
|
testutils/servers/memcache_server.py
|
Python
| 0.000001 |
@@ -0,0 +1,1342 @@
+#%0A# Copyright (c) SAS Institute Inc.%0A#%0A# Licensed under the Apache License, Version 2.0 (the %22License%22);%0A# you may not use this file except in compliance with the License.%0A# You may obtain a copy of the License at%0A#%0A# http://www.apache.org/licenses/LICENSE-2.0%0A#%0A# Unless required by applicable law or agreed to in writing, software%0A# distributed under the License is distributed on an %22AS IS%22 BASIS,%0A# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.%0A# See the License for the specific language governing permissions and%0A# limitations under the License.%0A#%0A%0Aimport signal%0Afrom testutils import sock_utils%0Afrom testutils import subprocutil%0A%0A%0Aclass MemcacheServer(object):%0A def __init__(self, port=None):%0A self.port = port if port else sock_utils.findPorts(num=1)%5B0%5D%0A self.server = subprocutil.GenericSubprocess(%0A args=%5B'memcached',%0A '-p', str(self.port),%0A %5D,%0A )%0A%0A def start(self):%0A self.server.start()%0A sock_utils.tryConnect('::', self.port, abortFunc=self.server.check)%0A%0A def check(self):%0A return self.server.check()%0A%0A def stop(self):%0A self.server.kill(signum=signal.SIGQUIT, timeout=3)%0A%0A def reset(self):%0A pass%0A%0A def getHostPort(self):%0A return '127.0.0.1:%25d' %25 self.port%0A
|
|
d95f24d43f3925a91176429cca1aaac30a0c55aa
|
Create java module main
|
genes/java/main.py
|
genes/java/main.py
|
Python
| 0.000001 |
@@ -0,0 +1,861 @@
+from genes import apt, debconf%0Aimport platform%0A%0Aclass Config:%0A OS = platform.system()%0A (DIST, _, CODE) = platform.linux_distribution()%0A REPO = DIST.lower() + '-' + CODE%0A%0Adef main():%0A if Config.OS == 'Linux':%0A if Config.DIST == 'Ubuntu' or Config.DIST == 'Debian':%0A #FIXME: debian needs ppa software%0A apt.add_repo('ppa:webupd8team/java')%0A apt.update()%0A debconf.set_selections('oracle-java8-installer',%0A 'shared/accepted-oracle-license-v1-1',%0A 'select', 'true')%0A apt.install('oracle-java8-installer')%0A else:%0A #FIXME: print failure case%0A pass%0A elif Config.OS == 'Darwin':%0A #brew_cask.install('java8')%0A pass%0A else:%0A #FIXME: print failure, handle windows%0A pass%0A
|
|
6d87badb68f2e20a3907f670b9190956ebd127e8
|
Create AddBinaryNumbers.py
|
math/AddBinaryNumbers/Python/AddBinaryNumbers.py
|
math/AddBinaryNumbers/Python/AddBinaryNumbers.py
|
Python
| 0.000004 |
@@ -0,0 +1,177 @@
+number1 = input(%22Enter the first number: %22)%0Anumber2 = input(%22Enter the second number: %22)%0A%0Aresult = (int(number1, 2) + int(number2, 2))%0A%0Aresult = bin(result)%0A%0Aprint(result%5B2:%5D)%0A%0A
|
|
c71a43dae259299952cec082d33f003ecaeb9eab
|
Add marky test.
|
tests/py/test_markdown.py
|
tests/py/test_markdown.py
|
Python
| 0 |
@@ -0,0 +1,362 @@
+from gratipay.testing import Harness%0Afrom gratipay.utils import markdown%0A%0Afrom HTMLParser import HTMLParser%0A%0Aclass TestMarkdown(Harness):%0A%0A def test_marky_works(self):%0A md = %22**Hello World!**%22%0A actual = HTMLParser().unescape(markdown.marky(md)).strip()%0A expected = '%3Cp%3E%3Cstrong%3EHello World!%3C/strong%3E%3C/p%3E'%0A assert actual == expected%0A
|
|
722b1d55c771e628ba82bbd5b8f8f5de047112af
|
Add a hex dump utility class.
|
tests/hexdumper.py
|
tests/hexdumper.py
|
Python
| 0 |
@@ -0,0 +1,553 @@
+# This hack by: Raymond Hettinger%0Aclass hexdumper:%0A %22%22%22Given a byte array, turn it into a string. hex bytes to stdout.%22%22%22%0A def __init__(self):%0A%09self.FILTER=''.join(%5B(len(repr(chr(x)))==3) and chr(x) or '.' %5C%0A%09%09%09%09%09%09 for x in range(256)%5D)%0A%0A def dump(self, src, length=8):%0A%09result=%5B%5D%0A%09for i in xrange(0, len(src), length):%0A%09 s = src%5Bi:i+length%5D%0A%09 hexa = ' '.join(%5B%22%2502X%22%25ord(x) for x in s%5D)%0A%09 printable = s.translate(self.FILTER)%0A%09 result.append(%22%2504X %25-*s %25s%5Cn%22 %25 %5C%0A%09%09%09 (i, length*3, hexa, printable))%0A%09return ''.join(result)%0A
|
|
f7a8c0b6e361ce3e5f0980b539b843b33fea258d
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/4464fe2aad5cccfd7935b0f1767901eb08e99784.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "191a16a25cc901e12535893b94aca169916d378c"
TFRT_SHA256 = "11b5d8d41bc4a6c1c6c7f9c6958c834aef832162ca75806f799bb51c9119b93d"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0 |
@@ -228,48 +228,48 @@
= %22
-191a16a25cc901e12535893b94aca169916d378c
+4464fe2aad5cccfd7935b0f1767901eb08e99784
%22%0A
@@ -289,72 +289,72 @@
= %22
-11b5d8d41bc4a6c1c6c7f9c6958c834aef832162ca75806f799bb51c9119b93d
+cc3b5b95a2da47710ade8b2d3c0046cd05750f94db5f3feb58a224ae7163db2f
%22%0A%0A
|
c5b0c56f53dee5577641a668019f40f9468017ea
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/83d3045fb5476bed115ae438871a228c1c682af1.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "6ca793b5d862ef6c50f242d77a811f06cce9b60a"
TFRT_SHA256 = "720b059a6b1d5757a76e56cf4a3a791b58e5d020858f6b67b077839963bffe8c"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0 |
@@ -228,132 +228,132 @@
= %22
-6ca793b5d862ef6c50f242d77a811f06cce9b60a%22%0A TFRT_SHA256 = %22720b059a6b1d5757a76e56cf4a3a791b58e5d020858f6b67b077839963bffe8
+83d3045fb5476bed115ae438871a228c1c682af1%22%0A TFRT_SHA256 = %22bdde8691c6a17c803de04423271b3534a421fd323627dc607b1fddf2f454e52
c%22%0A%0A
|
a3a9d4d6538b025d0c6c821a72076e084a5b597b
|
Update TFRT dependency to use revision http://github.com/tensorflow/runtime/commit/9dac1ed1ebc2350ada97b16093174a1a0bbd56d0.
|
third_party/tf_runtime/workspace.bzl
|
third_party/tf_runtime/workspace.bzl
|
"""Provides the repository macro to import TFRT."""
load("//third_party:repo.bzl", "tf_http_archive", "tf_mirror_urls")
def repo():
"""Imports TFRT."""
# Attention: tools parse and update these lines.
TFRT_COMMIT = "553df8c12e9ba5930b9b8065f1d012ea07c6044c"
TFRT_SHA256 = "477d0374b044c60cd018fdb17e7c6054e190e59e36e1a442eb5d1628efb2341d"
tf_http_archive(
name = "tf_runtime",
sha256 = TFRT_SHA256,
strip_prefix = "runtime-{commit}".format(commit = TFRT_COMMIT),
urls = tf_mirror_urls("https://github.com/tensorflow/runtime/archive/{commit}.tar.gz".format(commit = TFRT_COMMIT)),
# A patch file can be provided for atomic commits to both TF and TFRT.
# The job that bumps the TFRT_COMMIT also resets patch_file to 'None'.
patch_file = None,
)
|
Python
| 0.000001 |
@@ -228,133 +228,133 @@
= %22
-553df8c12e9ba5930b9b8065f1d012ea07c6044c%22%0A TFRT_SHA256 = %22477d0374b044c60cd018fdb17e7c6054e190e59e36e1a442eb5d1628efb2341d
+9dac1ed1ebc2350ada97b16093174a1a0bbd56d0%22%0A TFRT_SHA256 = %2289eea9ff0c9dfca61037c4da051a6ddf4d4598614f7ca08a240355d1635f8786
%22%0A%0A
|
2e53ae34ec03485302d5d7e6e5dd05707bbd1cf6
|
Add camera tests
|
tests/test_camera.py
|
tests/test_camera.py
|
Python
| 0 |
@@ -0,0 +1,1695 @@
+import os%0A%0Aimport pygame%0A%0Afrom ..sappho import Camera%0Afrom .common import compare_pygame_surfaces%0A%0Aclass TestCamera(object):%0A def test_scroll(self):%0A # Create surface to render to%0A output_surface = pygame.surface.Surface((1, 1))%0A%0A # Create fixtures%0A red_surface = pygame.surface.Surface((1, 1))%0A blue_surface = pygame.surface.Surface((1, 1))%0A red_surface.fill((255, 0, 0))%0A blue_surface.fill((0, 255, 0))%0A%0A # Create the camera and blit colors to it%0A camera = Camera((2, 1), (1, 1), (1, 1))%0A camera.blit(red_surface, (0, 0))%0A camera.blit(blue_surface, (1, 0))%0A%0A # We should be at (0, 0) so blitting should get us a red pixel%0A output_surface.blit(camera, (0, 0))%0A assert(compare_pygame_surfaces(red_surface, output_surface))%0A%0A # Scroll one pixel to the left, and we should get a blue pixel%0A # when blitting%0A camera.scroll(1, 0)%0A output_surface.blit(camera, (0, 0))%0A assert(compare_pygame_surfaces(blue_surface, output_surface))%0A%0A def test_scale(self):%0A # Create surface to render to%0A output_surface = pygame.surface.Surface((10, 10))%0A%0A # Create fixtures%0A red_small = pygame.surface.Surface((1, 1))%0A red_large = pygame.surface.Surface((10, 10))%0A red_small.fill((255, 0, 0))%0A red_large.fill((255, 0, 0))%0A%0A # Create the camera with scaling enabled and blit our red pixel to it%0A camera = Camera((1, 1), (10, 10), (1, 1))%0A camera.blit(red_small, (0, 0))%0A%0A # Blit and compare%0A output_surface.blit(camera, (0, 0))%0A assert(compare_pygame_surfaces(output_surface, red_large))%0A
|
|
349918610081c8c02dc75fdafd47f647814dd63c
|
add converter of string to format maya understands for changing setting of fps
|
mindbender/maya/pythonpath/mayafpsconverter.py
|
mindbender/maya/pythonpath/mayafpsconverter.py
|
Python
| 0 |
@@ -0,0 +1,953 @@
+def mayafpsconverter(Sfps):%0A condition = 0%0A if Sfps == %22%22:%0A condition = 1%0A return Sfps%0A if Sfps == %2215%22:%0A condition = 1%0A return %22game%22%0A if Sfps == %2224%22:%0A condition = 1%0A return %22film%22%0A if Sfps == %2225%22:%0A condition = 1%0A return %22pal%22%0A if Sfps == %2230%22:%0A condition = 1%0A return %22ntsc%22%0A if Sfps == %2248%22:%0A condition = 1%0A return %22show%22%0A if Sfps == %2250%22:%0A condition = 1%0A return %22palf%22%0A if Sfps == %2260%22:%0A condition = 1%0A return %22ntscf%22%0A ERRORSTRING = %22MINDBENDER_FPS has bad value in the bat file%22%0A if str(Sfps).isdigit() is False:%0A cmds.confirmDialog(%0A title=%22Enviroment variable error%22,%0A message=ERRORSTRING,%0A button=%22%22,%0A defaultButton=%22%22,%0A cancelButton=%22%22,%0A dismissString=%22%22)%0A return %22%22%0A if condition == 0:%0A Sfps = str(Sfps) + %22fps%22%0A return Sfps
|
|
3eafac9d71f7f885f66a63218557194291c649f7
|
add config test
|
tests/test_config.py
|
tests/test_config.py
|
Python
| 0.000001 |
@@ -0,0 +1,762 @@
+import pytest%0A%0Afrom pytest_girder.assertions import assertStatusOk, assertStatus%0Afrom slicer_cli_web.config import PluginSettings%0A%0A%[email protected]('slicer_cli_web')%0Adef test_default_task_folder(server, admin, folder):%0A # Test the setting%0A resp = server.request('/system/setting', method='PUT', params=%7B%0A 'key': PluginSettings.SLICER_CLI_WEB_TASK_FOLDER,%0A 'value': 'bad value'%0A %7D, user=admin)%0A assertStatus(resp, 400)%0A resp = server.request('/system/setting', method='PUT', params=%7B%0A 'key': PluginSettings.SLICER_CLI_WEB_TASK_FOLDER,%0A 'value': folder%5B'_id'%5D%0A %7D, user=admin)%0A assertStatusOk(resp)%0A%0A assert PluginSettings.has_task_folder()%0A assert PluginSettings.get_task_folder()%5B'_id'%5D == folder%5B'_id'%5D%0A
|
|
cfc89a542ebb9b1745bb8a7ce30f79dad12a16b7
|
add mslib tool to build static C libraries.
|
yaku/tools/mslib.py
|
yaku/tools/mslib.py
|
Python
| 0 |
@@ -0,0 +1,795 @@
+import yaku.utils%0Aimport yaku.task%0A%0Adef setup(ctx):%0A env = ctx.env%0A%0A ctx.env%5B%22STLINK%22%5D = %5B%22lib.exe%22%5D%0A ctx.env%5B%22STLINK_TGT_F%22%5D = %5B%22/OUT:%22%5D%0A ctx.env%5B%22STLINK_SRC_F%22%5D = %5B%5D%0A ctx.env%5B%22STLINKFLAGS%22%5D = %5B%22/nologo%22%5D%0A ctx.env%5B%22STATICLIB_FMT%22%5D = %22%25s.lib%22%0A%0A # XXX: hack%0A saved = yaku.task.Task.exec_command%0A def msvc_exec_command(self, cmd, cwd):%0A new_cmd = %5B%5D%0A carry = %22%22%0A for c in cmd:%0A if c in %5B%22/OUT:%22%5D:%0A carry = c%0A else:%0A c = carry + c%0A carry = %22%22%0A new_cmd.append(c)%0A saved(self, new_cmd, cwd)%0A yaku.task.Task.exec_command = msvc_exec_command%0A%0Adef detect(ctx):%0A if yaku.utils.find_program(%22lib.exe%22) is None:%0A return False%0A else:%0A return True%0A
|
|
367a7cdcb02d2d8c15e9a2375c5304b2ad9c89ac
|
Add the basic tools as functions to facilitate basic operations
|
ytranslate/tools.py
|
ytranslate/tools.py
|
Python
| 0.000071 |
@@ -0,0 +1,3800 @@
+%EF%BB%BF# Copyright (c) 2015, LE GOFF Vincent%0A# All rights reserved.%0A%0A# Redistribution and use in source and binary forms, with or without%0A# modification, are permitted provided that the following conditions are met:%0A%0A# * Redistributions of source code must retain the above copyright notice, this%0A# list of conditions and the following disclaimer.%0A%0A# * Redistributions in binary form must reproduce the above copyright notice,%0A# this list of conditions and the following disclaimer in the documentation%0A# and/or other materials provided with the distribution.%0A%0A# * Neither the name of ytranslate nor the names of its%0A# contributors may be used to endorse or promote products derived from%0A# this software without specific prior written permission.%0A%0A# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS %22AS IS%22%0A# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE%0A# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE%0A# DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE%0A# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL%0A# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR%0A# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER%0A# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,%0A# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE%0A# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.%0A%0A%22%22%22Module containing different tools as functions.%0A%0AThey can be called to interact with the created object of the%0A'ytranslate' library, including the catalogs and loader.%0A%0A%22%22%22%0A%0Afrom ytranslate.fsloader import FSLoader%0A%0Adef init(root_dir):%0A %22%22%22Load the catalogs at a specified location.%0A%0A The 'root_dir', the parent directory, is sent to the FSLoader%0A class which is to create a hierarchy of catalogs. The parent%0A catalogs bear the name of the namespace (that is their%0A directory or their filename without the '.yml' extension).%0A For instance:%0A init(%22path/to/translations%22)%0A%0A Use the 'select' function to then select a catalog.%0A%0A %22%22%22%0A fsloader = FSLoader(root_dir)%0A FSLoader.current_loader = fsloader%0A fsloader.load()%0A%0Adef select(catalog):%0A %22%22%22Select the catalog from the loader.%0A%0A The catalog's name must be specified. If the loader is a%0A FSLoader (the default), then the 'root_dir' directory contains%0A the parent catalogs. You should use one of its contained%0A directoriess' names, or that of a MYL file without the '.yml'%0A extension. 
For instance:%0A select(%22en%22)%0A%0A %22%22%22%0A if FSLoader.current_loader:%0A FSLoader.current_loader.select(catalog)%0A else:%0A raise ValueError(%22the current loader hasn't been selected%22)%0A%0Adef t(address, count=None, **kwargs):%0A %22%22%22Retrieve the translated message from the selected catalog.%0A%0A You can use this function to obtain the translated message,%0A corresponding to the address, which must represent the list of%0A namespaces separated by '.'. For instance:%0A t(%22ui.title%22)%0A%0A The hierarchy of messages is defined by the catalog's structure%0A (directories and files, if it has been selected by a FSLoader,%0A which is the default choice).%0A%0A You can also use placeholders as named parameters:%0A t(%22welcome.name%22, user=%22John%22)%0A%0A Additionally, you can vary the message according to a number.%0A For instance:%0A t(%22notificaiton.emails%22, 3)%0A%0A See the user documentation for a detailed explanation about%0A the syntax and corresponding catalogs.%0A%0A %22%22%22%0A if FSLoader.current_catalog:%0A return FSLoader.current_catalog.retrieve(address, count, **kwargs)%0A%0A raise ValueError(%22no catalog has been selected%22)%0A
|
|
8660c7fda8cc7290fadeed7a39f06218087d9401
|
Add draft test module for linter
|
tests/test_linter.py
|
tests/test_linter.py
|
Python
| 0 |
@@ -0,0 +1,455 @@
+import logging%0Aimport pytest%0Afrom mappyfile.validator import Validator%0A%0A%0Adef validate(d):%0A v = Validator()%0A return v.validate(d)%0A%0A%0Adef get_from_dict(d, keys):%0A for k in keys:%0A if isinstance(k, int):%0A d = d%5B0%5D%0A else:%0A d = d%5Bk%5D%0A return d%0A%0A%0Adef run_tests():%0A pytest.main(%5B%22tests/test_linter.py%22%5D)%0A%0A%0Aif __name__ == %22__main__%22:%0A logging.basicConfig(level=logging.INFO)%0A # run_tests()%0A print(%22Done!%22)%0A
|
|
ad7f9f785f9a4a4494127a9b2196e1fc64c9f3de
|
Add basic first tests for new report driven by "events"
|
tests/test_report.py
|
tests/test_report.py
|
Python
| 0 |
@@ -0,0 +1,1023 @@
+from django.test import TestCase%0A%0Afrom deep_collector.core import RelatedObjectsCollector%0A%0Afrom .factories import BaseModelFactory%0A%0A%0Aclass TestLogReportGeneration(TestCase):%0A def test_report_with_no_debug_mode(self):%0A obj = BaseModelFactory.create()%0A%0A collector = RelatedObjectsCollector()%0A collector.collect(obj)%0A report = collector.get_report()%0A%0A self.assertDictEqual(report, %7B%0A 'excluded_fields': %5B%5D,%0A 'log': 'Set DEBUG to True if you what collector internal logs'%0A %7D)%0A%0A def test_report_with_debug_mode(self):%0A self.maxDiff = None%0A obj = BaseModelFactory.create()%0A%0A collector = RelatedObjectsCollector()%0A collector.DEBUG = True%0A collector.collect(obj)%0A report = collector.get_report()%0A%0A self.assertEqual(report%5B'excluded_fields'%5D, %5B%5D)%0A # For now, just checking that the log report is not empty.%0A # Some work has to be done to test it more.%0A self.assertNotEqual(report%5B'log'%5D, %5B%5D)%0A
|
|
272371f28369cca514d90f355e7771c133d11dcf
|
Create __openerp__.py
|
project_surgery/__openerp__.py
|
project_surgery/__openerp__.py
|
Python
| 0.005291 |
@@ -0,0 +1,1489 @@
+# -*- coding: utf-8 -*-%0A##############################################################################%0A#%0A# Author: Gideoni Silva (Omnes)%0A# Copyright 2013-2014 Omnes Tecnologia%0A#%0A# This program is free software: you can redistribute it and/or modify%0A# it under the terms of the GNU Affero General Public License as%0A# published by the Free Software Foundation, either version 3 of the%0A# License, or (at your option) any later version.%0A#%0A# This program is distributed in the hope that it will be useful,%0A# but WITHOUT ANY WARRANTY; without even the implied warranty of%0A# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the%0A# GNU Affero General Public License for more details.%0A#%0A# You should have received a copy of the GNU Affero General Public License%0A# along with this program. If not, see %3Chttp://www.gnu.org/licenses/%3E.%0A#%0A##############################################################################%0A%0A%7B%0A 'name': 'Participantes da Cirurgia',%0A 'description': 'Este m%C3%B3dulo adiciona os campos adicionais com os participantes da cirurgia.',%0A 'category': 'Generic Modules/Projects & Services',%0A 'license': 'AGPL-3',%0A 'author': 'Omnes',%0A 'website': 'www.omnes.net.br',%0A 'version': '0.1',%0A 'depends': %5B%0A 'base',%0A 'project',%0A %5D,%0A 'data': %5B%0A 'project_view.xml'%0A %5D,%0A 'demo': %5B%5D,%0A 'installable': True,%0A 'active': False,%0A%7D%0A# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:%0A
|
|
a08452c4ed3338cf43bf2647bcc17a7d66ba4d23
|
call restore config directly
|
corehq/apps/ota/tasks.py
|
corehq/apps/ota/tasks.py
|
from celery.task import task
from couchdbkit.exceptions import ResourceNotFound
from casexml.apps.case.xml import V1
from corehq.apps.users.models import CommCareUser
from soil import DownloadBase
@task
def prime_restore(usernames_or_ids, version=V1, cache_timeout=None, overwrite_cache=False):
from corehq.apps.ota.views import get_restore_response
total = len(usernames_or_ids)
DownloadBase.set_progress(prime_restore, 0, total)
ret = {'messages': []}
for i, username_or_id in enumerate(usernames_or_ids):
couch_user = get_user(username_or_id)
if not couch_user:
ret['messages'].append('User not found: {}'.format(username_or_id))
continue
try:
get_restore_response(
couch_user.domain,
couch_user,
since=None,
version=version,
force_cache=True,
cache_timeout=cache_timeout,
overwrite_cache=overwrite_cache,
items=True
)
except Exception as e:
ret['messages'].append('Error processing user: {}'.format(str(e)))
DownloadBase.set_progress(prime_restore, i + 1, total)
return ret
def get_user(username_or_id):
try:
couch_user = CommCareUser.get(username_or_id)
except ResourceNotFound:
try:
couch_user = CommCareUser.get_by_username(username_or_id)
except ResourceNotFound:
return None
return couch_user
|
Python
| 0 |
@@ -110,16 +110,69 @@
port V1%0A
+from casexml.apps.phone.restore import RestoreConfig%0A
from cor
@@ -347,67 +347,8 @@
e):%0A
- from corehq.apps.ota.views import get_restore_response%0A
@@ -722,63 +722,313 @@
-get_restore_response(%0A couch_user.domain
+project = couch_user.project%0A commtrack_settings = project.commtrack_settings%0A stock_settings = commtrack_settings.get_ota_restore_settings() if commtrack_settings else None%0A restore_config = RestoreConfig(%0A couch_user.to_casexml_user(), None, version, None
,%0A
@@ -1041,26 +1041,26 @@
-couch_user
+items=True
,%0A
@@ -1074,17 +1074,36 @@
s
-ince=None
+tock_settings=stock_settings
,%0A
@@ -1120,23 +1120,22 @@
-version=version
+domain=project
,%0A
@@ -1258,17 +1258,16 @@
te_cache
-,
%0A
@@ -1263,32 +1263,38 @@
che%0A
+ )%0A
items=True%0A
@@ -1285,31 +1285,192 @@
-items=True%0A
+ restore_config.get_payload()%0A%0A ret%5B'messages'%5D.append('Restore generated successfully for user: %7B%7D'.format(%0A couch_user.human_friendly_name,%0A )
)%0A
|
e1a0029488d4cbf0581c21ceb1bd5db3c19bf3eb
|
add readme
|
algorithms/CommonFun.py
|
algorithms/CommonFun.py
|
Python
| 0.000001 |
@@ -0,0 +1,960 @@
+#!user/bin/env python%0A# coding:utf-8%0A%0Aimport sys%0Aimport random%0A%0Areload(sys)%0Asys.setdefaultencoding('utf-8')%0A%0Adef QuickSort(left, right, array):%0A%09l = left%0A%09r = right%0A%09while l %3C r:%0A%09%09base = array%5Br%5D%0A%09%09while (array%5Bl%5D %3C= base and l %3C r):%0A%09%09%09l = l + 1%0A%09%09if(l %3C r):%0A%09%09%09array%5Br%5D = array%5Bl%5D%0A%0A%09%09while (array%5Bl%5D %3C= array%5Br%5D and l %3C r):%0A%09%09%09r = r - 1%0A%09%09if(l %3C r):%0A%09%09%09array%5Bl%5D = array%5Br%5D%0A%09%09%0A%09%09array%5Br%5D = base%0A%09%09QuickSort(left, r - 1, array)%0A%09%09QuickSort(r + 1, right, array)%0A%0A#array %E4%B8%BA%E6%9C%89%E5%BA%8F%E6%95%B0%E7%BB%84%0Adef BinarySearch(left, right, array, target):%0A%09if(left %3C right):%0A%09%09mid = (left + right)/2%0A%09%09if(array%5Bmid%5D %3E target):%0A%09%09%09return BinarySearch(left, mid-1, array, target)%0A%09%09elif(array%5Bmid%5D %3C target):%0A%09%09%09return BinarySearch(mid+1, right, array, target)%0A%09%09else:%0A%09%09%09return mid%0A%09else:%0A%09%09return -1%0A%0Aif __name__ == '__main__':%0A%09array = %5B%5D%0A%09for i in range(10):%0A%09%09it = random.randint(1, 100)%0A%09%09array.append(it)%0A%0A%09QuickSort(0, len(array)-1, array)%0A%0A%09print BinarySearch(0, len(array)-1, array, 15)
|
|
e0a7824253ae412cf7cc27348ee98c919d382cf2
|
verify stderr for a failing clone into a non-empty dir
|
test/test_clone.py
|
test/test_clone.py
|
Python
| 0.000001 |
@@ -0,0 +1,1115 @@
+# -*- coding: utf-8 -*-%0A# This module is part of GitPython and is released under%0A# the BSD License: http://www.opensource.org/licenses/bsd-license.php%0A%0Afrom pathlib import Path%0Aimport re%0A%0Aimport git%0A%0Afrom .lib import (%0A TestBase,%0A with_rw_directory,%0A)%0A%0Aclass TestClone(TestBase):%0A @with_rw_directory%0A def test_checkout_in_non_empty_dir(self, rw_dir):%0A non_empty_dir = Path(rw_dir)%0A garbage_file = non_empty_dir / 'not-empty'%0A garbage_file.write_text('Garbage!')%0A%0A # Verify that cloning into the non-empty dir fails while complaining about the target directory not being empty/non-existent%0A try:%0A self.rorepo.clone(non_empty_dir)%0A except git.GitCommandError as exc:%0A self.assertTrue(exc.stderr, %22GitCommandError's 'stderr' is unexpectedly empty%22)%0A expr = re.compile(r'(?is).*%5Cbfatal:%5Cs+destination%5Cs+path%5Cb.*%5Cbexists%5Cb.*%5Cbnot%5Cb.*%5Cbempty%5Cs+directory%5Cb')%0A self.assertTrue(expr.search(exc.stderr), '%22%25s%22 does not match %22%25s%22' %25 (expr.pattern, exc.stderr))%0A else:%0A self.fail(%22GitCommandError not raised%22)%0A
|
|
4683fc67d5171d8bb0391ac45f587fbc3e3c97fc
|
Add dependency installer for linux and mac osx
|
install_dependencies.py
|
install_dependencies.py
|
Python
| 0 |
@@ -0,0 +1,1928 @@
+import platform%0Aimport subprocess%0A%0A%22%22%22%0AThis is a standalone script that installs the required dependencies to run. It%0A*should* be platform independent, and should work regardless of what platform%0Ayou are running it on.%0A%0ATo install dependencies, download the DevAssist source and run this script by%0Arunning %22python install_dependencies.py%22%0A%22%22%22%0A%0A# Identifying host platform%0Ahost_platform = platform.system()%0A%0Adef install_dependencies():%0A %22%22%22%0A Installs dependencies for DevAssist%0A %22%22%22%0A # Darwin = Mac OSX%0A if host_platform == %22Darwin%22:%0A # Installing portaudio%0A # @TODO: Rewrite to not use shell=True%0A print(%22Installing portaudio...%5Cn%22)%0A portaudio = subprocess.Popen(%5B%22brew install portaudio%22%5D, shell=True)%0A portaudio.communicate()%0A print(%22%5Cnportaudio has been installed...%22)%0A%0A # Installing pyaudio%0A # @TODO: Rewrite to not use shell=True%0A print(%22Installing pyaudio...%5Cn%22)%0A pyaudio = subprocess.Popen(%5B%22pip install pyaudio%22%5D, shell=True)%0A pyaudio.communicate()%0A print(%22%5Cnpyaudio has been installed...%22)%0A elif host_platform == %22Linux%22:%0A # Installing dependencies for portaudio%0A # @TODO: Rewrite to not use shell=True%0A print(%22Installing portaudio & dependencies...%5Cn%22)%0A portaudio = subprocess.Popen(%5B%22apt-get install portaudio19-dev python-all-dev python3-all-dev%22%5D, shell=True)%0A portaudio.communicate()%0A print(%22%5Cnportaudio & dependencies have been installed...%22)%0A%0A # Installing pyaudio%0A # @TODO: Rewrite to not use shell=True%0A print(%22Installing pyaudio...%5Cn%22)%0A pyaudio = subprocess.Popen(%5B%22pip install --global-option='build_ext' --global-option='-I/usr/local/include' --global-option='-L/usr/local/lib' pyaudio%22%5D, shell=True)%0A pyaudio.communicate()%0A print(%22%5Cnpyaudio has been installed...%22)%0A%0Aif __name__ == %22__main__%22:%0A install_dependencies()%0A
|
|
0fd7cdee45b54551bcfc901cece2e5cc9dec4555
|
Add new test setup required for py.test/django test setup
|
test/test_setup.py
|
test/test_setup.py
|
Python
| 0 |
@@ -0,0 +1,292 @@
+import os%0Aimport django%0A%0Aos.environ%5B'DJANGO_SETTINGS_MODULE'%5D = 'testsettings'%0A%0A# run django setup if we are on a version of django that has it%0Aif hasattr(django, 'setup'):%0A # setup doesn't like being run more than once%0A try:%0A django.setup()%0A except RuntimeError:%0A pass
|
|
2014a7e3e785c9826575846a38b4703ef19946f4
|
fix path stuff
|
test/test_tiles.py
|
test/test_tiles.py
|
import math
import pyglet
from pyglet.window import key
import cocos
from cocos import tiles
class CarSprite(cocos.actions.ActionSprite):
speed = 0
def update(self, dt):
# handle input and move the car
self.rotation += (keyboard[key.RIGHT] - keyboard[key.LEFT]) * 150 * dt
speed = self.speed
speed += (keyboard[key.UP] - keyboard[key.DOWN]) * 50
if speed > 200: speed = 200
if speed < -100: speed = -100
self.speed = speed
r = math.radians(self.rotation)
s = dt * speed
self.x += math.sin(r) * s
self.y += math.cos(r) * s
manager.set_focus(self.x, self.y)
if __name__ == "__main__":
from cocos.director import director
#director.init(width=400, height=300)
director.init(width=600, height=300)
car_layer = tiles.ScrollableLayer()
car = pyglet.image.load('car.png')
car.anchor_x = car.width//2
car.anchor_y = car.height//2
car = CarSprite(car)
pyglet.clock.schedule(car.update)
car_layer.add(car)
manager = tiles.ScrollingManager(director.window)
test_layer = tiles.load('road-map.xml')['map0']
manager.append(test_layer)
manager.append(car_layer)
main_scene = cocos.scene.Scene(test_layer, car_layer)
keyboard = key.KeyStateHandler()
director.window.push_handlers(keyboard)
@director.window.event
def on_close():
pyglet.app.exit()
director.run(main_scene)
|
Python
| 0.000001 |
@@ -1,12 +1,173 @@
+# This code is so you can run the samples without installing the package%0Aimport sys%0Aimport os%0Asys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))%0A%0A
import math%0A
|
dbfa14401c0b50eb1a3cac413652cb975ee9d41f
|
Add valid directory cleaner helper test
|
ocw-ui/backend/tests/test_directory_helpers.py
|
ocw-ui/backend/tests/test_directory_helpers.py
|
Python
| 0.000003 |
@@ -0,0 +1,598 @@
+import os%0Aimport unittest%0Afrom webtest import TestApp%0A%0Afrom ..run_webservices import app%0Afrom ..directory_helpers import _get_clean_directory_path%0A%0Atest_app = TestApp(app)%0A%0Aclass TestDirectoryPathCleaner(unittest.TestCase):%0A PATH_LEADER = '/tmp/foo'%0A VALID_CLEAN_DIR = '/tmp/foo/bar'%0A%0A if not os.path.exists(PATH_LEADER): os.mkdir(PATH_LEADER)%0A if not os.path.exists(VALID_CLEAN_DIR): os.mkdir(VALID_CLEAN_DIR)%0A%0A def test_valid_directory_path(self):%0A clean_path = _get_clean_directory_path(self.PATH_LEADER, '/bar')%0A self.assertEquals(clean_path, self.VALID_CLEAN_DIR)%0A
|
|
5ee4f6fd50da0a7115f8ca0ab29c4388eaef13a6
|
add probability function decomposition
|
src/probability.py
|
src/probability.py
|
Python
| 0.003014 |
@@ -0,0 +1,978 @@
+from __future__ import division%0A%0Afrom math import log%0Afrom scipy.special import binom%0Aimport numpy as np%0A%0Adef C(p, p0):%0A p1 = 1 - p0%0A return -p0*log(p0, 2) + p0*p*log(p0*p, 2) - (p1+p0*p)*log(p1+p0*p, 2)%0A%0Adef P(c, p0, eps=0.00001):%0A left = 0%0A right = 1%0A while right - left %3E eps:%0A p = (left + right) / 2%0A cp = C(p, p0)%0A if cp %3E c:%0A left = p%0A else:%0A right = p%0A return left%0A%0Adef coef(i, p):%0A return binom(N, i) * p**i*(1-p)**(N-i) %0A%0Adef A(c, N, M):%0A points = (np.array(xrange(M)) + 1) / (M + 1)%0A A = np.matrix(%5Bnp.array(%5Bcoef(i, p) for i in xrange(N)%5D) for p in points%5D)%0A b = np.array(%5BP(c, p) for p in points%5D)%0A a, _, _, _ = np.linalg.lstsq(A, b)%0A return a%0A%0Aif __name__ == '__main__':%0A N = 10 # Buffer size%0A M = 100 # Num of points%0A c = 0.15%0A %0A a = A(c, N)%0A p0 = 0.7%0A x = np.array(%5Bcoef(i, p0) for i in xrange(N)%5D)%0A print(np.dot(a, x))%0A print(P(c, p0))%0A print(a)%0A
|
|
8e8a1a33d8bedcb597020f9723c03d0f6af57522
|
Add python script
|
send.py
|
send.py
|
Python
| 0.000302 |
@@ -0,0 +1,777 @@
+import sys%0Aimport os%0A%0Atry:%0A sys.path.append(os.path.join(os.environ%5B'ANDROID_VIEW_CLIENT_HOME'%5D, 'src'))%0Aexcept:%0A pass%0A%0Afrom com.dtmilano.android.viewclient import ViewClient%0A%0Anumber = sys.argv%5B2%5D%0Atext = sys.argv%5B3%5D%0A%0Aprint(%22Sending WhatsApp...%22)%0Aprint(%22Number: %22 + number)%0Aprint(%22Text: %22 + text)%0A%0Apackage = 'com.android.chrome'%0Aactivity = 'com.google.android.apps.chrome.Main'%0Acomponent = package + %22/%22 + activity%0Auri = 'https://api.whatsapp.com/send?phone=' + number%0A%0Adevice, serialno = ViewClient.connectToDeviceOrExit()%0Avc = ViewClient(device=device, serialno=serialno)%0Adevice.startActivity(component=component, uri=uri)%0Avc.sleep(3)%0Adevice.type(text)%0Avc = ViewClient(device=device, serialno=serialno)%0Asend = vc.findViewByIdOrRaise('com.whatsapp:id/send')%0Asend.touch()%0A
|
|
61f806ffc68c41dfbb926ea6825292eabed46966
|
Add sorting code
|
sort.py
|
sort.py
|
Python
| 0.000007 |
@@ -0,0 +1,322 @@
+#!/usr/bin/env python%0A%0Aimport re%0Aimport sys%0A%0Asort = %7B%7D%0Aregex = re.compile(r'TBX_API %5Cw* %5C*?(%5Cw*)%5C(.*')%0Afor line in sys.stdin.readlines():%0A result = regex.match(line)%0A if not result:%0A sort%5Bline%5D = line%0A else:%0A sort%5Bresult.group(1)%5D = line%0A%0Afor k in sorted(sort.keys()):%0A sys.stdout.write(sort%5Bk%5D)%0A
|
|
3e885137d23e7618b78f207ecd6b2f6118a4a0dc
|
add a test file
|
test.py
|
test.py
|
Python
| 0.000001 |
@@ -0,0 +1,41 @@
+#!/usr/bin/python%0Aimport cgi%0Acgi.test()%0A%0A
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.