Dataset schema. Each data row below lists the following columns in order, pipe-separated, with the file content followed by per-file statistics:

| Column | Type | Range / length |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 5 to 2.06M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 248 |
| max_stars_repo_name | string | length 5 to 125 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 | 1 to 191k, nullable |
| max_stars_repo_stars_event_min_datetime | string | length 24, nullable |
| max_stars_repo_stars_event_max_datetime | string | length 24, nullable |
| max_issues_repo_path | string | length 3 to 248 |
| max_issues_repo_name | string | length 5 to 125 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 | 1 to 67k, nullable |
| max_issues_repo_issues_event_min_datetime | string | length 24, nullable |
| max_issues_repo_issues_event_max_datetime | string | length 24, nullable |
| max_forks_repo_path | string | length 3 to 248 |
| max_forks_repo_name | string | length 5 to 125 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 | 1 to 105k, nullable |
| max_forks_repo_forks_event_min_datetime | string | length 24, nullable |
| max_forks_repo_forks_event_max_datetime | string | length 24, nullable |
| content | string | length 5 to 2.06M |
| avg_line_length | float64 | 1 to 1.02M |
| max_line_length | int64 | 3 to 1.03M |
| alphanum_fraction | float64 | 0 to 1 |
| count_classes | int64 | 0 to 1.6M |
| score_classes | float64 | 0 to 1 |
| count_generators | int64 | 0 to 651k |
| score_generators | float64 | 0 to 1 |
| count_decorators | int64 | 0 to 990k |
| score_decorators | float64 | 0 to 1 |
| count_async_functions | int64 | 0 to 235k |
| score_async_functions | float64 | 0 to 1 |
| count_documentation | int64 | 0 to 1.04M |
| score_documentation | float64 | 0 to 1 |
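For reference, a minimal sketch of how rows with this schema can be inspected once they are loaded into a pandas DataFrame. The file name `code_files.parquet` and the filter thresholds are illustrative assumptions, not part of the dataset:

```python
import pandas as pd

# Assumption: the rows below have been exported to Parquet with the column names above.
df = pd.read_parquet("code_files.parquet")

# Keep Python files with some documentation and no extremely long lines.
subset = df[
    (df["lang"] == "Python")
    & (df["score_documentation"] > 0.1)
    & (df["max_line_length"] < 200)
]

# Inspect one row: repository, path, star count, and the start of the file content.
row = subset.iloc[0]
print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["max_stars_count"])
print("\n".join(row["content"].splitlines()[:10]))
```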
48cf0852e6ab6d1a0771fbc1bfea6839386062de | 1,287 | py | Python | tests/test_cmdline.py | lin-zone/scrapyu | abcb8eed2ea02121b74017e007c57c0d3762342d | ["MIT"] | 1 | 2021-01-05T09:11:42.000Z | 2021-01-05T09:11:42.000Z | tests/test_cmdline.py | lin-zone/scrapyu | abcb8eed2ea02121b74017e007c57c0d3762342d | ["MIT"] | null | null | null | tests/test_cmdline.py | lin-zone/scrapyu | abcb8eed2ea02121b74017e007c57c0d3762342d | ["MIT"] | null | null | null |
import sys
import subprocess
from tempfile import mkdtemp, TemporaryFile
from path import Path
from tests import TEST_DIR
args = (sys.executable, '-m', 'scrapyu.cmdline')
def test_genspider_list():
new_args = args + ('genspider', '-l')
res = subprocess.check_output(new_args)
assert res.split() == [b'Available', b'templates:', b'single', b'single_splash']
def test_single_template():
single_test_template = Path(TEST_DIR) / 'test_templates' / 'single.py'
cwd = mkdtemp()
new_args = args + ('genspider', 'single', 'www.scrapytest.org', '-t', 'single')
with TemporaryFile() as out:
subprocess.call(new_args, stdout=out, stderr=out, cwd=cwd)
t = Path(cwd) / 'single.py'
assert t.exists() is True
assert t.read_text() == single_test_template.read_text()
def test_single_splash_template():
single_splash_test_template = Path(TEST_DIR) / 'test_templates' / 'single_splash.py'
cwd = mkdtemp()
new_args = args + ('genspider', 'single-splash', 'www.scrapytest.org', '-t', 'single_splash')
with TemporaryFile() as out:
subprocess.call(new_args, stdout=out, stderr=out, cwd=cwd)
t = Path(cwd) / 'single_splash.py'
assert t.exists() is True
    assert t.read_text() == single_splash_test_template.read_text()
| 33.868421 | 97 | 0.688423 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 292 | 0.226884 |
48cf85efc52b96d39ed18f6149964691786778a9 | 3,257 | py | Python | src/olympia/amo/cron.py | dante381/addons-server | 9702860a19ecca1cb4e4998f37bc43c1b2dd3aa7 | ["BSD-3-Clause"] | null | null | null | src/olympia/amo/cron.py | dante381/addons-server | 9702860a19ecca1cb4e4998f37bc43c1b2dd3aa7 | ["BSD-3-Clause"] | null | null | null | src/olympia/amo/cron.py | dante381/addons-server | 9702860a19ecca1cb4e4998f37bc43c1b2dd3aa7 | ["BSD-3-Clause"] | null | null | null |
from datetime import datetime, timedelta
from django.core.files.storage import default_storage as storage
import olympia.core.logger
from olympia import amo
from olympia.activity.models import ActivityLog
from olympia.addons.models import Addon
from olympia.addons.tasks import delete_addons
from olympia.amo.utils import chunked
from olympia.files.models import FileUpload
from olympia.scanners.models import ScannerResult
from olympia.amo.models import FakeEmail
from . import tasks
from .sitemap import (
get_sitemap_path,
get_sitemaps,
get_sitemap_section_pages,
render_index_xml,
)
log = olympia.core.logger.getLogger('z.cron')
def gc(test_result=True):
"""Site-wide garbage collections."""
def days_ago(days):
return datetime.today() - timedelta(days=days)
log.info('Collecting data to delete')
logs = (
ActivityLog.objects.filter(created__lt=days_ago(90))
.exclude(action__in=amo.LOG_KEEP)
.values_list('id', flat=True)
)
for chunk in chunked(logs, 100):
tasks.delete_logs.delay(chunk)
two_weeks_ago = days_ago(15)
# Hard-delete stale add-ons with no versions. No email should be sent.
versionless_addons = Addon.unfiltered.filter(
versions__pk=None, created__lte=two_weeks_ago
).values_list('pk', flat=True)
for chunk in chunked(versionless_addons, 100):
delete_addons.delay(chunk, with_deleted=True)
# Delete stale FileUploads.
stale_uploads = FileUpload.objects.filter(created__lte=two_weeks_ago).order_by('id')
for file_upload in stale_uploads:
log.info(
'[FileUpload:{uuid}] Removing file: {path}'.format(
uuid=file_upload.uuid, path=file_upload.path
)
)
if file_upload.path:
try:
storage.delete(file_upload.path)
except OSError:
pass
file_upload.delete()
# Delete stale ScannerResults.
ScannerResult.objects.filter(upload=None, version=None).delete()
# Delete fake emails older than 90 days
FakeEmail.objects.filter(created__lte=days_ago(90)).delete()
def write_sitemaps(section=None, app_name=None):
index_filename = get_sitemap_path(None, None)
sitemaps = get_sitemaps()
if (not section or section == 'index') and not app_name:
with storage.open(index_filename, 'w') as index_file:
log.info('Writing sitemap index')
index_file.write(render_index_xml(sitemaps))
for _section, _app_name, _page in get_sitemap_section_pages(sitemaps):
if (section and section != _section) or (app_name and app_name != _app_name):
continue
if _page % 1000 == 1:
# log an info message every 1000 pages in a _section, _app_name
log.info(f'Writing sitemap file for {_section}, {_app_name}, {_page}')
filename = get_sitemap_path(_section, _app_name, _page)
with storage.open(filename, 'w') as sitemap_file:
sitemap_object = sitemaps.get((_section, amo.APPS.get(_app_name)))
if not sitemap_object:
continue
content = sitemap_object.render(app_name=_app_name, page=_page)
sitemap_file.write(content)
| 34.648936 | 88 | 0.684986 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 451 | 0.138471 |
48d0551fc7668ef91b0cbb625288bc4330046f92 | 642 | py | Python | day8/test_day8.py | bwbeach/advent-of-code-2020 | 572810c3adae5815543efde17a4bca9596d05a5b | ["CC0-1.0"] | null | null | null | day8/test_day8.py | bwbeach/advent-of-code-2020 | 572810c3adae5815543efde17a4bca9596d05a5b | ["CC0-1.0"] | null | null | null | day8/test_day8.py | bwbeach/advent-of-code-2020 | 572810c3adae5815543efde17a4bca9596d05a5b | ["CC0-1.0"] | null | null | null |
from day8.day8 import fix_code, parse_code, run
SAMPLE_CODE_LOOP = """nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
jmp -4
acc +6
"""
SAMPLE_CODE_HALT = """nop +0
acc +1
jmp +4
acc +3
jmp -3
acc -99
acc +1
nop -4
acc +6
"""
def test_parse():
assert parse_code("nop +0\nacc +1\nacc -6") == [("nop", 0), ("acc", 1), ("acc", -6)]
def test_run_loop():
code = parse_code(SAMPLE_CODE_LOOP)
assert run(code) == ("loop", 5)
def test_run_halt():
code = parse_code(SAMPLE_CODE_HALT)
assert run(code) == ("halt", 8)
def test_fix_code():
assert fix_code(parse_code(SAMPLE_CODE_LOOP)) == parse_code(SAMPLE_CODE_HALT)
| 15.285714 | 88 | 0.638629 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 191 | 0.297508 |
48d23528c08e020ee5f13c45ec80e61813e3bd41 | 6,128 | py | Python | biosys/apps/main/tests/api/test_misc.py | florianm/biosys | 934d06ed805b0734f3cb9a00feec6cd81a94e512 | ["Apache-2.0"] | 2 | 2018-04-09T04:02:30.000Z | 2019-08-20T03:12:55.000Z | biosys/apps/main/tests/api/test_misc.py | florianm/biosys | 934d06ed805b0734f3cb9a00feec6cd81a94e512 | ["Apache-2.0"] | 29 | 2016-01-20T08:14:15.000Z | 2017-07-13T07:17:32.000Z | biosys/apps/main/tests/api/test_misc.py | florianm/biosys | 934d06ed805b0734f3cb9a00feec6cd81a94e512 | ["Apache-2.0"] | 5 | 2016-01-14T23:02:36.000Z | 2016-09-21T05:35:03.000Z |
from django.shortcuts import reverse
from django.test import TestCase
from rest_framework import status
from rest_framework.test import APIClient
from main.models import Project
from main.tests import factories
from main.tests.api import helpers
class TestWhoAmI(helpers.BaseUserTestCase):
def setUp(self):
super(TestWhoAmI, self).setUp()
self.url = reverse('api:whoami')
def test_get(self):
client = self.anonymous_client
self.assertEqual(
client.get(self.url).status_code,
status.HTTP_200_OK
)
user = factories.UserFactory()
user.set_password('password')
user.save()
client = APIClient()
self.assertTrue(client.login(username=user.username, password='password'))
resp = client.get(self.url)
self.assertEqual(
resp.status_code,
status.HTTP_200_OK
)
# test that the response contains username, first and last name and email at least and the id
data = resp.json()
self.assertEqual(user.username, data['username'])
self.assertEqual(user.first_name, data['first_name'])
self.assertEqual(user.last_name, data['last_name'])
self.assertEqual(user.email, data['email'])
self.assertEqual(user.id, data['id'])
# test that the password is not in the returned fields
self.assertFalse('password' in data)
def test_not_allowed_methods(self):
client = self.readonly_client
self.assertEqual(
client.post(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
self.assertEqual(
client.put(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
self.assertEqual(
client.patch(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
class TestStatistics(TestCase):
def setUp(self):
self.url = reverse('api:statistics')
def test_get(self):
anonymous = APIClient()
client = anonymous
self.assertIn(
client.get(self.url).status_code,
[status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]
)
user = factories.UserFactory.create()
user.set_password('password')
user.save()
client = APIClient()
self.assertTrue(client.login(username=user.username, password='password'))
resp = client.get(self.url)
self.assertEqual(
resp.status_code,
status.HTTP_200_OK
)
# expected response with no data
expected = {
'projects': {'total': 0},
'datasets': {
'total': 0,
'generic': {'total': 0},
'observation': {'total': 0},
'speciesObservation': {'total': 0},
},
'records': {
'total': 0,
'generic': {'total': 0},
'observation': {'total': 0},
'speciesObservation': {'total': 0},
},
'sites': {'total': 0},
}
self.assertEqual(expected, resp.json())
# create one project
program = factories.ProgramFactory.create()
project = factories.ProjectFactory.create(program=program)
expected['projects']['total'] = 1
resp = client.get(self.url)
self.assertEqual(
resp.status_code,
status.HTTP_200_OK
)
self.assertEqual(expected, resp.json())
# create some sites
count = 3
factories.SiteFactory.create_batch(
count,
project=project
)
expected['sites']['total'] = count
resp = client.get(self.url)
self.assertEqual(
resp.status_code,
status.HTTP_200_OK
)
self.assertEqual(expected, resp.json())
def test_not_allowed_methods(self):
user = factories.UserFactory.create()
user.set_password('password')
user.save()
client = APIClient()
self.assertTrue(client.login(username=user.username, password='password'))
self.assertEqual(
client.post(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
self.assertEqual(
client.put(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
self.assertEqual(
client.patch(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
class TestSpecies(TestCase):
# set the species list to be the testing one
species_facade_class = helpers.LightSpeciesFacade
def setUp(self):
from main.api.views import SpeciesMixin
SpeciesMixin.species_facade_class = self.species_facade_class
self.url = reverse('api:species')
def test_get(self):
anonymous = APIClient()
client = anonymous
self.assertEqual(
client.get(self.url).status_code,
status.HTTP_200_OK
)
user = factories.UserFactory.create()
user.set_password('password')
user.save()
client = APIClient()
self.assertTrue(client.login(username=user.username, password='password'))
resp = client.get(self.url)
self.assertEqual(
resp.status_code,
status.HTTP_200_OK
)
def test_not_allowed_methods(self):
user = factories.UserFactory.create()
user.set_password('password')
user.save()
client = APIClient()
self.assertTrue(client.login(username=user.username, password='password'))
self.assertEqual(
client.post(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
self.assertEqual(
client.put(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
self.assertEqual(
client.patch(self.url, {}).status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
| 31.587629 | 101 | 0.590078 | 5,871 | 0.958061 | 0 | 0 | 0 | 0 | 0 | 0 | 678 | 0.11064 |
48d29ebbfa1dba9c5ef7d472e7d45e6999e1c63b | 531 | py | Python | src/netwrok/analytics.py | simonwittber/netwrok-server | d4767faa766e7ecb0de0c912f0c0a26b45b84189 | ["MIT"] | 16 | 2015-12-01T14:42:30.000Z | 2021-04-26T21:16:45.000Z | src/netwrok/analytics.py | DifferentMethods/netwrok-server | d4767faa766e7ecb0de0c912f0c0a26b45b84189 | ["MIT"] | null | null | null | src/netwrok/analytics.py | DifferentMethods/netwrok-server | d4767faa766e7ecb0de0c912f0c0a26b45b84189 | ["MIT"] | 4 | 2015-03-02T07:19:15.000Z | 2015-10-14T07:38:02.000Z |
import asyncio
import aiopg
from . import nwdb
from . import core
@core.handler
def register(client, path, event):
"""
    Register an event occurring at path. Created time is automatically added.
Useful for generic analytics type stuff.
"""
with (yield from nwdb.connection()) as conn:
cursor = yield from conn.cursor()
yield from cursor.execute("""
insert into analytics(member_id, path, event)
select %s, %s, %s
""", [client.session.get("member_id", None), path, event])
| 27.947368 | 76 | 0.653484 | 0 | 0 | 448 | 0.843691 | 462 | 0.870056 | 0 | 0 | 239 | 0.450094 |
48d3bd9308acb8eb9e29472526d5d05261bbdb90 | 635 | py | Python | monte_carlo/helpers/muaanalytical.py | nathhje/bachelorproject | 4bca826d1e065f647e2088b1fd028b1bdf863124 | ["MIT"] | null | null | null | monte_carlo/helpers/muaanalytical.py | nathhje/bachelorproject | 4bca826d1e065f647e2088b1fd028b1bdf863124 | ["MIT"] | null | null | null | monte_carlo/helpers/muaanalytical.py | nathhje/bachelorproject | 4bca826d1e065f647e2088b1fd028b1bdf863124 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
"""
Determines the reflectance based on r and mua.
"""
import math
import helpers.analyticalvalues as av
def reflectance(mua, r):
"""
mua: the absorption coefficient used.
r: the radial distance used.
"""
values = av.analyticalValues(r, mua)
# the value of the reflectance is determined
return (values.z0 * (values.ueff + values.rho1 ** -1) * math.exp( -values.ueff * values.rho1)
/ (values.rho1 ** 2) + (values.z0 + 2 * values.zb) * (values.ueff + values.rho2 ** -1)
* math.exp( -values.ueff * values.rho2) / (values.rho2 ** 2)) / 4 / math.pi
| 30.238095 | 99 | 0.60315 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 208 | 0.327559 |
48d3e34f960926be47270d979dba99f1e974b2b3 | 476 | py | Python | main/test_data.py | anna01111/demo_web_ui_test_suite | 69bedc25126b874774e2f51a83356dc9ee1b7e74 | ["CC0-1.0"] | null | null | null | main/test_data.py | anna01111/demo_web_ui_test_suite | 69bedc25126b874774e2f51a83356dc9ee1b7e74 | ["CC0-1.0"] | null | null | null | main/test_data.py | anna01111/demo_web_ui_test_suite | 69bedc25126b874774e2f51a83356dc9ee1b7e74 | ["CC0-1.0"] | null | null | null |
from faker import Faker
"""
More info: https://microservices-demo.github.io/docs/user-accounts.html
"""
# The demo app is shipped with the following account:
username = 'user'
password = 'password'
# Fake data that is used for new registrations:
faker = Faker()
autogenerated_username = faker.user_name()
autogenerated_first_name = faker.first_name()
autogenerated_last_name = faker.last_name()
autogenerated_email = faker.email()
autogenerated_password = faker.password()
| 26.444444 | 71 | 0.779412 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 195 | 0.409664 |
48d3f8d217b00f2ba74165ed887ea259202fee75 | 1,115 | py | Python | pfr/run.py | AnnaMag/pdf-flask-react | de89eb13b2e2e0d4418c28041fe294205f528b96 | ["BSD-2-Clause"] | 2 | 2019-01-04T16:55:05.000Z | 2019-08-28T20:16:47.000Z | pfr/run.py | AnnaMag/pdf-flask-react | de89eb13b2e2e0d4418c28041fe294205f528b96 | ["BSD-2-Clause"] | 2 | 2021-06-01T21:52:21.000Z | 2021-12-13T19:43:43.000Z | pfr/run.py | AnnaMag/pdf-flask-react | de89eb13b2e2e0d4418c28041fe294205f528b96 | ["BSD-2-Clause"] | null | null | null |
from io import StringIO
from io import BytesIO
import urllib
from urllib import request
import utils
from pdf_processing import scrape_gazette_names, get_info_outline
from data_parsing import save_to_dict
if __name__ == '__main__':
# not saving anything locally, just the names listed on the webpage to access the files later
url = 'http://www.gpwonline.co.za/Gazettes/Pages/Published-National-Regulation-Gazettes.aspx'
doc_names = scrape_gazette_names(url)
db_name = 'gov_docs'
db_collection = 'nat_reg'
collection = utils.set_collection(db_name, db_collection)
for url in doc_names[0][3:5]:
print(url)
fp = BytesIO(urllib.request.urlopen(url).read())
info, device, pages_skipped = get_info_outline(fp)
print(info)
        # pages_skipped should be the pages for extraction - for now it is used to monitor problems
gaz_dict = save_to_dict(device.interesting_text, device.aux_text, \
pages_skipped, info, device.page_number, url)
print(gaz_dict)
utils.write_db(collection, gaz_dict)
| 33.787879 | 97 | 0.699552 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 288 | 0.258296 |
48d4f15c7fa28d9ec9d8b63f2ea935ca7b5152ba | 1,246 | py | Python | day9/day9.py | jaredledvina/adventofcode2020 | 2a31fd88c0b6bddd2c06327d04e6630b8fb29909 | ["MIT"] | 1 | 2020-12-09T14:50:49.000Z | 2020-12-09T14:50:49.000Z | day9/day9.py | jaredledvina/adventofcode2020 | 2a31fd88c0b6bddd2c06327d04e6630b8fb29909 | ["MIT"] | null | null | null | day9/day9.py | jaredledvina/adventofcode2020 | 2a31fd88c0b6bddd2c06327d04e6630b8fb29909 | ["MIT"] | null | null | null |
#!/usr/bin/env python3
import itertools
def read_input():
with open('input.txt') as f:
puzzle_input = f.read().splitlines()
puzzle_input = [int(num) for num in puzzle_input]
return puzzle_input
def part1(puzzle_input):
preamble = puzzle_input[:25]
remaining = puzzle_input[25:]
for item in remaining:
found_match = False
for product in itertools.product(preamble, preamble):
if product[0] + product[1] == item:
found_match = True
preamble.append(item)
preamble.pop(0)
break
if not found_match:
return item
def part2(puzzle_input):
invalid = part1(puzzle_input)
for position in range(len(puzzle_input)):
combination_position = 0
for combination in itertools.accumulate(puzzle_input[position:]):
if combination == invalid:
return min(puzzle_input[position:combination_position+position]) + max(puzzle_input[position:combination_position+position])
combination_position += 1
def main():
puzzle_input = read_input()
print(part1(puzzle_input))
print(part2(puzzle_input))
if __name__ == '__main__':
    main()
| 29.666667 | 140 | 0.629213 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 43 | 0.03451 |
48d562ad3234975315fbded1821628c64f55b9d7 | 4,461 | py | Python | streamlitfront/tests/common.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | ["Apache-2.0"] | null | null | null | streamlitfront/tests/common.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | ["Apache-2.0"] | 1 | 2022-02-03T15:21:57.000Z | 2022-02-05T00:51:33.000Z | streamlitfront/tests/common.py | i2mint/streamlitfront | 6fbc03a42cdb7436dcda3da00fb9b42965bbb582 | ["Apache-2.0"] | null | null | null |
from contextlib import contextmanager
from functools import partial
from inspect import Parameter
from random import choice, randint, uniform
import string
from typing import Any
from i2 import Sig
from numbers import Number
from sys import platform
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver import Chrome, ChromeOptions
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.chrome.service import Service
from webdriver_manager.chrome import ChromeDriverManager
from strand import run_process
from streamlitfront.run_app import run_app
from time import sleep
import dill
import pickle
STREAMLIT_APP_URL = 'http://localhost:8501'
@contextmanager
def dispatch_funcs_with_selenium(funcs, headless=False):
"""
    Dispatches the functions in a streamlit application and builds a selenium
    object representing the root of the DOM for the application.
"""
serialize_funcs = False
try:
pickle.dumps(funcs)
except:
serialize_funcs = True
_funcs = dill.dumps(funcs) if serialize_funcs else funcs
with run_process(func=run_app, func_kwargs={'funcs': _funcs}, is_ready=3) as proc:
options = ChromeOptions()
# options.add_argument('--no-sandbox')
options.add_argument('--window-size=1920,1080')
if headless:
options.add_argument('--headless')
# options.add_argument('--disable-gpu')
# options.add_argument('--allow-running-insecure-content')
dom = Chrome(service=Service(ChromeDriverManager().install()), options=options)
dom.get(STREAMLIT_APP_URL)
try:
yield dom
finally:
dom.close()
def give_a_chance_to_render_element(func):
"""
    Gives the application a chance to render the element by trying up to three
    times, with a 1-second interval, before raising an error.
"""
# @wrap(func)
def wrapper(*args, **kwargs):
def _try_to_find_element(intent_nb):
try:
return func(*args, **kwargs)
except NoSuchElementException:
if intent_nb < 3:
sleep(1)
return _try_to_find_element(intent_nb + 1)
raise
return _try_to_find_element(1)
return wrapper
@give_a_chance_to_render_element
def find_element_by_css_selector(css_selector, root):
return root.find_element(By.CSS_SELECTOR, css_selector)
def select_func(idx, root):
radio_button = find_element_by_css_selector(
f".block-container .stRadio div[role='radiogroup'] label:nth-child({idx + 1})",
root,
)
radio_button.click()
sleep(0.5)
def send_input(input_, idx, root):
def get_input_type():
if isinstance(input_, Number):
return 'number'
if isinstance(input_, str):
return 'text'
input_type = get_input_type()
input_el = find_element_by_css_selector(
f".main .element-container:nth-child({idx + 2}) input[type='{input_type}']",
root,
)
input_el.click()
select_all_first_key = Keys.COMMAND if platform == 'darwin' else Keys.CONTROL
input_el.send_keys(select_all_first_key, 'a')
input_el.send_keys(str(input_))
def compute_output(func, root):
def get_output(previous_output=None, intent_nb=1):
output_el = find_element_by_css_selector(output_css_selector, root)
if output_el.find_elements(By.TAG_NAME, 'code'):
output_el = find_element_by_css_selector('code', output_el)
output = output_el.text
return_annot = Sig(func).return_annotation
if return_annot not in (Parameter.empty, Any):
output = return_annot(output)
if previous_output is not None and output == previous_output and intent_nb < 3:
sleep(1)
return get_output(previous_output, intent_nb + 1)
return output
def get_previous_output():
if root.find_elements(By.CSS_SELECTOR, output_css_selector):
return get_output()
nb_args = len(Sig(func))
output_css_selector = f'.element-container:nth-child({nb_args + 3}) .stMarkdown p'
previous_output = get_previous_output()
submit_button = find_element_by_css_selector(
f'.element-container:nth-child({nb_args + 2}) button', root
)
submit_button.click()
return get_output(previous_output)
| 33.795455 | 87 | 0.68998 | 0 | 0 | 989 | 0.221699 | 1,151 | 0.258014 | 0 | 0 | 836 | 0.187402 |
48d584fffe50d5a164a634c7bdeab43a85e1c776 | 16,094 | py | Python | Python_files/analyse.py | Deniz-shelby/goodreads_webscrap | 80be6eb85f8a128eeeef2f845726557852991463 | ["Apache-2.0"] | null | null | null | Python_files/analyse.py | Deniz-shelby/goodreads_webscrap | 80be6eb85f8a128eeeef2f845726557852991463 | ["Apache-2.0"] | 1 | 2021-04-14T07:41:49.000Z | 2021-04-14T10:02:45.000Z | Python_files/analyse.py | Deniz-shelby/goodreads_webscrap | 80be6eb85f8a128eeeef2f845726557852991463 | ["Apache-2.0"] | 2 | 2021-04-14T05:31:24.000Z | 2021-04-19T08:00:40.000Z |
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from matplotlib import rcParams
from sklearn.preprocessing import MinMaxScaler
import warnings
import scipy.stats as st
import statsmodels as sm
def analyse(df_input,df_all_input):
df = df_input
fig, ax = plt.subplots(figsize=(17,8))
plt.scatter(df['num_pages'],df['num_ratings'],
label = 'books',
color = 'lightpink',
edgecolor = 'darkviolet')
plt.xlabel('num_pages', fontsize=20,labelpad=20)
plt.ylabel('num_ratings', fontsize=20,labelpad=20)
plt.title('2D Scatterplot', fontsize=38,y=1.15)
plt.xlim(0,1900)
plt.xticks(np.arange(0,1900,100),fontsize=14, rotation=45)
#plt.ylim(0,max(df['num_ratings']))
plt.yticks(np.arange(0,max(df['num_ratings']),1000000),fontsize=14)
plt.grid(True,linestyle='dashed')
plt.show()
# 3
x=df['num_pages']
y=df['num_ratings']
# Pearson
pearson = st.pearsonr(x, y)
print(f'Pearson: Correlation= {pearson[0]} , p-value= {pearson[1]}')
# Spear
spear = st.spearmanr(x, y)
print(f'Spear: Correlation= {spear[0]} , p-value= {spear[1]}')
# Kendal
kendal = st.kendalltau(x,y)
print(f'Kendal: Correlation= {kendal [0]} , p-value= {kendal [1]}')
# python
python_corr = df['num_pages'].corr(df['num_ratings'])
print(f'Correlation= {python_corr}')
#### avg_rating
fig, ax = plt.subplots(figsize=(17,8))
plt.hist(df.avg_rating,
bins = np.arange(3.5,4.65,0.1), ## change for a better bin scale
color='cornflowerblue',
edgecolor = "white")
plt.xticks(np.arange(3.5,4.65,0.1),fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("Averge Rating", fontsize=20,labelpad=20)
plt.ylabel("Books", fontsize=20,labelpad=20)
plt.title('Distribution of avg_rating', fontsize=28,y=1.15)
plt.grid(True,linestyle='dashed')
plt.show()
fig, ax = plt.subplots(figsize=(17,8))
plt.hist(df.avg_rating,
bins = np.arange(3.5,4.65,0.025), ## change for a better bin scale
color='cornflowerblue',
edgecolor = "white")
plt.xticks(np.arange(3.5,4.65,0.1),fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("Averge Rating", fontsize=20,labelpad=20)
plt.ylabel("Books", fontsize=20,labelpad=20)
plt.title('Distribution of avg_rating', fontsize=28,y=1.15)
plt.grid(True,linestyle='dashed')
plt.show()
fig, ax = plt.subplots(figsize=(17,8))
plt.hist(df.avg_rating,
bins = np.arange(3.5,4.65,0.01), ## change for a better bin scale
color='cornflowerblue',
edgecolor = "white")
plt.xticks(np.arange(3.5,4.65,0.1),fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("Averge Rating", fontsize=20,labelpad=20)
plt.ylabel("Books", fontsize=20,labelpad=20)
plt.title('Distribution of avg_rating', fontsize=28,y=1.15)
plt.grid(True,linestyle='dashed')
plt.show()
### 4
fig, ax = plt.subplots(figsize=(17,8))
plt.hist(df.minmax_norm_ratings,
bins = np.arange(0,10,0.5), ## change for a better bin scale
color='cornflowerblue',
edgecolor = "white")
plt.xticks(np.arange(0,10,1),fontsize=14, rotation=45)
plt.xticks(fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("minmax_norm Rating", fontsize=20,labelpad=20)
plt.ylabel("Books", fontsize=20,labelpad=20)
plt.title('Distribution of minmax_norm_ratings', fontsize=28,y=1.15)
plt.grid(True,linestyle='dashed')
plt.xlim(0,10)
plt.show()
### 4
fig, ax = plt.subplots(figsize=(17,8))
plt.hist(df.minmax_norm_ratings,
bins = np.arange(0,10,0.1), ## change for a better bin scale
color='cornflowerblue',
edgecolor = "white")
plt.xticks(np.arange(0,10,1),fontsize=14, rotation=45)
plt.xticks(fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("minmax_norm Rating", fontsize=20,labelpad=20)
plt.ylabel("Books", fontsize=20,labelpad=20)
plt.title('Distribution of minmax_norm_ratings', fontsize=28,y=1.15)
plt.grid(True,linestyle='dashed')
plt.xlim(0,10)
plt.show()
### 5
fig, ax = plt.subplots(figsize=(17,8))
plt.hist(df.mean_norm_ratings,
bins = np.arange(0,10,0.5), ## change for a better bin scale
color='cornflowerblue',
edgecolor = "white")
plt.xticks(np.arange(2,9,1),fontsize=14, rotation=45)
plt.xticks(fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("mean_norm Rating", fontsize=20,labelpad=20)
plt.ylabel("books", fontsize=20,labelpad=20)
plt.title('Distribution of mean_norm_ratings', fontsize=28,y=1.15)
plt.grid(True,linestyle='dashed')
plt.xlim(2,9)
plt.show()
fig, ax = plt.subplots(figsize=(17,8))
plt.hist(df.mean_norm_ratings,
bins = np.arange(2,9,0.1), ## change for a better bin scale
color='cornflowerblue',
edgecolor = "white")
plt.xticks(np.arange(0,10,1),fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("mean_norm Rating", fontsize=20,labelpad=20)
plt.ylabel("books", fontsize=20,labelpad=20)
plt.title('Distribution of mean_norm_ratings', fontsize=28,y=1.2)
plt.grid(True,linestyle='dashed')
plt.xlim(2,9)
plt.show()
# 6
fig, ax = plt.subplots(figsize=(14,8))
bins =np.arange(0,10,1)
plt.hist([df['minmax_norm_ratings'],df['mean_norm_ratings']],
bins,
label=['minamx_norm_ratings','mean_norm_ratings'],
color=['cornflowerblue','lightpink'],
edgecolor = "white")
plt.xticks(np.arange(0,10,0.5),fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("norm_rating", fontsize=20,labelpad=20)
plt.ylabel("books", fontsize=20,labelpad=20)
plt.title('Distribution of mean_norm_ratings', fontsize=28,y=1.2)
plt.grid(True,linestyle='dashed')
plt.xlim(0,10)
plt.show()
fig, ax = plt.subplots(figsize=(17,8))
bins =np.arange(0,10,0.5)
plt.hist([df['minmax_norm_ratings'],df['mean_norm_ratings']],
bins,
label=['minamx_norm_ratings','mean_norm_ratings'],
color=['cornflowerblue','lightpink'],
edgecolor = "white")
plt.xticks(np.arange(0,10,0.5),fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("norm_rating", fontsize=20,labelpad=20)
plt.ylabel("Books", fontsize=20,labelpad=20)
plt.title('Distribution of mean_norm_ratings', fontsize=28,y=1.2)
plt.grid(True,linestyle='dashed')
plt.xlim(0,10)
plt.show()
    # Use the names imported at the top of the file ('matplotlib' itself is not imported)
    rcParams['figure.figsize'] = (18, 10)
    plt.style.use('ggplot')
# Create models from data
def best_fit_distribution(data, bins=200, ax=None):
"""Model data by finding best fit distribution to data"""
# Get histogram of original data
y, x = np.histogram(data, bins=bins, density=True)
x = (x + np.roll(x, -1))[:-1] / 2.0
# Distributions to check
DISTRIBUTIONS = [
st.alpha,st.anglit,st.arcsine,st.beta,st.betaprime,st.bradford,st.burr,st.cauchy,st.chi,st.chi2,st.cosine,
st.dgamma,st.dweibull,st.erlang,st.expon,st.exponnorm,st.exponweib,st.exponpow,st.f,st.fatiguelife,st.fisk,
#st.foldcauchy,st.foldnorm,st.frechet_r,st.frechet_l,st.genlogistic,st.genpareto,st.gennorm,st.genexpon,
st.genextreme,st.gausshyper,st.gamma,st.gengamma,st.genhalflogistic,st.gilbrat,st.gompertz,st.gumbel_r,
st.gumbel_l,st.halfcauchy,st.halflogistic,st.halfnorm,st.halfgennorm,st.hypsecant,st.invgamma,st.invgauss,
st.invweibull,st.johnsonsb,st.johnsonsu,st.ksone,st.kstwobign,st.laplace,st.levy,st.levy_l,st.levy_stable,
st.logistic,st.loggamma,st.loglaplace,st.lognorm,st.lomax,st.maxwell,st.mielke,st.nakagami,st.ncx2,st.ncf,
st.nct,st.norm,st.pareto,st.pearson3,st.powerlaw,st.powerlognorm,st.powernorm,st.rdist,st.reciprocal,
st.rayleigh,st.rice,st.recipinvgauss,st.semicircular,st.t,st.triang,st.truncexpon,st.truncnorm,st.tukeylambda,
st.uniform,st.vonmises,st.vonmises_line,st.wald,st.weibull_min,st.weibull_max,st.wrapcauchy
]
# Best holders
best_distribution = st.norm
best_params = (0.0, 1.0)
best_sse = np.inf
# Estimate distribution parameters from data
for distribution in DISTRIBUTIONS:
# Try to fit the distribution
try:
# Ignore warnings from data that can't be fit
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
# fit dist to data
params = distribution.fit(data)
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Calculate fitted PDF and error with fit in distribution
pdf = distribution.pdf(x, loc=loc, scale=scale, *arg)
sse = np.sum(np.power(y - pdf, 2.0))
# if axis pass in add to plot
try:
if ax:
pd.Series(pdf, x).plot(ax=ax)
except Exception:
pass
# identify if this distribution is better
if best_sse > sse > 0:
best_distribution = distribution
best_params = params
best_sse = sse
except Exception:
pass
return (best_distribution.name, best_params)
def make_pdf(dist, params, size=10000):
"""Generate distributions's Probability Distribution Function """
# Separate parts of parameters
arg = params[:-2]
loc = params[-2]
scale = params[-1]
# Get sane start and end points of distribution
start = dist.ppf(0.01, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.01, loc=loc, scale=scale)
end = dist.ppf(0.99, *arg, loc=loc, scale=scale) if arg else dist.ppf(0.99, loc=loc, scale=scale)
# Build PDF and turn into pandas Series
x = np.linspace(start, end, size)
y = dist.pdf(x, loc=loc, scale=scale, *arg)
pdf = pd.Series(y, x)
return pdf
# Plot for comparison takes time
plt.figure(figsize=(15,10))
#ax = data.plot(kind='hist', bins=50, normed=True, alpha=0.5, color=plt.rcParams['axes.color_cycle'][1])
ax = df.minmax_norm_ratings.hist(
bins=20,
alpha=0.5,
density=True,
color='cornflowerblue',
edgecolor = 'white')
# Save plot limits
dataYLim = ax.get_ylim()
# Find best fit distribution
best_fit_name, best_fit_params = best_fit_distribution(df.minmax_norm_ratings, 200, ax)
best_dist = getattr(st, best_fit_name)
# Update plots
ax.set_ylim(dataYLim)
ax.set_title(u'Minmax norm rating')
ax.set_xlabel(u'Frequency')
ax.set_ylabel('Frequency')
# runs fast
    # Build the PDF for the best-fitting distribution before plotting it
    pdf = make_pdf(best_dist, best_fit_params)
    plt.figure(figsize=(14,8))
    ax = pdf.plot(lw=2, label='PDF', legend=True)
df.minmax_norm_ratings.plot(kind='hist',
bins=50,
density=True,
alpha=0.5,
label='Data',
color='cornflowerblue',
legend=True,
ax=ax)
param_names = (best_dist.shapes + ', loc, scale').split(', ') if best_dist.shapes else ['loc', 'scale']
param_str = ', '.join(['{}={:0.2f}'.format(k,v) for k,v in zip(param_names, best_fit_params)])
dist_str = '{}({})'.format(best_fit_name, param_str)
ax.set_title(u'minmax_norm with best fit distribution \n' + dist_str)
ax.set_xlabel(u'norm_ratings')
ax.set_ylabel('Frequency')
########## 8
###
fig, ax = plt.subplots(figsize=(17,8))
plt.hist(df.awards_count,
bins = np.arange(0,30,1), ## change for a better bin scale
color='cornflowerblue',
edgecolor = "white")
plt.xticks(np.arange(1,30,1),fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xlabel("mean_norm awards_count", fontsize=20,labelpad=20)
plt.ylabel("frequency", fontsize=20,labelpad=20)
plt.title('awards_count', fontsize=28,y=1.2)
plt.grid(True,linestyle='dashed')
plt.xlim(1,30)
plt.show()
fig, ax = plt.subplots(figsize=(17,8))
aggregate = df.groupby('original_publish_year')['awards_count'].agg('max','mean')
plt.hist(aggregate,
bins = np.arange(0,30,1), ## change for a better bin scale
color=['cornflowerblue'],
edgecolor = "white")
plt.xticks(fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xticks(np.arange(1,30,1),fontsize=14, rotation=45)
plt.xlabel("mean_norm awards_count", fontsize=20,labelpad=20)
plt.ylabel("awards", fontsize=20,labelpad=20)
plt.title('Aggregation plot for awards', fontsize=28,y=1.2)
plt.grid(True,linestyle='dashed')
plt.xlim(1,30,1)
plt.show()
fig, ax = plt.subplots(figsize=(10,8))
plt.boxplot(df['awards_count'])
plt.xticks(fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xticks()
plt.ylabel("awards", fontsize=20,labelpad=20)
plt.title('Awards distribution', fontsize=28,y=1.2)
plt.grid(True,linestyle='dashed')
ax.set_xticks([])
plt.show()
## 9
year_minmax=df.groupby("original_publish_year")['minmax_norm_ratings'].mean().round(decimals=2)
fig, ax = plt.subplots(figsize=(17,8))
plt.plot(year_minmax,color='cornflowerblue')
plt.xticks(fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xticks(np.arange(1900,2001,10),fontsize=14, rotation=45)
plt.xlabel("year", fontsize=20,labelpad=20)
plt.ylabel("aminmax_norm_ratings", fontsize=20,labelpad=20)
plt.title('Average Ratings by Year', fontsize=28,y=1.2)
plt.grid(True,linestyle='dashed')
plt.xlim(1900,2000)
plt.show()
##10
fig, ax = plt.subplots(figsize=(17,8))
plt.scatter(df['original_publish_year'],df['minmax_norm_ratings'],
label = 'books',
color = 'lightpink',
edgecolor = 'darkviolet')
plt.xticks(fontsize=14, rotation=45)
plt.yticks(fontsize=14)
plt.xticks(np.arange(1900,2001,10),fontsize=14, rotation=45)
plt.xlabel("year", fontsize=20,labelpad=20)
plt.ylabel("aminmax_norm_ratings", fontsize=20,labelpad=20)
plt.title('Average Ratings by Year', fontsize=28,y=1.2)
plt.grid(True,linestyle='dashed')
plt.xlim(1900,2000)
plt.show()
df_all = df_all_input
    count_awards = len(df)  # all books with awards
count_all = len(df_all) # get all
#Series all
series_count_all = df_all['series'].value_counts()
count_have_series_all = series_count_all[True]
count_no_series_all = series_count_all[False]
prob_series_all=count_have_series_all/count_all
prob_series_perc_all=round((count_have_series_all/count_all)*100,2)
print(f'Probabilty of having a series is in all : {prob_series_perc_all} %')
#Series in award
series_count = df['series'].value_counts()
count_have_series = series_count[True]
count_no_series = series_count[False]
prob_series=count_have_series/count_awards
prob_series_perc=round((count_have_series/count_awards)*100,2)
print(f'Probabilty of having a series is : {prob_series_perc} %')
#Awards
    prob_awards=count_awards/1100
prob_awards_perc=round((count_awards/1100)*100,2)
print(f'Probabilty of having a awards is : {prob_awards_perc} %')
##
prob=round(prob_awards_perc*prob_series_perc/prob_series_perc_all,2)
    print(f'probability that a book that is part of a series has won an award is: {prob} %')
| 36.494331 | 122 | 0.626942 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,815 | 0.237045 |
48d7564242b45a65bf822b185e1203ecbd2093a0 | 773 | py | Python | 3 assignment/number_of_digits_unitest.py | nastae/programavimas_python | 7e65ad834c5f52e146fb5fcd0408b344545dc30e | ["Apache-2.0"] | null | null | null | 3 assignment/number_of_digits_unitest.py | nastae/programavimas_python | 7e65ad834c5f52e146fb5fcd0408b344545dc30e | ["Apache-2.0"] | null | null | null | 3 assignment/number_of_digits_unitest.py | nastae/programavimas_python | 7e65ad834c5f52e146fb5fcd0408b344545dc30e | ["Apache-2.0"] | null | null | null |
import unittest
def number_of_digits(s):
return sum(c.isdigit() for c in s)
# Write unittests for function X
class Test(unittest.TestCase):
def test_only_digits(self):
s = "123456789"
self.assertEqual(number_of_digits(s), 9)
def test_only_letters(self):
s = "abcdef"
self.assertEqual(number_of_digits(s), 0)
def test_digits_between_letters(self):
s = "asd123asd123asd"
self.assertEqual(number_of_digits(s), 6)
def test_letters_between_digits(self):
s = "123asd123asd123"
self.assertEqual(number_of_digits(s), 9)
def test_neither_letter_or_digit(self):
s = ",./;';'[]`"
self.assertEqual(number_of_digits(s), 0)
if __name__ == '__main__':
unittest.main()
| 24.935484 | 48 | 0.65718 | 605 | 0.781654 | 0 | 0 | 0 | 0 | 0 | 0 | 112 | 0.144703 |
48d79b6a3679e4354a437a7315a9dd9bd23f2c50 | 3,971 | py | Python | scraper/edx.py | thanasis457/Mooc-platform | 5ff3b7b43fadc86ec5d4d54db6963449a6610bb5 | ["MIT"] | 4 | 2020-08-30T12:18:27.000Z | 2021-05-19T06:42:13.000Z | scraper/edx.py | thanasis457/Mooc-platform | 5ff3b7b43fadc86ec5d4d54db6963449a6610bb5 | ["MIT"] | 1 | 2021-01-28T20:21:48.000Z | 2021-01-28T20:21:48.000Z | scraper/edx.py | thanasis457/Mooc-platform | 5ff3b7b43fadc86ec5d4d54db6963449a6610bb5 | ["MIT"] | 1 | 2020-09-14T13:20:05.000Z | 2020-09-14T13:20:05.000Z |
import requests, json, bs4, urllib.parse, math
from . import Course, Platform
class Edx(Platform):
name = 'edX'
def _urls(self):
res = requests.get(make_url())
count = json.loads(res.text)['objects']['count']
num_pages = math.ceil(count / 20)
urls = [make_url(page=page) for page in range(1, num_pages + 1)]
return urls
def _parse(self, url):
res = requests.get(url)
courses = []
results = res.json()['objects']['results']
for result in results:
title = result['title']
if result['full_description']:
description = html_to_text(result['full_description'])
else:
description = result['short_description']
snippet = ''
if result['short_description'] and result['short_description'] != '.':
snippet = result['short_description']
url = result['marketing_url']
tags = [subject_uuids.get(uuid) for uuid in result['subject_uuids']]
partners = [result.get('org')]
course = Course(title, partners, self.name,
description, tags, url, snippet=snippet)
courses.append(course)
return courses
subject_uuids = {'d8244ef2-45fb-4be3-a9d7-a6749cee3b19': 'Architecture',
'2cc66121-0c07-407b-96c4-99305359a36f': 'Art & Culture',
'9d5b5edb-254a-4d54-b430-776f1f00eaf0': 'Biology & Life Sciences',
'409d43f7-ff36-4834-9c28-252132347d87': 'Business & Management',
'c5ec1f86-4e59-4273-8e22-ceec2b8d10a2': 'Chemistry',
'605bb663-a342-4cf3-b5a5-fee2f33f1642': 'Communication',
'e52e2134-a4e4-4fcb-805f-cbef40812580': 'Computer Science',
'a168a80a-4b6c-4d92-9f1d-4c235206feaf': 'Data Analysis & Statistics',
'34173fb0-fe3d-4715-b4e0-02a9426a873c': 'Design',
'bab458d9-19b3-476e-864f-8abd1d1aab44': 'Economics & Finance',
'8ac7a3da-a60b-4565-b361-384baaa49279': 'Education & Teacher Training',
'337dfb23-571e-49d7-9c8e-385120dea6f3': 'Electronics',
'07406bfc-76c4-46cc-a5bf-2deace7995a6': 'Energy & Earth Sciences',
'0d7bb9ed-4492-419a-bb44-415adafd9406': 'Engineering',
'8aaac548-1930-4614-aeb4-a089dae7ae26': 'Environmental Studies',
'8a552a20-963e-475c-9b0d-4c5efe22d015': 'Ethics',
'caa4db79-f325-41ca-8e09-d5bb6e148240': 'Food & Nutrition',
'51a13a1c-7fc8-42a6-9e96-6636d10056e2': 'Health & Safety',
                 'c8579e1c-99f2-4a95-988c-3542909f055e': 'History',
'00e5d5e0-ce45-4114-84a1-50a5be706da5': 'Humanities',
'32768203-e738-4627-8b04-78b0ed2b44cb': 'Language',
'4925b67d-01c4-4287-a8d1-a3e0066113b8': 'Law',
'74b6ed2a-3ba0-49be-adc9-53f7256a12e1': 'Literature',
'a669e004-cbc0-4b68-8882-234c12e1cce4': 'Math',
'a5db73b2-05b4-4284-beef-c7876ec1499b': 'Medicine',
'f520dcc1-f5b7-42fe-a757-8acfb1e9e79d': 'Music',
'830f46dc-624e-46f4-9df0-e2bc6b346956': 'Philosophy & Ethics',
'88eb7ca7-2296-457d-8aac-e5f7503a9333': 'Physics',
'f830cfeb-bb7e-46ed-859d-e2a9f136499f': 'Science',
'eefb009b-0a02-49e9-b1b1-249982b6ce86': 'Social Sciences'}
def make_url(page=1):
params = {'selected_facets[]': 'transcript_languages_exact:English',
'partner': 'edx',
'content_type[]': 'courserun',
'page': page,
'page_size': 20}
return 'https://www.edx.org/api/v1/catalog/search?' + urllib.parse.urlencode(params)
def html_to_text(html):
soup = bs4.BeautifulSoup(html, 'lxml')
return soup.text
| 44.617978 | 88 | 0.576681 | 1,257 | 0.316545 | 0 | 0 | 0 | 0 | 0 | 0 | 1,953 | 0.491816 |
48d950cb515fdc01c87e2cf97d07a2e9d9b96b55 | 8,409 | py | Python | main.py | LaudateCorpus1/TotalConnect2.0_API-Arm-Disarm | 96885410defa036b37b5f6ae86b322de89c850ae | ["MIT"] | 1 | 2017-03-06T03:44:40.000Z | 2017-03-06T03:44:40.000Z | main.py | LaudateCorpus1/TotalConnect2.0_API-Arm-Disarm | 96885410defa036b37b5f6ae86b322de89c850ae | ["MIT"] | null | null | null | main.py | LaudateCorpus1/TotalConnect2.0_API-Arm-Disarm | 96885410defa036b37b5f6ae86b322de89c850ae | ["MIT"] | 2 | 2020-01-20T12:57:55.000Z | 2022-02-08T07:03:58.000Z |
#!/usr/local/bin/python2.7
#FREEBSD 2 Minutes ARP Expires - /bin/echo "net.link.ether.inet.max_age 300" >> /etc/sysctl.conf
#Crontab -e "* * * * * /usr/local/bin/python2.7 /root/Security.py"
import subprocess
import ConfigParser
import string, os, sys, httplib
import xml.etree.ElementTree as ET
from datetime import datetime, time
now = datetime.now()
now_time = now.time()
#---- BOL FOR CONFIGURATION INI ----#
# Documentation: https://wiki.python.org/moin/ConfigParserExamples #
Config = ConfigParser.ConfigParser()
Config.read("Security.ini")
cfgfile = open("Security.ini")
def BoolConfigSectionMap(section):
dict1 = {}
options = Config.options(section)
for option in options:
try:
dict1[option] = Config.getboolean(section, option)
if dict1[option] == -1:
DebugPrint("skip: %s" % option)
except:
print("exception on %s!" % option)
dict1[option] = None
return dict1
def ConfigSectionMap(section):
dict1 = {}
options = Config.options(section)
for option in options:
try:
dict1[option] = Config.get(section, option)
if dict1[option] == -1:
DebugPrint("skip: %s" % option)
except:
print("exception on %s!" % option)
dict1[option] = None
return dict1
state = BoolConfigSectionMap("Status")['armed']
#---- EOL FOR CONFIGURATION INI ----#
device1 = '00:00:00:00:00:00'
device2 = '00:00:00:00:00:00'
device3 = '00:00:00:00:00:00'
#---- BOL for LOG Output ---- #
Log = open('SecurityAuditlog.txt', 'w')
print >> Log, "---------",now_time,"---------"
#---- BOL API Section ----#
def TC2_SOAPSessionID():
global sessionHash
server_addr = "rs.alarmnet.com"
service_action = "/TC21API/TC2.asmx"
username = ConfigSectionMap("Authentication")['username']
password = ConfigSectionMap("Authentication")['password']
body = """
<soapenv:Envelope xmlns:soapenv="http://schemas.xmlsoap.org/soap/envelope/" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"><soapenv:Header/><soapenv:Body><tns:AuthenticateUserLoginEx xmlns:tns="https://services.alarmnet.com/TC2/"><tns:userName>%s</tns:userName>"""
body1 = """<tns:password>%s</tns:password><tns:ApplicationID>14588</tns:ApplicationID><tns:ApplicationVersion>3.14.2</tns:ApplicationVersion><tns:LocaleCode></tns:LocaleCode></tns:AuthenticateUserLoginEx></soapenv:Body></soapenv:Envelope>"""
request = httplib.HTTPSConnection(server_addr)
request.putrequest("POST", service_action)
request.putheader("Accept", "application/soap+xml, application/dime, multipart/related, text/*")
request.putheader("Content-Type", "text/xml; charset=utf-8")
request.putheader("Cache-Control", "no-cache")
request.putheader("Pragma", "no-cache")
request.putheader("SOAPAction","https://services.alarmnet.com/TC2/AuthenticateUserLoginEx")
request.putheader("Content-Length", str(len(body % username + body1 % password)))
request.endheaders()
request.send(body % username + body1 % password)
response = request.getresponse().read()
tree = ET.fromstring(response)
sessionHash = tree.find('.//{https://services.alarmnet.com/TC2/}SessionID').text
return
def TC2_DisarmSecuritySystem():
TC2_SOAPSessionID()
server_addr = "rs.alarmnet.com"
service_action = "/TC21API/TC2.asmx"
body = ("""<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/" xmlns:s="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<SOAP-ENV:Body>
<tns:DisarmSecuritySystem xmlns:tns="https://services.alarmnet.com/TC2/">
<tns:SessionID>%s</tns:SessionID>
<tns:LocationID>0</tns:LocationID>
<tns:DeviceID>0</tns:DeviceID>
<tns:UserCode>-1</tns:UserCode>
</tns:DisarmSecuritySystem>
</SOAP-ENV:Body>
</SOAP-ENV:Envelope>""")
request = httplib.HTTPSConnection(server_addr)
request.putrequest("POST", service_action)
request.putheader("Accept", "application/soap+xml, application/dime, multipart/related, text/*")
request.putheader("Content-Type", "text/xml; charset=utf-8")
request.putheader("Cache-Control", "no-cache")
request.putheader("Pragma", "no-cache")
request.putheader("SOAPAction","https://services.alarmnet.com/TC2/DisarmSecuritySystem")
request.putheader("Content-Length", str(len(body % sessionHash)))
request.endheaders()
request.send(body % sessionHash)
response = request.getresponse().read()
tree = ET.fromstring(response)
print >> Log, "API:", tree.find('.//{https://services.alarmnet.com/TC2/}ResultData').text
return
def TC2_ArmSecuritySystem(armInt):
TC2_SOAPSessionID()
server_addr = "rs.alarmnet.com"
service_action = "/TC21API/TC2.asmx"
body = ("""<SOAP-ENV:Envelope xmlns:SOAP-ENV="http://schemas.xmlsoap.org/soap/envelope/" xmlns:s="http://www.w3.org/2001/XMLSchema" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance">
<SOAP-ENV:Body>
<tns:ArmSecuritySystem xmlns:tns="https://services.alarmnet.com/TC2/">
<tns:SessionID>%s</tns:SessionID>
<tns:LocationID>0</tns:LocationID>
<tns:DeviceID>0</tns:DeviceID>""")
body1 = ("""<tns:ArmType>%s</tns:ArmType>
<tns:UserCode>-1</tns:UserCode>
</tns:ArmSecuritySystem>
</SOAP-ENV:Body>
</SOAP-ENV:Envelope>""")
request = httplib.HTTPSConnection(server_addr)
request.putrequest("POST", service_action)
request.putheader("Accept", "application/soap+xml, application/dime, multipart/related, text/*")
request.putheader("Content-Type", "text/xml; charset=utf-8")
request.putheader("Cache-Control", "no-cache")
request.putheader("Pragma", "no-cache")
request.putheader("SOAPAction","https://services.alarmnet.com/TC2/ArmSecuritySystem")
request.putheader("Content-Length", str(len(body % sessionHash + body1 % armInt)))
request.endheaders()
request.send(body % sessionHash + body1 % armInt)
response = request.getresponse().read()
tree = ET.fromstring(response)
print >> Log, "API:", tree.find('.//{https://services.alarmnet.com/TC2/}ResultData').text
return
#---- EOL API Section ----#
def countPeople():
global peopleTotal
peopleTotal=0
cmd = subprocess.Popen('/usr/sbin/arp -a -i re0_vlan4', shell=True, stdout=subprocess.PIPE)
for line in cmd.stdout:
if device1 in line:
peopleTotal += 1
print >> Log, "User1 is present",peopleTotal
if device2 in line:
peopleTotal += 1
print >> Log, "User2 is present",peopleTotal
if device3 in line:
peopleTotal += 1
print >> Log, "User3 is present",peopleTotal
# cfgfile = open("Security.ini",'w')
# Config.set('Status','armed', True)
# Config.write(cfgfile)
# cfgfile.close()
return
# ---- BOL Program Initiation and function mapping ----#
def runcheck():
countPeople()
print state, peopleTotal
#Check ENV with if Statement to see if the "Armed" boolean is true or false
if now_time >= time(23,59) or now_time <= time(5,00):
if state == False and peopleTotal >0:
cfgfile = open("Security.ini",'w')
Config.set('Status','armed', True)
Config.write(cfgfile)
cfgfile.close()
TC2_ArmSecuritySystem(1)
print >> Log, "arming - It's now between 11:59AM and 5:30AM"
else:
if state is True and peopleTotal >0:
print >> Log, "disarming - more then 0"
TC2_DisarmSecuritySystem()
cfgfile = open("Security.ini",'w')
Config.set('Status','armed', False)
Config.write(cfgfile)
cfgfile.close()
print "Disarming", state
else:
if state is False and peopleTotal <=0:
print >> Log, "arming away - less then 1"
TC2_ArmSecuritySystem(0)
cfgfile = open("Security.ini",'w')
Config.set('Status','armed', True)
Config.write(cfgfile)
cfgfile.close()
print "Arming Away", state
return
runcheck()
# ---- EOL Program Initiation and function mapping ----#
#---- Logging ---- #
print >> Log, "- Armed",state,"-",peopleTotal,"DEVICES PRESENT","-"
Log.close()
#---- EOL for LOG Output ---- #
| 39.665094 | 275 | 0.646093 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,945 | 0.46914 |
48d989d7c7b86f58f750e3be1818f6a34de5e9dd | 1,538 | py | Python | prm/relations/migrations/0002_activity.py | justaname94/innovathon2019 | d1a4e9b1b877ba12ab23384b9ee098fcdbf363af | ["MIT"] | null | null | null | prm/relations/migrations/0002_activity.py | justaname94/innovathon2019 | d1a4e9b1b877ba12ab23384b9ee098fcdbf363af | ["MIT"] | 4 | 2021-06-08T20:20:05.000Z | 2022-03-11T23:58:37.000Z | prm/relations/migrations/0002_activity.py | justaname94/personal_crm | d1a4e9b1b877ba12ab23384b9ee098fcdbf363af | ["MIT"] | null | null | null |
# Generated by Django 2.2.5 on 2019-09-09 21:21
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('relations', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Activity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(auto_now_add=True, help_text='Datetime on which the object was created.', verbose_name='created at ')),
('modified', models.DateTimeField(auto_now=True, help_text='Datetime on which the object was last modified.', verbose_name='modified at ')),
('name', models.CharField(max_length=50)),
('description', models.TextField()),
('is_active', models.BooleanField(default=True, help_text='Are you currently actively doing it?', verbose_name='Is active')),
('last_time', models.DateField(blank=True, null=True, verbose_name='Last time done')),
('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ['-created', '-modified'],
'get_latest_by': 'created',
'abstract': False,
},
),
]
| 43.942857 | 156 | 0.617035 | 1,379 | 0.896619 | 0 | 0 | 0 | 0 | 0 | 0 | 406 | 0.263979 |
48da319d7adab06ea080a4bfe703b82db0fccf2a | 1,106 | py | Python | elliptic_moab/Mesh/MeshQL.py | padmec-reservoir/elliptic_moab | a3b15f29a621c35a8279fd163326a0895aa67f30 | ["MIT"] | null | null | null | elliptic_moab/Mesh/MeshQL.py | padmec-reservoir/elliptic_moab | a3b15f29a621c35a8279fd163326a0895aa67f30 | ["MIT"] | null | null | null | elliptic_moab/Mesh/MeshQL.py | padmec-reservoir/elliptic_moab | a3b15f29a621c35a8279fd163326a0895aa67f30 | ["MIT"] | null | null | null |
from typing import Type
from elliptic.Kernel.Context import ContextDelegate
from .Selector import SelectorImplementation
from .Manager import ManagerImplementation
from .Computer import ComputerImplementation
class MeshQLImplementation(ComputerImplementation, ManagerImplementation, SelectorImplementation):
def base_delegate(self) -> Type[ContextDelegate]:
class BaseDelegate(ContextDelegate):
def get_template_file(self):
return 'base.pyx.etp'
def template_kwargs(self):
return {'declare_entityhandles': self.context.context['declare_entityhandle'],
'declare_ranges': self.context.context['declare_range'],
'declare_indexes': self.context.context['declare_index'],
'declare_variables': self.context.context['declare_variable'],
'declare_tags': set(self.context.context['declare_tags'])}
def context_enter(self):
pass
def context_exit(self):
pass
return BaseDelegate
| 34.5625 | 98 | 0.654611 | 892 | 0.80651 | 0 | 0 | 0 | 0 | 0 | 0 | 187 | 0.169078 |
48da48030860d7cf05ae6d06f45e092b1b0c01b7 | 1,229 | py | Python | tests/test_quil.py | stjordanis/quantumflow | bf965f0ca70cd69b387f9ca8407ab38da955e925 | ["Apache-2.0"] | 99 | 2018-12-03T20:41:39.000Z | 2022-02-21T13:56:08.000Z | tests/test_quil.py | stjordanis/quantumflow | bf965f0ca70cd69b387f9ca8407ab38da955e925 | ["Apache-2.0"] | 1 | 2021-06-25T15:18:31.000Z | 2021-06-25T15:18:31.000Z | tests/test_quil.py | stjordanis/quantumflow | bf965f0ca70cd69b387f9ca8407ab38da955e925 | ["Apache-2.0"] | 24 | 2018-12-03T20:41:41.000Z | 2022-01-03T01:11:45.000Z |
# Copyright 2016-2018, Rigetti Computing
#
# This source code is licensed under the Apache License, Version 2.0 found in
# the LICENSE.txt file in the root directory of this source tree.
import pytest
import quantumflow as qf
QUIL_FILES = [
'hello_world.quil',
'empty.quil',
'classical_logic.quil',
'control_flow.quil',
'measure.quil',
'qaoa.quil',
'bell.quil',
# 'include.quil',
]
RUNNABLE_QUIL_FILES = QUIL_FILES[:-1]
def test_parse_quilfile():
print()
for quilfile in QUIL_FILES:
filename = 'tests/quil/'+quilfile
print("<<<"+filename+">>>")
with open(filename, 'r') as f:
quil = f.read()
qf.forest.quil_to_program(quil)
def test_run_quilfile():
print()
for quilfile in RUNNABLE_QUIL_FILES:
filename = 'tests/quil/'+quilfile
print("<<<"+filename+">>>")
with open(filename, 'r') as f:
quil = f.read()
prog = qf.forest.quil_to_program(quil)
prog.run()
def test_unparsable():
with pytest.raises(RuntimeError):
filename = 'tests/quil/unparsable.quil'
with open(filename, 'r') as f:
quil = f.read()
qf.forest.quil_to_program(quil)
| 23.188679 | 77 | 0.613507 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 390 | 0.317331 |
48daec9dcfb1b92e90a94069bc6dece79afb65a2 | 1,254 | py | Python | gitool/util.py | eikendev/gitool | 9bfa248093d4ee3caf25fde1a59f4f0fc66994af | ["MIT"] | 1 | 2022-03-17T06:26:20.000Z | 2022-03-17T06:26:20.000Z | gitool/util.py | eikendev/gitool | 9bfa248093d4ee3caf25fde1a59f4f0fc66994af | ["MIT"] | null | null | null | gitool/util.py | eikendev/gitool | 9bfa248093d4ee3caf25fde1a59f4f0fc66994af | ["MIT"] | null | null | null |
import itertools
import logging
from git import Repo, InvalidGitRepositoryError
from .repository import Repository
logger = logging.getLogger("gitool")
def _list_repositories(path):
subdirectories = [p for p in path.iterdir() if p.is_dir()]
names = [p.name for p in subdirectories]
if '.git' not in names:
roots = [_list_repositories(p) for p in subdirectories]
roots = list(itertools.chain.from_iterable(roots))
else:
msg = "Discovered repository at '{}'."
logger.debug(msg.format(path))
roots = [path]
return roots
def get_repositories(path):
paths = _list_repositories(path)
repositories = list()
for p in paths:
try:
repo = Repo(str(p))
except InvalidGitRepositoryError:
msg = "'{}' is not a git repository."
logger.warning(msg.format(p))
continue
relative = p.relative_to(path)
repository = Repository(relative, repo)
repositories.append(repository)
repositories.sort()
return repositories
def list_properties(properties) -> str:
if len(properties) > 1:
return ', '.join(properties[:-1]) + ' and ' + properties[-1]
else:
return properties[0]
| 24.115385 | 68 | 0.633174 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 88 | 0.070175 |
48dbc22d623e96499bba5ef1f32d58521697a022 | 3,571 | py | Python | taiga/projects/epics/serializers.py | threefoldtech/Threefold-Circles | cbc433796b25cf7af9a295af65d665a4a279e2d6 | [
"Apache-2.0"
]
| null | null | null | taiga/projects/epics/serializers.py | threefoldtech/Threefold-Circles | cbc433796b25cf7af9a295af65d665a4a279e2d6 | [
"Apache-2.0"
]
| 12 | 2019-11-25T14:08:32.000Z | 2021-06-24T10:35:51.000Z | taiga/projects/epics/serializers.py | threefoldtech/Threefold-Circles | cbc433796b25cf7af9a295af65d665a4a279e2d6 | [
"Apache-2.0"
]
| null | null | null | # -*- coding: utf-8 -*-
# Copyright (C) 2014-2017 Andrey Antukh <[email protected]>
# Copyright (C) 2014-2017 Jesús Espino <[email protected]>
# Copyright (C) 2014-2017 David Barragán <[email protected]>
# Copyright (C) 2014-2017 Alejandro Alonso <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from taiga.base.api import serializers
from taiga.base.fields import Field, MethodField
from taiga.base.neighbors import NeighborsSerializerMixin
from taiga.mdrender.service import render as mdrender
from taiga.projects.attachments.serializers import BasicAttachmentsInfoSerializerMixin
from taiga.projects.mixins.serializers import OwnerExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import ProjectExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import AssignedToExtraInfoSerializerMixin
from taiga.projects.mixins.serializers import StatusExtraInfoSerializerMixin
from taiga.projects.notifications.mixins import WatchedResourceSerializer
from taiga.projects.tagging.serializers import TaggedInProjectResourceSerializer
from taiga.projects.votes.mixins.serializers import VoteResourceSerializerMixin
class EpicListSerializer(VoteResourceSerializerMixin, WatchedResourceSerializer,
OwnerExtraInfoSerializerMixin, AssignedToExtraInfoSerializerMixin,
StatusExtraInfoSerializerMixin, ProjectExtraInfoSerializerMixin,
BasicAttachmentsInfoSerializerMixin,
TaggedInProjectResourceSerializer, serializers.LightSerializer):
id = Field()
ref = Field()
project = Field(attr="project_id")
created_date = Field()
modified_date = Field()
subject = Field()
color = Field()
epics_order = Field()
client_requirement = Field()
team_requirement = Field()
version = Field()
watchers = Field()
is_blocked = Field()
blocked_note = Field()
is_closed = MethodField()
user_stories_counts = MethodField()
def get_is_closed(self, obj):
return obj.status is not None and obj.status.is_closed
def get_user_stories_counts(self, obj):
assert hasattr(obj, "user_stories_counts"), "instance must have a user_stories_counts attribute"
return obj.user_stories_counts
class EpicSerializer(EpicListSerializer):
comment = MethodField()
blocked_note_html = MethodField()
description = Field()
description_html = MethodField()
def get_comment(self, obj):
return ""
def get_blocked_note_html(self, obj):
return mdrender(obj.project, obj.blocked_note)
def get_description_html(self, obj):
return mdrender(obj.project, obj.description)
class EpicNeighborsSerializer(NeighborsSerializerMixin, EpicSerializer):
pass
class EpicRelatedUserStorySerializer(serializers.LightSerializer):
epic = Field(attr="epic_id")
user_story = Field(attr="user_story_id")
order = Field()
| 40.123596 | 104 | 0.758051 | 1,790 | 0.50098 | 0 | 0 | 0 | 0 | 0 | 0 | 1,029 | 0.287993 |
48dbc9d4daecd2cf1d72d63509bbaa3a2bffe8c4 | 2,178 | py | Python | src/TMDbApi/TMTranslationUnit.py | shasha79/nectm | 600044a6fe2c3a73e0d9327bc85883831a26dcae | [
"Apache-2.0"
]
| 3 | 2020-02-28T21:42:44.000Z | 2021-03-12T13:56:16.000Z | src/TMDbApi/TMTranslationUnit.py | Pangeamt/nectm | 6b84f048698f2530b9fdbb30695f2e2217c3fbfe | [
"Apache-2.0"
]
| 2 | 2020-11-06T14:40:10.000Z | 2020-12-29T19:03:11.000Z | src/TMDbApi/TMTranslationUnit.py | Pangeamt/nectm | 6b84f048698f2530b9fdbb30695f2e2217c3fbfe | [
"Apache-2.0"
]
| 2 | 2020-03-26T16:05:11.000Z | 2020-08-06T16:35:39.000Z | #
# Copyright (c) 2020 Pangeanic SL.
#
# This file is part of NEC TM
# (see https://github.com/shasha79/nectm).
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import uuid
class TMTranslationUnit:
attributes = ['source_text', 'target_text',
'source_id', 'target_id',
'source_language', 'target_language',
'source_metadata', 'target_metadata', 'metadata',
'source_pos', 'target_pos',
'tuid', 'dirty_score', 'username',
'industry', 'type', 'file_name', 'domain', 'organization',
'tm_creation_date', 'tm_change_date',
'insert_date', 'update_date', 'check_date', 'check_version']
def __init__(self, sdict={}):
self.reset(sdict)
def reset(self, sdict):
# Initialize segment fields
for attr in self.attributes:
val = None if not attr in sdict else sdict[attr]
setattr(self, attr, val)
# allocate ids
self._allocate_id('source')
self._allocate_id('target')
def _allocate_id(self, type):
text = getattr(self, type + '_text')
if text:
setattr(self, type + '_id', uuid.uuid5(uuid.NAMESPACE_URL, text))
def to_dict(self):
return dict([(a, getattr(self, a)) for a in self.attributes])
def to_dict_short(self):
return dict([(a, getattr(self, a)) for a in ['source_text', 'target_text', 'source_metadata', 'target_metadata'] if getattr(self, a)])
| 36.915254 | 138 | 0.674472 | 1,262 | 0.579431 | 0 | 0 | 0 | 0 | 0 | 0 | 1,322 | 0.606979 |
48dcecd475c9d9c66ff47a1b76abf99c791428f8 | 805 | py | Python | tests/test_268.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
]
| null | null | null | tests/test_268.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
]
| null | null | null | tests/test_268.py | sungho-joo/leetcode2github | ce7730ef40f6051df23681dd3c0e1e657abba620 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
import pytest
"""
Test 268. Missing Number
"""
@pytest.fixture(scope="session")
def init_variables_268():
from src.leetcode_268_missing_number import Solution
solution = Solution()
def _init_variables_268():
return solution
yield _init_variables_268
class TestClass268:
def test_solution_0(self, init_variables_268):
assert init_variables_268().missingNumber([3, 0, 1]) == 2
def test_solution_1(self, init_variables_268):
assert init_variables_268().missingNumber([0, 1]) == 2
def test_solution_2(self, init_variables_268):
assert init_variables_268().missingNumber([9, 6, 4, 2, 3, 5, 7, 0, 1]) == 8
def test_solution_3(self, init_variables_268):
assert init_variables_268().missingNumber([0]) == 1
| 23.676471 | 83 | 0.696894 | 499 | 0.619876 | 196 | 0.243478 | 229 | 0.284472 | 0 | 0 | 62 | 0.077019 |
48de82f88d77ad42fe5f179efaac8655f74f00d7 | 5,682 | py | Python | tests/db/test_connector.py | DaWeSearch/backend | 809e575ed730fce55d0e89a2fbc2031ba116f5e0 | [
"MIT"
]
| 1 | 2021-02-15T01:05:22.000Z | 2021-02-15T01:05:22.000Z | tests/db/test_connector.py | DaWeSearch/backend | 809e575ed730fce55d0e89a2fbc2031ba116f5e0 | [
"MIT"
]
| null | null | null | tests/db/test_connector.py | DaWeSearch/backend | 809e575ed730fce55d0e89a2fbc2031ba116f5e0 | [
"MIT"
]
| null | null | null | import unittest
import os
import json
from functions.db.connector import *
from functions.db.models import *
from functions.authentication import *
sample_search = {
"search_groups": [
{
"search_terms": ["blockchain", "distributed ledger"],
"match": "OR"
},
{
"search_terms": ["energy", "infrastructure", "smart meter"],
"match": "OR"
}
],
"match": "AND"
}
db_dict = {"db_name": "hallo", "api_key": "test"}
class TestConnector(unittest.TestCase):
def setUp(self):
name = "test_review"
self.review = add_review(name)
self.sample_query = new_query(self.review, sample_search)
with open('test_results.json', 'r') as file:
self.results = json.load(file)
save_results(self.results['records'], self.review, self.sample_query)
def test_add_review(self):
name = "test_review"
new_review = add_review(name)
review = get_review_by_id(new_review._id)
review.delete()
self.assertEqual(review._id, new_review._id)
def test_save_results(self):
query = new_query(self.review, sample_search)
jsonpath = os.path.abspath(os.path.join(
os.path.dirname(__file__), "..", "..", "test_results.json"))
with open(jsonpath, 'r') as file:
results = json.load(file)
save_results(results['records'], self.review, query)
results_from_db = get_persisted_results(query).get('results')
self.assertEqual(len(results_from_db), len(results['records']))
def test_pagination(self):
page1 = get_persisted_results(self.sample_query, 1, 10).get('results')
self.assertTrue(len(page1) == 10)
page2 = get_persisted_results(self.sample_query, 2, 10).get('results')
self.assertTrue(len(page2) == 10)
self.assertNotEqual(page1, page2)
def test_get_list_of_dois_for_review(self):
dois = get_dois_for_review(self.review)
for record in self.results.get('records'):
self.assertTrue(record.get('doi') in dois)
def test_update_score(self):
user = User(name="test user")
doi = self.results.get('records')[0].get('doi')
result = get_result_by_doi(self.review, doi)
self.assertEqual(len(result.scores), 0)
evaluation = {
"user": "testmann",
"score": 2,
"comment": "test_comment"
}
update_score(self.review, result, evaluation)
self.assertEqual(result.scores[0].score, 2)
evaluation = {
"user": "testmann",
"score": 5,
"comment": "joiefjlke"
}
update_score(self.review, result, evaluation)
self.assertEqual(result.scores[0].score, 5)
self.assertEqual(len(result.scores), 1)
user.delete()
def test_delete_results_for_review(self):
num_results = len(get_dois_for_review(self.review))
self.assertGreater(num_results, 0)
delete_results_for_review(self.review)
num_results = len(get_dois_for_review(self.review))
        self.assertEqual(num_results, 0)
def tearDown(self):
delete_results_for_review(self.review)
self.review.delete()
class TestUserDB(unittest.TestCase):
# TODO rewrite test cases
def setUp(self):
username = "philosapiens"
name = "Philippe"
surname = "Kalinowski"
email = "[email protected]"
password = "ABC123"
# databases = DatabaseInfo()
# databases.name = "SPRINGER_API"
# databases.api_key = "5150230aac7a227ve33693f99b5697aa"
# self.user = add_user(username, name, surname, email, password)
def test_add_user(self):
username = "philosapfiens"
name = "Philippe"
surname = "Kalinowski"
email = "[email protected]"
password = "ABC123222"
db_name = "SPRINGER_API"
api_key = "5150230aac7a227ve33693f99b5697aa"
# databases312 = DatabaseInfo.from_document(sample_databases)
# print(databases312)
new_user = add_user(username, name, surname, email, password)
# update_databases(new_user, db_dict)
# user = get_user_by_id(new_user.name)
def test_get_user_by_username(self):
user = get_user_by_username("philosapiens")
print(user.email)
def test_update_user(self):
user = get_user_by_username("philosapiens")
print(user.email)
update_user(user, user.name, "btesfd", "[email protected]", user.password)
user = get_user_by_username("philosapiens")
print(user.email)
def test_get_all_users(self):
print(str(get_users()))
def test_delete_users(self):
user = get_user_by_username("philosapiens")
delete_user(user)
class TestAuth(unittest.TestCase):
def setUp(self):
username = "philosapiens"
name = "Philippe"
surname = "Kalinowski"
email = "[email protected]"
password = "ABC123"
def test_login(self):
username = "philosapiens"
password = "ABC123222"
user = get_user_by_username(username)
password_correct = check_if_password_is_correct(user, password)
print(password_correct)
token = get_jwt_for_user(user)
print(type(token))
add_jwt_to_session(user, token)
is_token_valid = check_for_token(token)
print(is_token_valid)
is_token_in_session = check_if_jwt_is_in_session(token)
print(is_token_in_session)
# remove_jwt_from_session(user)
if __name__ == '__main__':
unittest.main()
| 29.28866 | 80 | 0.62566 | 5,123 | 0.901619 | 0 | 0 | 0 | 0 | 0 | 0 | 1,173 | 0.206441 |
48deb6f756807dc27d051aa0715208fc6f52b020 | 1,513 | py | Python | tests/test_capstone.py | GrammaTech/gtirb-capstone | f46d90e9cd733c632620e5d8c921a4b9f011020a | [
"MIT"
]
| 6 | 2020-04-10T15:19:30.000Z | 2021-04-13T22:54:17.000Z | tests/test_capstone.py | GrammaTech/gtirb-capstone | f46d90e9cd733c632620e5d8c921a4b9f011020a | [
"MIT"
]
| null | null | null | tests/test_capstone.py | GrammaTech/gtirb-capstone | f46d90e9cd733c632620e5d8c921a4b9f011020a | [
"MIT"
]
| 3 | 2020-07-10T22:52:32.000Z | 2021-02-13T19:52:22.000Z | # Copyright (C) 2020 GrammaTech, Inc.
#
# This code is licensed under the MIT license. See the LICENSE file in
# the project root for license terms.
#
# This project is sponsored by the Office of Naval Research, One Liberty
# Center, 875 N. Randolph Street, Arlington, VA 22203 under contract #
# N68335-17-C-0700. The content of the information does not necessarily
# reflect the position or policy of the Government and no official
# endorsement should be inferred.
#
import pytest
import gtirb
import gtirb_capstone
@pytest.mark.commit
def test_insert_bytes():
ir = gtirb.IR()
m = gtirb.Module(
name="test",
isa=gtirb.Module.ISA.X64,
byte_order=gtirb.Module.ByteOrder.Little,
)
m.ir = ir
s = gtirb.Section(name=".text")
s.module = m
bi = gtirb.ByteInterval(
contents=b"\x00\x01\x02\x03\x04\x05\x06\x07", address=0x1000
)
bi.section = s
b = gtirb.CodeBlock(offset=2, size=2)
b.byte_interval = bi
b2 = gtirb.DataBlock(offset=6, size=2)
b2.byte_interval = bi
bi.symbolic_expressions[6] = gtirb.SymAddrConst(0, None)
ctx = gtirb_capstone.RewritingContext(ir)
ctx.modify_block_insert(m, b, b"\x08\x09", 1)
assert bi.address == 0x1000
assert bi.size == 10
assert bi.contents == b"\x00\x01\x02\x08\x09\x03\x04\x05\x06\x07"
assert b.offset == 2
assert b.size == 4
assert b2.offset == 8
assert b2.size == 2
assert 6 not in bi.symbolic_expressions
assert 8 in bi.symbolic_expressions
| 30.877551 | 72 | 0.68341 | 0 | 0 | 0 | 0 | 989 | 0.653668 | 0 | 0 | 562 | 0.371447 |
48df4ad454aad4847f1d7ce4f347d3747f7148ed | 2,552 | py | Python | python/paddle/fluid/tests/unittests/npu/test_update_loss_scaling_min_op_npu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
]
| null | null | null | python/paddle/fluid/tests/unittests/npu/test_update_loss_scaling_min_op_npu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
]
| null | null | null | python/paddle/fluid/tests/unittests/npu/test_update_loss_scaling_min_op_npu.py | L-Net-1992/Paddle | 4d0ca02ba56760b456f3d4b42a538555b9b6c307 | [
"Apache-2.0"
]
| 1 | 2021-12-09T08:59:17.000Z | 2021-12-09T08:59:17.000Z | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import numpy as np
import sys
import os
sys.path.append("..")
from op_test import OpTest
import paddle
import paddle.fluid as fluid
import paddle.fluid.contrib.mixed_precision.amp_nn as amp_nn
from test_update_loss_scaling_op_npu import TestUpdateLossScalingOpBad
paddle.enable_static()
SEED = 2021
class TestUpdateLossScalingOpMinLossScalingBad(TestUpdateLossScalingOpBad):
def setUp(self):
self.set_npu()
self.op_type = "update_loss_scaling"
self.place = paddle.NPUPlace(0)
self.init()
fluid.core.globals()['FLAGS_min_loss_scaling'] = 1639
found_inf = np.array([True], dtype=np.bool_)
x = np.random.random((1024, 1024)).astype(self.dtype)
i = np.random.randint(0, 1024, 1)
j = np.random.randint(0, 1024, 1)
x[i[0]][j[0]] = np.inf
self.inputs = {
'X': [('x0', x)],
'FoundInfinite': found_inf,
'PrevLossScaling': self.prev_loss_scaling,
'InGoodSteps': self.num_good_steps,
'InBadSteps': self.num_bad_steps
}
self.outputs = {
'Out': [('out0', np.zeros_like(x))],
'LossScaling': np.array([1639.0]).astype(self.dtype),
'OutGoodSteps': self.zero_steps,
'OutBadSteps': self.zero_steps
}
def init(self):
self.incr_ratio = 2.0
self.decr_ratio = 0.8
self.dtype = np.float32
self.prev_loss_scaling = np.array([2048]).astype(self.dtype)
self.num_good_steps = np.array([999], dtype=np.int32)
self.num_bad_steps = np.array([1], dtype=np.int32)
self.zero_steps = np.array([0], dtype=np.int32)
self.attrs = {
'incr_every_n_steps': 1000,
'decr_every_n_nan_or_inf': 2,
'incr_ratio': self.incr_ratio,
'decr_ratio': self.decr_ratio,
}
if __name__ == '__main__':
unittest.main()
| 32.303797 | 75 | 0.647727 | 1,570 | 0.615204 | 0 | 0 | 0 | 0 | 0 | 0 | 842 | 0.329937 |
48df99695d0c2e85858fd3010b30aa03fd644e15 | 1,031 | py | Python | Examples/WorkingWithMimeMessages/SetEmailHeaders.py | Muzammil-khan/Aspose.Email-Python-Dotnet | 04ca3a6f440339f3ddf316218f92d15d66f24e7e | [
"MIT"
]
| 5 | 2019-01-28T05:17:12.000Z | 2020-04-14T14:31:34.000Z | Examples/WorkingWithMimeMessages/SetEmailHeaders.py | Muzammil-khan/Aspose.Email-Python-Dotnet | 04ca3a6f440339f3ddf316218f92d15d66f24e7e | [
"MIT"
]
| 1 | 2019-01-28T16:07:26.000Z | 2021-11-25T10:59:52.000Z | Examples/WorkingWithMimeMessages/SetEmailHeaders.py | Muzammil-khan/Aspose.Email-Python-Dotnet | 04ca3a6f440339f3ddf316218f92d15d66f24e7e | [
"MIT"
]
| 6 | 2018-07-16T14:57:34.000Z | 2020-08-30T05:59:52.000Z | import aspose.email as ae
import datetime
def run():
# The path to the File directory.
dataDir = "Data/"
#ExStart: SetEmailHeaders
# Create an instance of MailMessage class
eml = ae.MailMessage()
# Specify ReplyTo, From, To field, Cc and Bcc Addresses
eml.reply_to_list.Add("[email protected]")
eml.from_address = "[email protected]"
eml.to.append(ae.MailAddress("[email protected]", "Recipient 1"))
eml.to.append(ae.MailAddress("[email protected]", "Recipient 2"))
eml.cc.append(ae.MailAddress("[email protected]", "Recipient 3"))
eml.bcc.append(ae.MailAddress("[email protected]", "Recipient 4"))
# Specify Date, Message subject, XMailer, Secret Header, Save message to disc
eml.subject = "test mail"
eml.date = datetime.datetime(2006, 3, 6, 12, 00)
eml.xmailer = "Aspose.Email"
eml.headers.Add("secret-header", "mystery")
eml.save(dataDir + "SetEmailHeaders_out.msg", ae.SaveOptions.default_msg)
#ExEnd: SetEmailHeaders
if __name__ == '__main__':
run()
| 33.258065 | 81 | 0.681862 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 496 | 0.481086 |
48e060479c6f9450fb40ff919e56deed4c5f57d9 | 7,527 | py | Python | intrinsic/classify.py | seenu-andi-rajendran/plagcomps | 98e82cfb871f73bbd8f4ab1452c2b27a95beee83 | [
"MIT"
]
| 2 | 2015-01-18T06:20:27.000Z | 2021-03-19T21:19:16.000Z | intrinsic/classify.py | NoahCarnahan/plagcomps | 98e82cfb871f73bbd8f4ab1452c2b27a95beee83 | [
"MIT"
]
| null | null | null | intrinsic/classify.py | NoahCarnahan/plagcomps | 98e82cfb871f73bbd8f4ab1452c2b27a95beee83 | [
"MIT"
]
| 2 | 2015-11-19T12:52:14.000Z | 2016-11-11T17:00:50.000Z | # classify.py
# Alternative methods to clustering
import sys, os, time
from random import shuffle
import cPickle
from collections import Counter
sys.path.append('../pybrain/') # add the pybrain module to the path... TODO: actually install it.
from plagcomps.shared.util import IntrinsicUtility
from ..dbconstants import username
from ..dbconstants import password
from ..dbconstants import dbname
'''
from pybrain.structure import FeedForwardNetwork, LinearLayer, SigmoidLayer, FullConnection, TanhLayer
from pybrain.tools.shortcuts import buildNetwork
from pybrain.datasets import SupervisedDataSet
from pybrain.utilities import percentError
from pybrain.tools.shortcuts import buildNetwork
from pybrain.supervised.trainers import BackpropTrainer
from pybrain.structure.modules import SoftmaxLayer
from pybrain.tools.customxml.networkwriter import NetworkWriter
from pybrain.tools.customxml.networkreader import NetworkReader
from pybrain.structure.modules import BiasUnit
'''
import scipy
import sklearn
import sklearn.metrics
import matplotlib
import matplotlib.pyplot as pyplot
from pylab import ion, ioff, figure, draw, contourf, clf, show, hold, plot
from scipy import diag, arange, meshgrid, where
from numpy.random import multivariate_normal
import sqlalchemy
from sqlalchemy.orm import sessionmaker
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class NeuralNetworkConfidencesClassifier:
nn_filepath = os.path.join(os.path.dirname(__file__), "neural_networks/nn.xml")
dataset_filepath = os.path.join(os.path.dirname(__file__), "neural_networks/dataset.pkl")
def create_nn(self, features, num_hidden_layer_nodes):
net = buildNetwork(len(features), num_hidden_layer_nodes, 1)
return net
def create_trainer(self, network, dataset):
trainer = BackpropTrainer(network, dataset, learningrate=0.01, momentum=0.01, verbose=True)
return trainer
def roc(self, confidences, actuals):
fpr, tpr, thresholds = sklearn.metrics.roc_curve(actuals, confidences, pos_label=1)
roc_auc = sklearn.metrics.auc(fpr, tpr)
print 'ROC area under curve:', roc_auc
# The following code is from http://scikit-learn.org/stable/auto_examples/plot_roc.html
pyplot.plot(fpr, tpr, label='ROC curve (area = %0.2f)' % roc_auc)
pyplot.plot([0, 1], [0, 1], 'k--')
pyplot.xlim([0.0, 1.0])
pyplot.ylim([0.0, 1.0])
pyplot.xlabel('False Positive Rate')
pyplot.ylabel('True Positive Rate')
pyplot.title('Receiver operating characteristic')
pyplot.legend(loc="lower right")
#path = "figures/roc"+str(time.time())+".pdf"
        path = os.path.join(os.path.dirname(__file__), "neural_networks/roc" + str(time.time()) + ".pdf")
pyplot.savefig(path)
return path, roc_auc
def construct_confidence_vectors_dataset(self, reduced_docs, features, session):
from cluster import cluster
conf_dataset = SupervisedDataSet(len(features), 1)
confidence_vectors = []
num_trues = 0
for feature in features:
vi = 0
for doc in reduced_docs:
feature_vectors = doc.get_feature_vectors([feature], session)
confidences = cluster("outlier", 2, feature_vectors, center_at_mean=True, num_to_ignore=1, impurity=.2)
for i, confidence in enumerate(confidences, 0):
if len(confidence_vectors) <= vi:
confidence_vectors.append([[], 0])
if doc.span_is_plagiarized(doc._spans[i]):
t = 1
num_trues += 1
else:
t = 0
confidence_vectors[vi][0].append(confidence)
confidence_vectors[vi][1] = t
vi += 1
num_plagiarised = num_trues / len(features)
print num_plagiarised
shuffle(confidence_vectors)
for vec in confidence_vectors:
if vec[1] == 0:
num_plagiarised -= 1
if not (vec[1] == 0 and num_plagiarised <= 0):
conf_dataset.addSample(vec[0], vec[1])
f = open(self.dataset_filepath, 'wb')
cPickle.dump(conf_dataset, f)
print 'dumped dataset file'
return conf_dataset
def read_dataset(self):
f = open(self.dataset_filepath, 'rb')
return cPickle.load(f)
def construct_and_train_nn(self, features, num_files, epochs, filepath, session):
from plagcomps.evaluation.intrinsic import _get_reduced_docs
IU = IntrinsicUtility()
all_test_files = IU.get_n_training_files(n=num_files)
reduced_docs = _get_reduced_docs("paragraph", all_test_files, session)
print 'constructing datasets...'
# dataset = self.construct_confidence_vectors_dataset(reduced_docs, features, session)
dataset = self.read_dataset()
training_dataset, testing_dataset = dataset.splitWithProportion(0.75)
print 'dataset lengths:', len(dataset), len(training_dataset), len(testing_dataset)
print
print 'creating neural network...'
        # NOTE: num_hidden_layer_nodes is read from the module-level name defined under
        # __main__ below; it must be defined before calling this method from other code.
        net = self.create_nn(features, num_hidden_layer_nodes)
print 'creating trainer...'
trainer = self.create_trainer(net, training_dataset)
print 'training neural network for', epochs, 'epochs...'
trainer.trainEpochs(epochs)
print 'writing neural network to ' + str(filepath) + '...'
NetworkWriter.writeToFile(net, filepath)
print 'testing neural network...'
confidences = []
actuals = []
for point in testing_dataset:
confidences.append(net.activate(point[0])[0])
actuals.append(point[1][0])
print 'confidences|actuals ', zip(confidences, actuals)
print 'generating ROC curve...'
matplotlib.use('pdf')
path, auc = self.roc(confidences, actuals)
print 'area under curve =', auc
def nn_confidences(self, feature_vectors):
'''
Read the saved nn and run it.
'''
net = NetworkReader.readFrom(self.nn_filepath)
confidences = []
for feature_vector in feature_vectors:
confidences.append(net.activate(feature_vector)[0])
return confidences
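    # Illustrative usage sketch (assumes a network has already been trained and written
    # to neural_networks/nn.xml, e.g. by construct_and_train_nn above):
    #   clf = NeuralNetworkConfidencesClassifier()
    #   confidences = clf.nn_confidences(feature_vectors)
    # where feature_vectors is a list of per-passage feature value lists, ordered as in training.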
# an Engine, which the Session will use for connection resources
url = "postgresql://%s:%s@%s" % (username, password, dbname)
engine = sqlalchemy.create_engine(url)
# create tables if they don't already exist
Base.metadata.create_all(engine)
# create a configured "Session" class
Session = sessionmaker(bind=engine)
if __name__ == '__main__':
session = Session()
features = ['average_sentence_length',
'average_syllables_per_word',
'avg_external_word_freq_class',
'avg_internal_word_freq_class',
'flesch_kincaid_grade',
'flesch_reading_ease',
'num_chars',
'punctuation_percentage',
'stopword_percentage',
'syntactic_complexity',
'syntactic_complexity_average']
num_hidden_layer_nodes = 20
num_files = 30
epochs = 400
filepath = os.path.join(os.path.dirname(__file__), "neural_networks/nn.xml")
NN = NeuralNetworkConfidencesClassifier()
NN.construct_and_train_nn(features, num_files, epochs, filepath, session)
| 37.635 | 119 | 0.659891 | 4,991 | 0.66308 | 0 | 0 | 0 | 0 | 0 | 0 | 2,007 | 0.26664 |
48e0a28c89b1ce15b99aa2daf6b83acba8204f1b | 4,316 | py | Python | matplotlib-3.4.3/matplotlib-3.4.3/examples/images_contours_and_fields/image_transparency_blend.py | JohnLauFoo/clc_packages_Yu | 259f01d9b5c02154ce258734d519ae8995cd0991 | [
"MIT"
]
| 1 | 2021-11-13T17:21:44.000Z | 2021-11-13T17:21:44.000Z | matplotlib-3.4.3/matplotlib-3.4.3/examples/images_contours_and_fields/image_transparency_blend.py | JohnLauFoo/clc_packages_Yu | 259f01d9b5c02154ce258734d519ae8995cd0991 | [
"MIT"
]
| null | null | null | matplotlib-3.4.3/matplotlib-3.4.3/examples/images_contours_and_fields/image_transparency_blend.py | JohnLauFoo/clc_packages_Yu | 259f01d9b5c02154ce258734d519ae8995cd0991 | [
"MIT"
]
| null | null | null | """
==========================================
Blend transparency with color in 2D images
==========================================
Blend transparency with color to highlight parts of data with imshow.
A common use for `matplotlib.pyplot.imshow` is to plot a 2D statistical
map. The function makes it easy to visualize a 2D matrix as an image and add
transparency to the output. For example, one can plot a statistic (such as a
t-statistic) and color the transparency of each pixel according to its p-value.
This example demonstrates how you can achieve this effect.
First we will generate some data, in this case, we'll create two 2D "blobs"
in a 2D grid. One blob will be positive, and the other negative.
"""
# sphinx_gallery_thumbnail_number = 3
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import Normalize
def normal_pdf(x, mean, var):
return np.exp(-(x - mean)**2 / (2*var))
# Generate the space in which the blobs will live
xmin, xmax, ymin, ymax = (0, 100, 0, 100)
n_bins = 100
xx = np.linspace(xmin, xmax, n_bins)
yy = np.linspace(ymin, ymax, n_bins)
# Generate the blobs. The range of the values is roughly -.0002 to .0002
means_high = [20, 50]
means_low = [50, 60]
var = [150, 200]
gauss_x_high = normal_pdf(xx, means_high[0], var[0])
gauss_y_high = normal_pdf(yy, means_high[1], var[0])
gauss_x_low = normal_pdf(xx, means_low[0], var[1])
gauss_y_low = normal_pdf(yy, means_low[1], var[1])
weights = (np.outer(gauss_y_high, gauss_x_high)
- np.outer(gauss_y_low, gauss_x_low))
# We'll also create a grey background into which the pixels will fade
greys = np.full((*weights.shape, 3), 70, dtype=np.uint8)
# First we'll plot these blobs using ``imshow`` without transparency.
vmax = np.abs(weights).max()
imshow_kwargs = {
'vmax': vmax,
'vmin': -vmax,
'cmap': 'RdYlBu',
'extent': (xmin, xmax, ymin, ymax),
}
fig, ax = plt.subplots()
ax.imshow(greys)
ax.imshow(weights, **imshow_kwargs)
ax.set_axis_off()
###############################################################################
# Blending in transparency
# ========================
#
# The simplest way to include transparency when plotting data with
# `matplotlib.pyplot.imshow` is to pass an array matching the shape of
# the data to the ``alpha`` argument. For example, we'll create a gradient
# moving from left to right below.
# Create an alpha channel of linearly decreasing values moving to the right.
alphas = np.ones(weights.shape)
alphas[:, 30:] = np.linspace(1, 0, 70)
# Create the figure and image
# Note that the absolute values may be slightly different
fig, ax = plt.subplots()
ax.imshow(greys)
ax.imshow(weights, alpha=alphas, **imshow_kwargs)
ax.set_axis_off()
###############################################################################
# Using transparency to highlight values with high amplitude
# ==========================================================
#
# Finally, we'll recreate the same plot, but this time we'll use transparency
# to highlight the extreme values in the data. This is often used to highlight
# data points with smaller p-values. We'll also add in contour lines to
# highlight the image values.
# Create an alpha channel based on weight values
# Any value whose absolute value is > .0001 will have zero transparency
alphas = Normalize(0, .3, clip=True)(np.abs(weights))
alphas = np.clip(alphas, .4, 1) # alpha value clipped at the bottom at .4
# Create the figure and image
# Note that the absolute values may be slightly different
fig, ax = plt.subplots()
ax.imshow(greys)
ax.imshow(weights, alpha=alphas, **imshow_kwargs)
# Add contour lines to further highlight different levels.
ax.contour(weights[::-1], levels=[-.1, .1], colors='k', linestyles='-')
ax.set_axis_off()
plt.show()
#############################################################################
#
# .. admonition:: References
#
# The use of the following functions, methods, classes and modules is shown
# in this example:
#
# - `matplotlib.axes.Axes.imshow` / `matplotlib.pyplot.imshow`
# - `matplotlib.axes.Axes.contour` / `matplotlib.pyplot.contour`
# - `matplotlib.colors.Normalize`
# - `matplotlib.axes.Axes.set_axis_off`
| 34.528 | 79 | 0.657322 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,771 | 0.64203 |
48e3db6b6aba7110ea8f3e0d1c747e61649abf82 | 634 | py | Python | tests/test_admin.py | FernandoCelmer/django-global-permissions | 1ece2b18476a514dec7b1e13a51191943acb460b | [
"MIT"
]
| 30 | 2015-02-04T12:26:35.000Z | 2022-03-23T21:19:10.000Z | tests/test_admin.py | FernandoCelmer/django-global-permissions | 1ece2b18476a514dec7b1e13a51191943acb460b | [
"MIT"
]
| 15 | 2015-11-27T17:42:02.000Z | 2022-03-23T00:34:10.000Z | tests/test_admin.py | FernandoCelmer/django-global-permissions | 1ece2b18476a514dec7b1e13a51191943acb460b | [
"MIT"
]
| 15 | 2015-04-14T18:09:26.000Z | 2022-03-22T11:42:04.000Z | from django.test import TestCase
from django.core.urlresolvers import reverse
try:
from django.contrib.auth import get_user_model
User = get_user_model()
except ImportError:
from django.contrib.auth.models import User
class GlobalPermissionsAdminTest(TestCase):
def setUp(self):
User.objects.create_superuser(username='ham', password='spam', email='[email protected]')
self.client.login(username='ham', password='spam')
def test_admin_simply_works(self):
resp = self.client.get(reverse('admin:global_permissions_globalpermission_changelist'))
self.assertEqual(200, resp.status_code)
| 33.368421 | 95 | 0.749211 | 400 | 0.630915 | 0 | 0 | 0 | 0 | 0 | 0 | 90 | 0.141956 |
48e429fb8eb61c10b1ad429f9b2db275e7f48ee3 | 2,307 | py | Python | Models/utils.py | weslai/ecg_classification | 61cb45849485129cf04ee97f458fdf731353fd4b | [
"MIT"
]
| 1 | 2020-12-03T13:34:04.000Z | 2020-12-03T13:34:04.000Z | Models/utils.py | weslai/ecg_classification | 61cb45849485129cf04ee97f458fdf731353fd4b | [
"MIT"
]
| null | null | null | Models/utils.py | weslai/ecg_classification | 61cb45849485129cf04ee97f458fdf731353fd4b | [
"MIT"
]
| null | null | null | import matplotlib.pyplot as plt
import itertools
import numpy as np
from sklearn.metrics import confusion_matrix
## Evaluate the trained model: print test accuracy and plot the training history
def evaluate_model(history, X_test, y_test, model):
scores = model.evaluate((X_test), y_test, verbose=0)
print("Accuracy: %.2f%%" % (scores[1] *100))
print(history)
fig1, ax_acc = plt.subplots()
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.title('Model - Accuracy')
plt.legend(['Training', 'Validation'], loc='lower right')
plt.show()
fig2, ax_loss = plt.subplots()
plt.xlabel('Epoch')
plt.ylabel('Loss')
    plt.title('Model - Loss')
    plt.plot(history.history['loss'])
    plt.plot(history.history['val_loss'])
    plt.legend(['Training', 'Validation'], loc='upper right')
plt.show()
## Compute the confusion matrix from one-hot labels and predictions; pass the result to plot_confusion_matrix
def confustion_matrix(true_label, val_prediction):
cnf_matrix = confusion_matrix(true_label.argmax(axis=1), val_prediction.argmax(axis=1))
return cnf_matrix
## confusion matrix
def plot_confusion_matrix(cm, classes,
normalize=False,
title='Confusion matrix',
cmap=plt.cm.Blues):
"""
This function prints and plots the confusion matrix.
Normalization can be applied by setting `normalize=True`.
"""
np.set_printoptions(precision=2)
if normalize:
cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
print("Normalized confusion matrix")
else:
print('Confusion matrix, without normalization')
plt.figure(figsize=(10, 10))
plt.imshow(cm, interpolation='nearest', cmap=cmap)
plt.title(title)
plt.colorbar()
tick_marks = np.arange(len(classes))
plt.xticks(tick_marks, classes, rotation=45)
plt.yticks(tick_marks, classes)
fmt = '.2f' if normalize else 'd'
thresh = cm.max() / 2.
for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
plt.text(j, i, format(cm[i, j], fmt),
horizontalalignment="center",
color="white" if cm[i, j] > thresh else "black")
plt.tight_layout()
plt.ylabel('True label')
plt.xlabel('Predicted label')
plt.show()
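# Minimal usage sketch (illustrative only): `history`, `model`, `X_test` and `y_test` are assumed
# to come from a previously trained Keras-style classifier with one-hot encoded labels.
#   evaluate_model(history, X_test, y_test, model)
#   cm = confustion_matrix(y_test, model.predict(X_test))
#   plot_confusion_matrix(cm, classes=['N', 'S', 'V', 'F', 'Q'], normalize=True)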
| 32.041667 | 91 | 0.640225 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 569 | 0.246641 |
48e43797c38281b4f9c9d4f57ea6a962850d4cc0 | 44,206 | py | Python | evapotranspiration/penman_monteith_daily.py | JRoehrig/evapotranspiration | aeec040273e15f93bb25ff850b33a90a41c65291 | [
"MIT"
]
| 2 | 2021-08-07T10:38:41.000Z | 2022-03-02T07:34:11.000Z | evapotranspiration/penman_monteith_daily.py | JRoehrig/evapotranspiration | aeec040273e15f93bb25ff850b33a90a41c65291 | [
"MIT"
]
| null | null | null | evapotranspiration/penman_monteith_daily.py | JRoehrig/evapotranspiration | aeec040273e15f93bb25ff850b33a90a41c65291 | [
"MIT"
]
| null | null | null | import math
import numpy as np
import pandas as pd
class PenmanMonteithDaily(object):
r"""The class *PenmanMonteithDaily* calculates daily potential evapotranspiration according to the Penman-Monteith
method as described in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_ (Allen et al.,
1998). Reference evapotranspiration for a hypothetical grass reference crop (:math:`h=12` *cm*;
:math:`albedo=0.23`, and :math:`LAI=2.88`) is calculated by default. Wind and humidity observations at 2 meters
height as well as soil heat flux density :math:`G=0.0` *MJ/m²day* are also assumed by default.
Default values can be changed in the keyword arguments (`**kwargs`) described below.
The class *PenmanMonteithDaily* solves equation 3 in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_:
.. math::
ET = \frac{\Delta (R_n - G) + \rho_a c_p \frac{e_s - e_a}{r_a}}
{\lambda \left[ \Delta + \gamma \left( 1 + \frac{r_s}{r_a} \right) \right]}
\tag{eq. 3, p. 19}
:param elevation: elevation above sea level (*z*) *[m]*. Used in :meth:`clear_sky_shortwave_radiation` and
:meth:`atmospheric_pressure`
:type elevation: float
:param latitude: latitude (:math:`\varphi`) *[decimal degrees]*. Used in :meth:`sunset_hour_angle` and
:meth:`extraterrestrial_radiation`
:type latitude: float
:Keyword Arguments:
* **albedo** (*float*) - albedo or canopy reflection coefficient (:math:`\alpha`) *[-]*.
Range: :math:`0.0 \leq \alpha \leq 1.0`. Default :math:`albedo=0.23` for the hypothetical grass
reference crop. Used in :meth:`net_shortwave_radiation`
* **h** (*float*) - crop height (*h*) *[m]*. Default :math:`h=0.12` for the hypothetical grass reference
crop. Required to calculate the zero plane displacement height (:math:`d`) *[m]* and the roughness length
governing momentum (:math:`z_{om}`) *[m]*, both necessary for the aerodynamic resistance (:math:`r_a`) *[s/m]*.
See :meth:`aerodynamic_resistance_factor`
* **lai** (*float*) - leaf area index (:math:`LAI`) *[-]*. Default :math:`lai=2.88` for the hypothetical
grass reference crop. See *BOX 5* in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_ and
:meth:`bulk_surface_resistance`
* **rl** (*float*) - bulk stomatal resistance of well-illuminated leaf (:math:`r_l`) *[s/m]*. Default
:math:`rl=100.0` for any crop. See :meth:`bulk_surface_resistance`
* **zm** (*float*) - height of wind measurements (:math:`z_m`) *[m]*. Default :math:`zm=2.0`. Required to
calculate aerodynamic resistance (:math:`r_a`) *[s/m]*. See :meth:`aerodynamic_resistance_factor`
* **zh** (*float*) - height of humidity measurements (:math:`z_h`) *[m]*. Default :math:`zh=2.0`. Required to
calculate aerodynamic resistance (:math:`r_a`) *[s/m]*. See :meth:`aerodynamic_resistance_factor`
* **g** (*float*) - soil heat flux density (:math:`G`) *[MJ/m²day]*. Default :math:`g=0.0`. This
corresponds to :math:`G` in eq. 3, p. 19 above. It can be also given with daily parameters in :meth:`et0`
.. note::
Only :attr:`elevation` and :attr:`latitude` are mandatory parameters of :meth:`PenmanMonteithDaily()`.
:attr:`albedo`, :attr:`h`, and :attr:`lai` are only necessary when calculating evapotranspiration for crops
other than reference grass.
:ivar doy: day of year *[-]*
:ivar z: elevation in meters above sea level (*z*) *[m]*
:ivar p: atmospheric pressure (*P*) *[kPa]*
:ivar u2: wind speed at height :math:`z` (:math:`u_2`) *[m/s]*
:ivar ld: latent heat of vaporization (:math:`\lambda`) *[MJ/kg]*. See :meth:`latent_heat_of_vaporization()`
:ivar s: slope of saturation vapour pressure curve (:math:`\Delta`) *[kPa/°C]*.
See :meth:`slope_of_saturation_vapour_pressure_curve()`
:ivar psych: psychrometric constant (:math:`\gamma`) *[kPa/°C]*. See :meth:`psychrometric_constant()`
:ivar mn: daylight hours (:math:`N`) *[hours]*. See :meth:`daylight_hours()`
:ivar es: saturation vapour pressure (:math:`e_s`) *[kPa]*. See :meth:`saturation_vapour_pressure()`
:ivar ea: actual vapour pressure (:math:`e_a`) *[kPa]*. See :meth:`actual_vapour_pressure()`
:ivar ra: daily extraterrestrial radiation (:math:`R_a`) *[MJ/m²day]*. See :meth:`extraterrestrial_radiation()`
:ivar rs: daily shortwave radiation (:math:`R_s`) *[MJ/m²day]*. See :meth:`shortwave_radiation()`
:ivar rs0: clear-sky shortwave radiation (:math:`R_{so}`) *[MJ/m²day]*.
See :meth:`clear_sky_shortwave_radiation()`
:ivar rns: net shortwave radiation (:math:`R_{ns}`) *[MJ/m²day]*. See :meth:`net_shortwave_radiation()`
:ivar rnl: net outgoing longwave radiation (:math:`R_{nl}`) *[MJ/m²day]*. See :meth:`net_longwave_radiation()`
:ivar rn: net radiation (:math:`R_{n}`) *[MJ/m²day]*. :math:`R_{n} = R_{ns} - R_{nl}`
:ivar etr: radiation component of reference evapotranspiration *[mm/day]*
:ivar etw: wind component of reference evapotranspiration *[mm/day]*
:ivar et: reference evapotranspiration *[mm/day]*
Object Constants:
* **e** - ratio molecular weight of water vapour/dry air (:math:`\varepsilon`) *[-]*.
:math:`e = 0.622`
* **r** - specific gas constant *[kJ/kg.K]*. :math:`r = 0.287`
* **k** - von Karman constant (:math:`k`) *[-]*, see
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_ eq. 4.
:math:`k=0.41`
Object crop specific factors:
* **d_factor** - factor of the zero plane displacement height (:math:`d`) *[-]*. :math:`d\_factor = 2.0 / 3.0`
* **zom_factor** - factor of the roughness length governing momentum transfer (:math:`z_{om}`) *[-]*.
:math:`zom\_factor = 0.123`
* **zoh_factor** - factor of the roughness length governing transfer of heat and vapour (:math:`z_{oh}`) *[-]*.
:math:`zoh\_factor = 0.1`
* **lai_active_factor** - factor of the active (sunlit) leaf area index (:math:`LAI_{active}`) *[-]* (it
considers that generally only the upper half of dense clipped grass is actively contributing to the surface
heat and vapour transfer). :math:`lai\_active\_factor = 0.5`
Calculation with :meth:`et0`::
- pm = PenmanMonteithDaily(elevation, latitude, ...)
- et0 = pm.et0(...)
Calculation with :meth:`et0_frame` given a *pandas.DataFrame()* as input parameter::
- pm = PenmanMonteithDaily(elevation, latitude, ...)
- df = pm.et0_frame(df, ...)
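    A minimal instantiation (values below are purely illustrative; only :attr:`elevation` and :attr:`latitude`
    are required)::
        - pm = PenmanMonteithDaily(elevation=100.0, latitude=50.8)
        - pm.p       # atmospheric pressure [kPa] derived from the elevation
        - pm.ra_366  # precomputed extraterrestrial radiation for each day of the year [MJ/m²day]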
"""
def __init__(self, elevation, latitude, **kwargs):
self.albedo = kwargs.get('albedo', 0.23) # albedo
self.h = kwargs.get('h', 0.12) # crop height h [m]
self.zm = kwargs.get('zm', 2.0) # height of wind measurements [m]
self.zh = kwargs.get('zh', 2.0) # roughness length governing transfer of heat and vapour [m]
self.lai = kwargs.get('lai', 2.88) # LAI dependence
self.rl = kwargs.get('rl', 100.0) # The stomatal resistance
self.g_default = kwargs.get('g', 0.0) # soil heat flux density [MJ/m²day]
self.doy = None
self.u2 = None
self.ld = None
self.s = None
self.pc = None
self.mn = None
self.es = None
self.ea = None
self.ra = None
self.rs = None
self.rs0 = None
self.rns = None
self.rnl = None
self.rn = None
self.etr = None
self.etw = None
self.et = None
self.e = 0.622
self.r = 0.287
self.k = 0.41
self.d_factor = 2.0 / 3.0
self.zom_factor = 0.123
self.zoh_factor = 0.1
self.lai_active_factor = 0.5
        if latitude is not None:
days = np.array(range(367))
latitude = float(np.radians(latitude))
dr_366 = self.inverse_relative_distance_earth_sun(days)
sd_366 = np.array([self.solar_declination(day) for day in range(367)])
ws_366 = np.array([self.sunset_hour_angle(latitude, s) for s in sd_366])
self.daylight_hours_366 = np.array([PenmanMonteithDaily.daylight_hours(w) for w in ws_366])
self.ra_366 = np.array([self.extraterrestrial_radiation(
dr_366[i], ws_366[i], latitude, sd_366[i]) for i in range(len(dr_366))])
self.rs0_366 = np.array([self.clear_sky_shortwave_radiation(
ra, elevation=elevation) for ra in self.ra_366])
else:
self.daylight_hours_366 = None
self.ra_366 = None
self.rs0_366 = None
self.z = elevation
self.p = PenmanMonteithDaily.atmospheric_pressure(self.z)
ra_factor = self.aerodynamic_resistance_factor()
self.f1 = 86400 * self.e / (1.01 * self.r * ra_factor)
"""f1 = (specific heat at constant pressure) * (mean air density at constant pressure) /
(1.01 * :attr:`r` * :meth:`aerodynamic_resistance_factor`).
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_ Box 6
"""
        self.f2 = self.bulk_surface_resistance() / ra_factor
        r""":math:`f_2 = \frac{r_s}{f_{r_a}}` with :math:`f_{r_a}` = :meth:`aerodynamic_resistance_factor`"""
def reset(self):
r"""Reset the following output attributes before calculating :math:`ETo`: :math:`doy`, :math:`u2`,
:math:`ld`, :math:`s`, :math:`pc`, :math:`mn`, :math:`es`, :math:`ea`, :math:`ra`,
:math:`rs`, :math:`rs0`, :math:`rns`, :math:`rnl`, :math:`rn`, :math:`etr`, :math:`etw`, and :math:`et`
"""
self.doy = None
self.u2 = None
self.ld = None
self.s = None
self.pc = None
self.mn = None
self.es = None
self.ea = None
self.ra = None
self.rs = None
self.rs0 = None
self.rns = None
self.rnl = None
self.rn = None
self.etr = None
self.etw = None
self.et = None
@staticmethod
def atmospheric_pressure(z):
r""" Return the atmospheric pressure (:math:`P`) *[kPa]* as a function of the elevation above sea level as
defined in `FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 7, p. 31):
.. math::
P = 101.3\left(\frac{293-0.0065z}{293}\right)^{5.26}
The atmospheric pressure (:math:`P`) is the pressure exerted by the weight of the earth's atmosphere.
Evaporation at high altitudes is promoted due to low atmospheric pressure as expressed in the psychrometric
constant. The effect is, however, small and in the calculation procedures, the average value for a location
is sufficient. A simplification of the ideal gas law, assuming :math:`20` *°C* for a standard atmosphere,
can be employed to calculate :math:`P`
(`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_).
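        For example, a station at :math:`z = 1800` *m* above sea level gives :math:`P \approx 81.8` *kPa*.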
:param z: elevation above sea level *[m]*
:type z: float or np.array
:return: (*float or np.array*) atmospheric pressure (:math:`P`) *[kPa]*
"""
return 101.3 * ((293.0 - 0.0065 * z) / 293.0) ** 5.26
@staticmethod
def latent_heat_of_vaporization(temperature=20):
r"""Return the latent heat of vaporization (:math:`\lambda`) *[MJ/kg]* as described in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(Annex 3, eq. 3-1, p. 223):
.. math::
\lambda = 2.501-(2.361 * 10^{-3})T
:param temperature: air temperature (:math:`T`) *[°C]*. Default :math:`temperature=20`
:type temperature: float or np.array
:return: (*float or np.array*) latent heat of vaporization (:math:`\lambda`) *[MJ/kg]*.
Default :math:`\lambda=2.45378`
"""
return 2.501 - 2.361e-3 * temperature
@staticmethod
def psychrometric_constant(p, **kwargs):
r"""Return the psychrometric constant (:math:`\gamma`) *[kPa/°C]* according to
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
eq. 8, p. 32:
.. math::
\gamma = \frac{c_p P}{\varepsilon \lambda}
or, using default values:
.. math::
\gamma = a_{psy} \cdot P
:param p: atmospheric pressure (:math:`P`) *[kPa]*
:type p: float or np.array
:Keyword Arguments:
* **lamda** (*float*) - latent heat of vaporization (:math:`\lambda`) *[MJ/kg]*. Default :math:`lamda=2.45`.
              See :meth:`latent_heat_of_vaporization`
* **cp** (*float*) - specific heat at constant pressure (:math:`c_p`) *[MJ/kg]*. Default
:math:`cp=1.013e^{-3}`
* **epsilon** (*float*) - ratio molecular weight of water vapour/dry air (:math:`\epsilon`) *[-]*.
Default :math:`epsilon=0.622`
            * **a_psy** (*float*) - coefficient depending on the type of ventilation of the bulb *[1/°C]*. Examples:
* :math:`a_{psy} = 0.000665` (default)
* :math:`a_{psy} = 0.000662` for ventilated (Asmann type) psychrometers, with an air movement of some 5
*m/s*
* :math:`a_{psy} = 0.000800` for natural ventilated psychrometers (about 1 *m/s*)
* :math:`a_{psy} = 0.001200` for non-ventilated psychrometers installed indoors
The method uses :math:`a_{psy}` if given, otherwise eq. 8 (see above) with given or default values. Default
values correspond to :math:`a_{psy} = 0.000665` as argument.
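        For example, at sea level (:math:`P = 101.3` *kPa*) the default values yield
        :math:`\gamma \approx 0.067` *kPa/°C*.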
:return: (*float or np.array*) psychrometric constant (:math:`\gamma`) *[kPa/°C]*
"""
if 'a_psy' in kwargs:
return kwargs.get('a_psy', 0.000665) * p
else:
return (kwargs.get('cp', 1.013e-3) * p) / (kwargs.get('epsilon', 0.622) * kwargs.get('lamda', 2.45))
@staticmethod
def saturation_vapour_pressure(*temperature):
r"""Return the saturation vapour pressure (:math:`e_s`) *[kPa]* according to
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 11, p. 36):
.. math::
e^{°}(T) = 0.6108 exp \left[\frac{17.27 T}{T + 237.3}\right]
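        .. note::
            When more than one temperature is given (e.g. :math:`T_{min}` and :math:`T_{max}`), the mean of the
            individual saturation vapour pressures is returned, i.e. the mean saturation vapour pressure as used
            in FAO 56 (eq. 12, p. 36).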
:param temperature: air temperature (:math:`T`) *[°C]*
:type temperature: float or np.array
:return: (*float or np.array*) saturation vapour pressure (:math:`e_s`) *[kPa]*
"""
t = np.array([0.6108 * np.exp((17.27 * t) / (t + 237.3)) for t in temperature])
t = np.mean(t, axis=0)
return t
@staticmethod
def slope_of_saturation_vapour_pressure_curve(*temperature):
r"""Return the slope of saturation vapour pressure curve (:math:`\Delta`) *[kPa/°C]* according to
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 13, p. 37):
.. math::
\Delta = 4098\left[\frac{0.6108exp\left(\frac{17.27 T}{T + 237.3}\right)}{(T + 237.3)^{2}}\right]
:param temperature: air temperature (:math:`T`) *[°C]*
:type temperature: float or np.array
:return: (*float or np.array*) slope of saturation vapour pressure curve (:math:`\Delta`) *[kPa/°C]*
"""
sl = np.array([(4098.0 * PenmanMonteithDaily.saturation_vapour_pressure(t)) / ((t + 237.3) ** 2)
for t in temperature])
return np.mean(sl, axis=0)
@staticmethod
def actual_vapour_pressure(**kwargs):
"""Return the actual vapour pressure (:math:`e_a`) *[kPa]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(p. 37 , 38 , and 39):
:Keyword Arguments:
* **rh_min** (*float*) - 0.0 to 100.0 *[%]*
* **rh_max** (*float*) - 0.0 to 100.0 *[%]*
* **es_min** (*float*) - saturation vapour pressure for :math:`t\_min` *[kPa]*
* **es_max** (*float*) - saturation vapour pressure for :math:`t\_max` *[kPa]*
* **t_min** (*float*) - minimum air temperature *[°C]*
* **t_max** (*float*) - maximum air temperature *[°C]*
* **t_dew** (*float*) - dew point temperature *[°C]*
* **t_wet** (*float*) - wet bulb temperature *[°C]*
* **t_dry** (*float*) - dry bulb temperature *[°C]*
* **apsy** (*float*) - coefficient depending on the type of ventilation of the wet bulb *[-]*
:return: (*float or np.array*) actual vapour pressure (:math:`e_a`) *[kPa]*
"""
try:
rh_min = kwargs['rh_min'] / 100.0
rh_max = kwargs['rh_max'] / 100.0
if 'es_min' in kwargs and 'es_max' in kwargs:
es_min = kwargs['es_min']
es_max = kwargs['es_max']
else:
es_min = PenmanMonteithDaily.saturation_vapour_pressure(kwargs['t_min'])
es_max = PenmanMonteithDaily.saturation_vapour_pressure(kwargs['t_max'])
return (rh_max * es_min + rh_min * es_max) / 2.0
except KeyError:
t_dew = kwargs.get('t_dew', None)
return 0.6108 * math.exp((17.27 * t_dew) / (t_dew + 237.3))
def aerodynamic_resistance_factor(self):
r"""Return the aerodynamic resistance (:math:`r_a`) *[s/m]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 4, p. 20):
.. math::
r_a = \frac{ \ln \left( \frac{z_m - d}{z_{om}} \right) \ln \left( \frac{z_h - d}{z_{oh}} \right) }
{ k^2 u_z }
where (see :meth:`PenmanMonteithDaily()`):
:math:`u_z` --- the wind speed *[m/s]* at height :math:`z` (see :meth:`et0()`)
:math:`k` --- von Karman's constant *[-]*
:math:`zm` --- height of wind measurements *[m]*
:math:`zh` --- height of air humidity measurements *[m]*
The aerodynamic resistance factor :math:`f_{r_a}` is constant for a given crop:
.. math::
f_{r_a} = \frac{ \ln \left( \frac{z_m - d}{z_{om}} \right) \ln \left( \frac{z_h - d}{z_{oh}} \right) }
{ k^2}
with the zero plane displacement height (:math:`d`):
.. math::
d = f_d \cdot h
and roughness length governing momentum transfer (:math:`z_{om}`):
.. math::
z_{om} = f_{zom} \cdot h
where:
:math:`f_d` --- defined in :attr:`d_factor`
        :math:`f_{zom}` --- defined in :attr:`zom_factor`
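        For the default grass reference surface (:math:`h=0.12` *m*, :math:`z_m=z_h=2.0` *m*) this factor is
        approximately :math:`208`, which corresponds to the familiar :math:`r_a = 208/u_2` *s/m* used in
        `FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
        for the grass reference crop.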
:return: (*float*) aerodynamic resistance factor :math:`f_{r_a}`
"""
# zero plane displacement height, d [m]
d = self.d_factor * self.h
# roughness length governing momentum transfer [m]
zom = self.zom_factor * self.h
# roughness length governing transfer of heat and vapour [m]
zoh = self.zoh_factor * zom
return math.log((self.zm - d) / zom) * math.log((self.zh - d) / zoh) / (self.k ** 2)
def bulk_surface_resistance(self):
r"""Return (bulk) surface resistance (:math:`r_s`) *[s/m]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 5, p. 21):
.. math::
r_s = \frac{ r_l } { LAI_{active} }
where:
:math:`r_l` --- the bulk stomatal resistance of the well-illuminated leaf *[s/m]*
:math:`LAI_{active}` --- the active (sunlit) leaf area index *[m² (leaf area) / m² (soil surface)]*
A general equation for :math:`LAI_{active}` is:
.. math::
LAI_{active} = 0.5 LAI
with:
.. math::
LAI = 24 h
where :math:`h` is an optional input parameter in :class:`PenmanMonteithDaily`.
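        With the default values (:math:`r_l = 100` *s/m*, :math:`LAI = 2.88`) this yields
        :math:`r_s \approx 70` *s/m*, the surface resistance adopted in FAO 56 for the grass reference crop.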
:return: (*float*) (bulk) surface resistance :math:`r_s` *[s/m]*
"""
#
# active (sunlit) leaf area index [m^2 (leaf area) / m^2 (soil surface)]
lai_active = self.lai_active_factor * self.lai
rs = self.rl / lai_active
return rs
@staticmethod
def to_u2(uz, z):
r""" Return the calculated wind speed at 2 meters above ground surface (:math:`u_2`) *[m/s]*:
.. math::
u_2 = \frac{ 4.87 u_z}{ \ln{(67.8 z - 5.42)}}
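        For example, a wind speed of :math:`3.2` *m/s* measured at :math:`10` *m* height corresponds to
        :math:`u_2 \approx 2.4` *m/s*.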
:param uz: measured wind speed at :math:`z` meters above ground surface *[m/s]*
:type uz: float or np.array
:param z: height of measurement above ground surface *[m]*
:type z: float
:return: (*float or np.array*) wind speed at 2 meters above ground surface *[m/s]*
"""
return uz * 4.87 / np.log(67.8 * z - 5.42)
@staticmethod
def extraterrestrial_radiation(dr, ws, lat, sd):
r"""Return the extraterrestrial radiation (:math:`R_a`) *[MJ/m²day]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 21, p. 46):
.. math::
R_a = \frac{24(60)}{\pi} G_{sc} d_r [ \omega_s \sin(\varphi) \sin(\delta) + \cos(\varphi) \cos(\delta)
\sin(\omega_s)]
:param dr: inverse relative distance Earth-Sun (:math:`d_r`) *[-]*.
See :meth:`inverse_relative_distance_earth_sun`
:type dr: float
:param ws: sunset hour angle (:math:`\omega_s`) *[rad]*. See :meth:`sunset_hour_angle`
:type ws: float
:param lat: latitude (:math:`\varphi`) *[rad]*
:type lat: float
:param sd: solar declination (:math:`\delta`) *[rad]*. See :meth:`solar_declination`
:type sd: float
:return: *(float or np.array)* daily extraterrestrial radiation (:math:`R_a`) *[MJ/m²day]*
"""
# solar_constant = 0.0820 # MJ.m-2.min-1
# (24.0 * 60.0 / pi) * solar_constant = 37.586031360582005
return 37.586031360582005 * dr * (ws * np.sin(lat) * np.sin(sd) + np.cos(lat) * np.cos(sd) * np.sin(ws))
@staticmethod
def inverse_relative_distance_earth_sun(day):
r"""Return the inverse relative distance Earth-Sun (:math:`d_r`) *[-]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 23, p. 46):
.. math::
d_r = 1 + 0.033 \cos{ \left( \frac{2 \pi}{365} J \right)}
:param day: day of the year (:math:`J`) *[-]*. Range: :math:`1 \leq J \leq 366`
:type day: int or np.array
:return: *(float or np.array)* inverse relative distance Earth-Sun (:math:`d_r`) *[-]*
"""
# 2.0 * pi / 365 = 0.01721420632103996
return 1 + 0.033 * np.cos(0.01721420632103996 * day)
@staticmethod
def solar_declination(day):
r"""Return the solar declination (:math:`\delta`) *[rad]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 24, p. 46):
.. math::
\delta = 0.409 \sin{ \left( \frac{2 \pi}{365} J - 1.39\right)}
:param day: day of the year (:math:`J`) *[-]*. Range: :math:`1 \leq J \leq 366`
:type day: int
:return: (*float or np.array*) solar declination (:math:`\delta`) *[rad]*
"""
# 2.0 * pi / 365 = 0.01721420632103996
return 0.409 * np.sin(0.01721420632103996 * day - 1.39)
@staticmethod
def sunset_hour_angle(lat, sd):
r"""Return the sunset hour angle (:math:`\omega_s`) *[rad]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 25, p. 46):
.. math::
\omega_s = \arccos{ \left[-tan(\varphi)tan(\delta)\right]}
:param lat: latitude (:math:`\varphi`) *[rad]*
:type lat: float or np.array
:param sd: solar declination (:math:`\delta`) *[rad]*. See :meth:`solar_declination`
:type sd: float or np.array
:return: (*float or np.array*) sunset hour angle (:math:`\omega_s`) *[rad]*
"""
return np.arccos(-np.tan(sd) * np.tan(lat))
@staticmethod
def daylight_hours(ws):
r"""Return the daylight hours (:math:`N`) *[hour]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 34, p. 49):
.. math::
N = \frac{24}{\pi} \omega_s
:param ws: sunset hour angle (:math:`\omega_s`) *[rad]*. See :meth:`sunset_hour_angle`
:type ws: float or np.numpy
:return: (*float or np.numpy*) daylight hours (:math:`N`) *[hour]*
"""
# 24.0 / pi = 7.639437268410976
return 7.639437268410976 * ws
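# Illustrative chain of the static helpers above (assumed values: day of year 187,
# latitude ~50.8 deg N expressed in radians as 0.887):
#   dr = PenmanMonteithDaily.inverse_relative_distance_earth_sun(187)
#   sd = PenmanMonteithDaily.solar_declination(187)
#   ws = PenmanMonteithDaily.sunset_hour_angle(0.887, sd)
#   ra = PenmanMonteithDaily.extraterrestrial_radiation(dr, ws, 0.887, sd)
#   n_max = PenmanMonteithDaily.daylight_hours(ws)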
@staticmethod
def clear_sky_shortwave_radiation(ra, elevation=0.0, a_s=0.25, b_s=0.50):
r"""Return the clear-sky shortwave radiation (:math:`R_{so}`) *[MJ/m²day]*. It is required for computing
:meth:`net_longwave_radiation`.
For near sea level or when calibrated values for :math:`a_s` and :math:`b_s` are available
(`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_, eq. 36,
p. 51):
.. math::
R_{so} = (a_s + b_s ) R_a
When calibrated values for :math:`a_s` and :math:`b_s` are not available
(`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_,
eq. 37, p. 51):
.. math::
R_{so} = (0.75 + 2 \times 10^{-5} z) R_a
where :math:`z` is the station elevation above sea level *[m]*.
:param ra: extraterrestrial radiation (:math:`R_a`) *[MJ/m²day]*. See :meth:`extraterrestrial_radiation`
:type ra: float or np.numpy
:param elevation: meters above sea level (:math:`z`) *[m]*. See :attr:`elevation`
:type elevation: float or np.numpy
:param a_s: regression constant (:math:`a_s`) *[-]*. Default :math:`a_s=0.25`. It expresses the fraction of
extraterrestrial radiation reaching the earth on overcast days (:math:`n = 0`)
:type a_s: float or np.numpy
:param b_s: regression constant (:math:`b_s`) *[-]*. Default :math:`b_s=0.50`. The expression
:math:`a_s+b_s` indicates the fraction of extraterrestrial radiation reaching the earth on clear days
(:math:`n = N`)
:type b_s: float or np.numpy
:return: (*float or np.numpy*) daily clear-sky shortwave radiation (:math:`R_{so}`) *[MJ/m²day]*
"""
rs0 = ((a_s + b_s) + 2e-5 * elevation) * ra
return rs0
@staticmethod
def shortwave_radiation(ra, n, mn, a_s=0.25, b_s=0.50):
r"""Return the daily shortwave radiation (:math:`R_s`) *[MJ/m²day]* according to the Angstrom formula as
described in `FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 35, p. 50):
.. math::
R_s = \left( a_s + b_s \frac{n}{N} \right) R_a
Depending on atmospheric conditions (humidity, dust) and solar declination (latitude and month), the Angstrom
values :math:`a_s` and :math:`b_s` will vary. Where no actual solar radiation data are available and no
calibration has been carried out for improved :math:`a_s` and :math:`b_s` parameters, the values
:math:`a_s = 0.25` and :math:`b_s = 0.50` are recommended.
:param ra: extraterrestrial radiation (:math:`R_a`) *[MJ/m²day]*. See :meth:`extraterrestrial_radiation`
:type ra: float or np.array
:param n: actual duration of sunshine or cloudless hours (:math:`n`) *[hour]*
:type n: float or np.array
:param mn: maximum possible duration of sunshine or daylight hours (:math:`N`) *[hour]*
See :meth:`daylight_hours`
:type mn: float, np.array
:param a_s: regression constant (:math:`as`) *[-]*. Default :math:`a_s=0.25`. It expresses the fraction
of extraterrestrial radiation reaching the earth on overcast days (:math:`n = 0`)
:type a_s: float or np.numpy
:param b_s: regression constant (:math:`bs`) *[-]*. Default :math:`b_s=0.50`. The expression
:math:`a_s+b_s` indicates the fraction of extraterrestrial radiation reaching the earth on clear days
(:math:`n = N`)
:type b_s: float or np.numpy
:return: (*float, np.array*) daily total shortwave radiation (:math:`R_s`) *[MJ/m²day]* reaching the earth
.. note::
If shortwave radiation (i.e., solar radiation) measurements are available, :meth:`shortwave_radiation`
function is not needed. Measurements of shortwave radiation may be directly used as input data in
:meth:`et0`.
"""
rns = (a_s + b_s * n / mn) * ra
return rns
@staticmethod
def net_shortwave_radiation(rs, albedo):
r"""The net shortwave radiation (:math:`R_{ns}`) *[MJ/m²day]* resulting from the balance between incoming
and reflected solar radiation as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 38, p. 51):
.. math::
R_{ns} = (1 - \alpha) R_s
:param rs: daily shortwave radiation (:math:`R_s`) *[MJ/m²day]*. See :meth:`shortwave_radiation`
:type rs: float or np.array
:param albedo: albedo or reflection coefficient (:math:`\alpha` *[-]*). Range:
:math:`0.0 \leq \alpha \leq 1.0` (:math:`\alpha=0.23` for the hypothetical grass reference crop).
See :class:`PenmanMonteithDaily` and :meth:`et0`
:type albedo: float or np.array
:return: (*float or np.array*) daily net shortwave radiation (:math:`R_{ns}`) *[MJ/m²day]* reaching the earth
"""
return (1.0 - albedo) * rs
@staticmethod
def net_longwave_radiation(t_min, t_max, rs, rs0, ea=None):
r"""Return the net outgoing longwave radiation (:math:`R_{nl}`) *[MJ/m²day]* as defined in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_
(eq. 39, p. 52):
.. math::
R_{nl} = \sigma\left[\frac{T_{max,K}^4 + T_{min,K}^4}{2}\right](0.34-0.14\sqrt{e_a})\left(1.35
\frac{R_s}{R_{so}}-0.35\right)
:param t_min: minimum daily air temperature (:math:`T_{min}`) *[°C]*
:type t_min: float or np.array
:param t_max: maximum daily air temperature (:math:`T_{max}`) *[°C]*
:type t_max: float or np.array
:param rs: shortwave radiation (:math:`R_s`) *[MJ/m²day]*. See :meth:`shortwave_radiation`
:type rs: float or np.array
:param rs0: clear-sky shortwave radiation (:math:`R_{so}`) *[MJ/m²day]*. See
:meth:`clear_sky_shortwave_radiation`
:type rs0: float or np.array
:param ea: actual vapour pressure (:math:`e_a`) *[kPa]*
:type ea: float or np.array
:return: (*float or np.array*) daily net outgoing longwave radiation (:math:`R_{nl}`) *[MJ/m²day]*
.. note::
The :math:`R_s/R_{so}` term in the equation above must be limited so that :math:`R_s/R_{so} \leq 1.0`.
"""
t_min = t_min + 273.15
t_max = t_max + 273.15
if ea is not None:
rln = 4.903e-9 * (t_min ** 4 + t_max ** 4) * 0.5 * (0.34 - 0.14 * np.sqrt(ea)) * (1.35 * rs / rs0 - 0.35)
else:
# Empirical net-emissivity approximation used when ea is unavailable;
# the coefficient -7.77e-4 applies to air temperature in degrees Celsius
t_mean_c = (t_min + t_max) / 2.0 - 273.15
rln = 4.903e-9 * (t_min ** 4 + t_max ** 4) * 0.5 * \
(-0.02 + 0.261 * np.exp(-7.77e-4 * t_mean_c ** 2)) * (1.35 * rs / rs0 - 0.35)
return rln
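# Sketch of how these radiation terms are combined later in et0():
#   rns = self.net_shortwave_radiation(rs, albedo)                 # net incoming shortwave
#   rnl = self.net_longwave_radiation(t_min, t_max, rs, rs0, ea)   # net outgoing longwave
#   rn = rns - rnl                                                 # net radiation used in FAO 56 eq. 3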
def et0(self, **kwargs):
r"""Returns potential evapotranspiration (:math:`ETo`) *[mm/day]* as described in
`FAO 56 <http://www.fao.org/tempref/SD/Reserved/Agromet/PET/FAO_Irrigation_Drainage_Paper_56.pdf>`_. Reference
(grass) potential evapotranspiration is returned for default constructor values. If values in `**kwargs` are
arrays, their lengths must be the same.
:Keyword Arguments:
* **date** (*str, datetime.date, datetime.datetime, pandas.TimeStamp, or np.array*)
* **doy** (*int or np.array*) - day of the year (:math:`J`) *[-]*. Range: :math:`1 \leq J \leq 366`.
It is not used if date is given
* **u2** (*float or np.array*) - wind speed at 2 meters above ground surface *[m/s]*
* **uz** (*float or np.array*) - measured wind speed at :math:`z` meters above ground surface *[m/s]*
* **z** (*float or np.array*) - height of measurement above ground surface *[m]*
* **t_mean** (*float or np.array*) - daily mean air temperature *[°C]*
* **t_min** (*float or np.array*) - daily minimum air temperature *[°C]*
* **t_max** (*float or np.array*) - daily maximum air temperature *[°C]*
* **rh_mean** (*float or np.array*) - daily mean relative humidity *[%]*
* **rh_min** (*float or np.array*) - daily minimum relative humidity *[%]*
* **rh_max** (*float or np.array*) - daily maximum relative humidity *[%]*
* **rs** (*float or np.array*) - solar or shortwave radiation *[MJ/m²day]*
* **n** (*float or np.array*) - daily actual duration of sunshine or cloudless hours *[hour]*
* **g** (*float or np.array*) - soil heat flux density *[MJ/m²day]*. If not given, *g* defined in
:meth:`PenmanMonteithDaily` will be used
* **a_s** (*float or np.array*) - see :meth:`shortwave_radiation`. Default :math:`a_s = 0.25`
* **b_s** (*float or np.array*) - see :meth:`shortwave_radiation`. Default :math:`b_s = 0.50`
* **negative_rnl** (*bool*) - allow negative net longwave radiation. Default :math:`negative\_rnl=True`
* **negative_et0** (*bool*) - allow negative reference evapotranspiration. Default :math:`negative\_et0=True`
:return: (*float or np.array*) potential evapotranspiration (:math:`ETo`) *[mm/day]*
Cases:
* If date and doy are given, :math:`doy` is disregarded
* if :math:`uz` is given, :math:`z` must also be given
* if :math:`u2` and (:math:`uz`, :math:`z`) are given, both :math:`uz` and :math:`z` are disregarded
* if :math:`rs` and :math:`n` are given, :math:`n` will be disregarded
* The best options for air temperature are, in this order: 1) t_min, t_max, and t_mean, 2) t_min, t_max, and
3) t_mean
* The best options for relative air humidity are, in this order: 1) rh_max and rh_min, 2) rh_max, and 3)
rh_mean
Example 1::
>>> from evapotranspiration.penman_monteith_daily import PenmanMonteithDaily
>>> pm = PenmanMonteithDaily(elevation=100, latitude=50.80)
>>> et0 = pm.et0(doy=187, u2=2.078, t_min=12.3, t_max=21.5, rh_min=63, rh_max=84, n=9.25)
>>> print(et0)
3.872968723753793
Example 2::
>>> from evapotranspiration.penman_monteith_daily import PenmanMonteithDaily
>>> pm = PenmanMonteithDaily(elevation=100, latitude=50.80)
>>> et0 = pm.et0(date='2001-07-06', u2=2.078, t_min=12.3, t_max=21.5, rh_min=63, rh_max=84, n=9.25)
>>> print(et0)
3.872968723753793
Example 3::
>>> from evapotranspiration.penman_monteith_daily import PenmanMonteithDaily
>>> pm = PenmanMonteithDaily(elevation=100, latitude=50.80)
>>> date=np.array(['2001-07-06', '2001-07-06'])
>>> u2=np.array([2.078, 2.078])
>>> t_min=np.array([12.3, 12.3])
>>> t_max=np.array([21.5, 21.5])
>>> rh_min=np.array([63, 63])
>>> rh_max=np.array([84, 84])
>>> n=np.array([9.25, 9.25])
>>> et0 = pm.et0(date=date, u2=u2, t_min=t_min, t_max=t_max, rh_min=rh_min, rh_max=rh_max, n=n)
>>> print(et0)
[3.87296872 3.87296872]
"""
self.reset()
try:
self.u2 = kwargs.get('u2', None)
if self.u2 is None:
self.u2 = self.to_u2(kwargs['uz'], kwargs['z'])
except KeyError:
raise KeyError('Penman-Monteith: Either u2 or both uz and z must be given')
t_min = kwargs.get('t_min', None)
if t_min is None:
t_min = kwargs['t_mean']
t_max = kwargs.get('t_max', None)
if t_max is None:
t_max = kwargs['t_mean']
t_mean = kwargs.get('t_mean', None)
rh_min = kwargs.get('rh_min', None)
rh_max = kwargs.get('rh_max', None)
if rh_max is not None:
if rh_min is None:
rh_min = rh_max
else:
rh_min = rh_max = kwargs['rh_mean']
self.doy = kwargs.get('doy', None)
if self.doy is None:
self.doy = pd.to_datetime(kwargs['date']).dayofyear
self.rs = kwargs.get('rs', None)
n = kwargs.get('n', None)
g = kwargs.get('g', None)
if g is None:
g = self.g_default
a_s = kwargs.get('a_s', 0.25)
b_s = kwargs.get('b_s', 0.50)
if t_mean is None:
t_mean = (t_min + t_max) / 2.0
self.ld = PenmanMonteithDaily.latent_heat_of_vaporization(t_mean)
# In FAO 56, where delta occurs in the numerator and denominator, the slope
# of the vapour pressure curve is calculated using mean air temperature (Equation 9)
self.s = PenmanMonteithDaily.slope_of_saturation_vapour_pressure_curve(t_mean)
self.pc = PenmanMonteithDaily.psychrometric_constant(self.p, lamda=self.ld)
self.es = PenmanMonteithDaily.saturation_vapour_pressure(t_min, t_max)
self.ea = PenmanMonteithDaily.actual_vapour_pressure(rh_min=rh_min, rh_max=rh_max, t_min=t_min, t_max=t_max)
try:
self.ra = np.array([self.ra_366[i] for i in self.doy])
self.rs0 = np.array([self.rs0_366[i] for i in self.doy])
if self.rs is None:
self.mn = np.array([self.daylight_hours_366[i] for i in self.doy])
self.rs = self.shortwave_radiation(self.ra, n, self.mn, a_s, b_s)
# FAO56 eq. 39. The Rs/Rso term in equation 39 must be limited so that Rs/Rso ≤ 1.0.
self.rs = np.where(self.rs > self.rs0, self.rs0, self.rs)
except TypeError:
self.ra = self.ra_366[self.doy]
self.rs0 = self.rs0_366[self.doy]
if self.rs is None:
self.mn = self.daylight_hours_366[self.doy]
self.rs = self.shortwave_radiation(self.ra, n, self.mn, a_s, b_s)
# FAO56 eq. 39. The Rs/Rso term in equation 39 must be limited so that Rs/Rso ≤ 1.0.
self.rs = self.rs0 if self.rs > self.rs0 else self.rs
self.rns = self.net_shortwave_radiation(self.rs, self.albedo)
self.rnl = self.net_longwave_radiation(t_min, t_max, self.rs, self.rs0, self.ea)
if kwargs.get('negative_rnl', False) and self.rnl < 0.0:
self.rnl = 0.0
self.rn = self.rns - self.rnl
# denominator of FAO 56 eq. 3
etd = self.ld * (self.s + self.pc * (1 + self.f2 * self.u2))
# ETo energy component of FAO 56 eq. 3
self.etr = self.s * (self.rn - g) / etd
# ETo wind component of FAO 56 eq. 3
self.etw = (self.ld * self.pc * self.u2 * self.f1 * (self.es - self.ea) / (t_mean + 273.0)) / etd
# Reference evapotranspiration
self.et = self.etr + self.etw
self.et = np.where(self.et < 0.0, 0.0, self.et)
try:
self.et = float(self.et)
except TypeError:
pass
if kwargs.get('negative_et0', False) and self.et < 0.0:
self.et = 0.0
return self.et
def et0_frame(self, df, **kwargs):
"""Return the input DataFrame extended by :meth:`et0` and further calculation parameters.
:param df: pandas DataFrame with columns corresponding to the inputs described in :meth:`et0`
:type df: pandas.DataFrame
:Keyword Arguments:
* **show_all** (*bool*) - show all results if :math:`True`, otherwise set `parameter=True` to show individual
parameters. For example :math:`doy=True`, :math:`ld=True`, etc. See :meth:`PenmanMonteithDaily`
:return: (*pandas.DataFrame*) DataFrame
"""
doy_str = kwargs.get('doy', 'doy')
date_str = kwargs.get('date', 'date')
u2_str = kwargs.get('u2', 'u2')
uz_str = kwargs.get('uz', 'uz')
z_str = kwargs.get('z', 'z')
t_mean_str = kwargs.get('t_mean', 't_mean')
t_min_str = kwargs.get('t_min', 't_min')
t_max_str = kwargs.get('t_max', 't_max')
rh_mean_str = kwargs.get('rh_mean', 'rh_mean')
rh_min_str = kwargs.get('rh_min', 'rh_min')
rh_max_str = kwargs.get('rh_max', 'rh_max')
rs_str = kwargs.get('rs', 'rs')
n_str = kwargs.get('n', 'n')
g_str = kwargs.get('g', 'g')
columns = df.columns
doy = df[doy_str].values if doy_str in columns else None
date = df[date_str].values if date_str in columns else None
u2 = df[u2_str].values if u2_str in columns else None
uz = df[uz_str].values if uz_str in columns else None
z = df[z_str].values if z_str in columns else None
t_mean = df[t_mean_str].values if t_mean_str in columns else None
t_min = df[t_min_str].values if t_min_str in columns else None
t_max = df[t_max_str].values if t_max_str in columns else None
rh_mean = df[rh_mean_str].values if rh_mean_str in columns else None
rh_min = df[rh_min_str].values if rh_min_str in columns else None
rh_max = df[rh_max_str].values if rh_max_str in columns else None
rs = df[rs_str].values if rs_str in columns else None
n = df[n_str].values if n_str in columns else None
g = df[g_str].values if g_str in columns else None
self.et0(doy=doy, date=date, u2=u2, uz=uz, z=z, t_mean=t_mean, t_min=t_min, t_max=t_max,
rh_mean=rh_mean, rh_min=rh_min, rh_max=rh_max, rs=rs, n=n, g=g)
show_all = kwargs.get('show_all', True)
if show_all:
if doy is None:
df['DoY'] = self.doy
df['Lambda'] = self.ld
df['Psy'] = self.pc
df['Delta'] = self.s
df['es'] = self.es
df['ea'] = self.ea
df['Rs'] = self.rs
df['Rns'] = self.rns
df['Rnl'] = self.rnl
df['ET0r'] = self.etr
df['ET0w'] = self.etw
df['ET0'] = self.et
else:
if kwargs.get('Lambda', False):
df['Lambda'] = self.ld
if kwargs.get('Psy', False):
df['Psy'] = self.pc
if kwargs.get('Delta', False):
df['Delta'] = self.s
if kwargs.get('es', False):
df['es'] = self.es
if kwargs.get('ea', False):
df['ea'] = self.ea
if kwargs.get('Rs', False):
df['Rs'] = self.rs
if kwargs.get('Rns', False):
df['Rns'] = self.rns
if kwargs.get('Rnl', False):
df['Rnl'] = self.rnl
if kwargs.get('ET0r', False):
df['ET0r'] = self.etr
if kwargs.get('ET0w', False):
df['ET0w'] = self.etw
if kwargs.get('ET0', True):
df['ET0'] = self.et
return df
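# Usage sketch for et0_frame (column names assume the default mapping above):
#   pm = PenmanMonteithDaily(elevation=100, latitude=50.80)
#   data = pd.DataFrame({'date': ['2001-07-06'], 'u2': [2.078], 't_min': [12.3],
#                        't_max': [21.5], 'rh_min': [63], 'rh_max': [84], 'n': [9.25]})
#   data = pm.et0_frame(data)   # appends 'ET0' plus intermediates such as 'Rs', 'Rnl', 'ET0r', 'ET0w'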
| 46.14405 | 120 | 0.58474 | 44,208 | 0.998735 | 0 | 0 | 18,924 | 0.427526 | 0 | 0 | 31,635 | 0.714689 |
48e612645ef11a151beea876541ffc2a70be93e5 | 5,123 | py | Python | src/cnc-app-name/views.py | scotchoaf/cnc-skeleton | 2116bf3d61fc1ed834daeaa146f5730713300010 | [
"MIT"
]
| null | null | null | src/cnc-app-name/views.py | scotchoaf/cnc-skeleton | 2116bf3d61fc1ed834daeaa146f5730713300010 | [
"MIT"
]
| null | null | null | src/cnc-app-name/views.py | scotchoaf/cnc-skeleton | 2116bf3d61fc1ed834daeaa146f5730713300010 | [
"MIT"
]
| 1 | 2019-04-08T14:54:12.000Z | 2019-04-08T14:54:12.000Z | # Copyright (c) 2018, Palo Alto Networks
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# Author: $YOURNAME and $EMAIL
"""
Palo Alto Networks cnc-skeleton
This software is provided without support, warranty, or guarantee.
Use at your own risk.
"""
from django import forms
from django.contrib import messages
from django.shortcuts import HttpResponseRedirect
# Every app will need to import at least the CNCBaseFormView
from pan_cnc.views import CNCBaseFormView, ProvisionSnippetView
# All class attributes can be defined here or in the .pan-cnc.yaml
# In this case, we have defined class level attributes there. This makes it possible to
# create apps while writing no code at all. Just create a view in the .pan-cnc.yaml based on a
# CNCBaseFormView and configure the attributes as needed.
# If you want additional logic, then you subclass the CNCBaseFormView and add your logic there.
# The two main methods to override are 'generate_dynamic_form' and 'form_valid'.
#
# generate_dynamic_form gets called before the web form is created and displayed to the user
#
# form_valid is called after they submit the form
#
class ExampleAppView(CNCBaseFormView):
def form_valid(self, form):
# we now have the form from the user, let's get some values to perform some logic
# every variable entered by the user is saved in the user session. We can access it using this
# convenience method:
var_name = self.get_value_from_workflow('var_name', 'DEFAULT_IF_NOT_FOUND')
var_name_again = self.get_value_from_workflow('var_name_again', 'DEFAULT_IF_NOT_FOUND')
# silly exercise to just upper case the value entered by the user
var_name_upper = str(var_name).upper()
var_name_again_reverse = str(var_name_again)[::-1]
# now, save the values back to the workflow
self.save_value_to_workflow('var_name', var_name_upper)
self.save_value_to_workflow('var_name_again', var_name_again_reverse)
# and call our super to continue processing
return super().form_valid(form)
# Again, override ProvisionSnippetView, as we are only building a workflow here.
# CNCBaseFormView will only display the form and perform a redirect after 'form_valid';
# however, ProvisionSnippetView will actually redirect to another CNC class based on the skillet type.
# I.e. this is where the logic of how to interact with APIs, PAN-OS devices, render templates, etc. is all done.
# You usually want a child of this class to be the 'last' in a chain if you need extended logic.
class ExampleAppPasswordView(ProvisionSnippetView):
def get_snippet(self):
return self.snippet
# this method allows us to customize what is shown to the user beyond what is present in the loaded skillet
# 'variables' section
def generate_dynamic_form(self):
# let's first get the generated from from our base class
dynamic_form = super().generate_dynamic_form()
dynamic_form.fields['password_2'] = forms.CharField(widget=forms.PasswordInput(render_value=True),
initial='')
return dynamic_form
# the user has now completed the form and we have the results
def form_valid(self, form):
# Everything the user has entered will be available here in the 'workflow'
# Note that any 'variable' entries defined in the .meta-cnc snippet will
# be automatically added to the session workflow
workflow = self.get_workflow()
# get the values from the user submitted here
var_name = workflow.get('var_name')
var_name_again = workflow.get('var_name_again')
example_password = workflow.get('example_password')
# to access variables that were not defined in the snippet
# you can grab them directly from the POST on the request object
password_2 = self.request.POST['password_2']
print(f'checking if {example_password} matches {password_2}')
if example_password != password_2:
# Send an error message back to the user
messages.add_message(self.request, messages.ERROR, 'Passwords do not match!')
return HttpResponseRedirect('workflow00')
print('Got some vars here!')
print(f'Found value for var_name: {var_name}')
print(f'Found another value for var_name_again {var_name_again}')
return super().form_valid(form)
| 44.163793 | 111 | 0.728479 | 2,844 | 0.555143 | 0 | 0 | 0 | 0 | 0 | 0 | 3,428 | 0.669139 |
48e75715f9ebbd7bc9cad087839a0b649f005b70 | 1,312 | py | Python | tcc_server/emulatorRPi.py | MegaNo0body/tcc | 469824a8afc1cf846793212d42f6c8c43ee4b0bf | [
"MIT"
]
| 1 | 2016-09-29T22:39:31.000Z | 2016-09-29T22:39:31.000Z | tcc_server/emulatorRPi.py | MegaNo0body/tcc | 469824a8afc1cf846793212d42f6c8c43ee4b0bf | [
"MIT"
]
| null | null | null | tcc_server/emulatorRPi.py | MegaNo0body/tcc | 469824a8afc1cf846793212d42f6c8c43ee4b0bf | [
"MIT"
]
| null | null | null | import sys
from time import sleep
from random import randint
from urllib.request import urlopen
from urllib.parse import urlencode
if len(sys.argv) != 2:
print('Por favor, usar: ' + sys.argv[0] + ' {idSensor}')
print('Exemplo: ' + sys.argv[0] + ' 8')
else:
sensorId = sys.argv[1]
URL_SERVICO = 'http://127.0.0.1:8081/tcc/sensor/' + sensorId + '/inserir'
VARIACAO_MAXIMA = 5
valores = {
'Chuva': 80.0,
'UmidadeAr': 85.0,
'UmidadeSolo': 80.0,
'TemperaturaAr': 30.0,
'TemperaturaSolo': 25.0
}
variacao = {}
for k in valores:
valores[k] = valores[k] + randint(-3, +3) / 10
variacao[k] = 0.0
accel = {}
while True:
for k in variacao:
accel[k] = randint(-1, 1) / 10  # randint requires integer bounds; step is -0.1, 0.0 or +0.1
r = randint(10, 30)
for i in range(r):
data = {}
for k in variacao:
variacao[k] = variacao[k] + accel[k]
variacao[k] = max(variacao[k], -VARIACAO_MAXIMA)
variacao[k] = min(variacao[k], +VARIACAO_MAXIMA)
data[k] = '%.2f' % (valores[k] + round(variacao[k], 2))
data = urlencode(data)
print(data)
urlopen(URL_SERVICO, data.encode('ascii'))
sleep(0.50)
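# Example of a single urlencoded payload produced above (illustrative values only):
#   Chuva=80.30&UmidadeAr=85.10&UmidadeSolo=79.80&TemperaturaAr=30.20&TemperaturaSolo=25.00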
| 31.238095 | 77 | 0.51753 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 168 | 0.128049 |
48e7717d4dc4d7ba6b003ee81bea9813e26ea8e2 | 1,487 | py | Python | sayn/logging/file_logger.py | robin-173/sayn | d1cf36b92fad6a1798b57ad80abb22e8386e0e86 | [
"Apache-2.0"
]
| 105 | 2020-04-23T17:04:34.000Z | 2022-03-18T15:47:52.000Z | sayn/logging/file_logger.py | robin-173/sayn | d1cf36b92fad6a1798b57ad80abb22e8386e0e86 | [
"Apache-2.0"
]
| 53 | 2020-06-12T14:41:12.000Z | 2022-01-24T13:04:58.000Z | sayn/logging/file_logger.py | robin-173/sayn | d1cf36b92fad6a1798b57ad80abb22e8386e0e86 | [
"Apache-2.0"
]
| 9 | 2020-04-23T16:56:23.000Z | 2021-08-16T10:54:48.000Z | from pathlib import Path
import logging
from .logger import Logger
from .log_formatter import LogFormatter
class FileLogger(Logger):
fmt = LogFormatter(use_colour=False, output_ts=False)
logger = None
def __init__(self, folder, format=None):
if format is None:
format = ("%(asctime)s|%(levelname)s|%(message)s",)
formatter = logging.Formatter(format)
log_file = Path(folder, "sayn.log")
if not log_file.parent.exists():
log_file.parent.mkdir(parents=True)
handler = logging.FileHandler(log_file)
handler.setLevel(logging.DEBUG)
handler.setFormatter(formatter)
logger = logging.getLogger(__name__)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
self.logger = logger
def print(self, s=None):
if s is not None:
if s["level"] == "info":
func = self.logger.info
elif s["level"] == "error":
func = self.logger.error
elif s["level"] == "warning":
func = self.logger.warning
else:
func = self.logger.debug
s = s["message"]
if isinstance(s, str):
s = [s]
elif not isinstance(s, list):
raise ValueError("error in logging print")
func(f"{s[0]}")
for e in s[1:]:
for l in e.split("\n"):
func(f"{l}")
| 28.056604 | 63 | 0.543376 | 1,376 | 0.925353 | 0 | 0 | 0 | 0 | 0 | 0 | 144 | 0.096839 |
48e84fceaf520fea1c5ef759977376465d7f8dcf | 1,514 | py | Python | tests/test_docs.py | gitter-badger/pygsuite | 536766c36f653edbc7585141f1c3327f508e19da | [
"MIT"
]
| null | null | null | tests/test_docs.py | gitter-badger/pygsuite | 536766c36f653edbc7585141f1c3327f508e19da | [
"MIT"
]
| null | null | null | tests/test_docs.py | gitter-badger/pygsuite | 536766c36f653edbc7585141f1c3327f508e19da | [
"MIT"
]
| null | null | null | from pygsuite import DefaultFonts, TextStyle, Color
from pygsuite.docs.doc_elements.paragraph import Paragraph
BRIGHT_GREEN_HEX = "#72FF33"
def test_text(test_document):
document = test_document
docbody = document.body
docbody.delete()
docbody.add_text(
"TEST_CUSTOM\n",
style=TextStyle(font_size=18, font_weight=200, color=Color(hex=BRIGHT_GREEN_HEX)),
)
docbody.add_text("TEST_DEFAULT\n", style=DefaultFonts.NORMAL_TEXT)
docbody.add_text("TEST_INDEX\n", style=DefaultFonts.NORMAL_TEXT, position=1)
document.flush()
text = [item for item in document.body if isinstance(item, Paragraph)]
assert text[0].text.strip() == "TEST_INDEX"
assert text[2].text.strip() == "TEST_DEFAULT"
# TODO: return style objects
assert text[1].elements[0].style.font_size == 18
def test_paragraph(test_document):
document = test_document
docbody = document.body
docbody.delete()
docbody.add_text(
"TEST_CUSTOM\n",
style=TextStyle(font_size=18, font_weight=200, color=Color(hex=BRIGHT_GREEN_HEX)),
)
docbody.flush()
docbody.content[1].text = "TEST_CUSTOM_SETTER"
docbody.add_text("INSERT\n", position=0)
docbody.flush()
docbody.paragraphs[1].elements[0].style = TextStyle(
font_size=24, font_weight=500, color=Color(hex=BRIGHT_GREEN_HEX)
)
docbody.flush()
assert docbody.content[2].text.strip() == "TEST_CUSTOM_SETTER"
assert docbody.paragraphs[1].elements[0].style.font_size == 24
| 33.644444 | 90 | 0.707398 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 173 | 0.114267 |
48e92b16767155e8dc5662502fba6db4a07dc542 | 71,657 | py | Python | neutra/vae.py | dieterichlawson/google-research | 7ca9a612aa4239533c6ed8ef98543f9780d19f2b | [
"Apache-2.0"
]
| 4 | 2020-02-04T16:23:45.000Z | 2021-08-30T11:56:01.000Z | neutra/vae.py | lceustc/google-research | bf793f31022db2636f42e132198ffe8bd9631b58 | [
"Apache-2.0"
]
| 10 | 2020-09-26T00:19:12.000Z | 2022-03-12T00:04:29.000Z | neutra/vae.py | lceustc/google-research | bf793f31022db2636f42e132198ffe8bd9631b58 | [
"Apache-2.0"
]
| 1 | 2020-02-29T05:06:38.000Z | 2020-02-29T05:06:38.000Z | # coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python2, python3
# pylint: disable=invalid-name,g-bad-import-order,missing-docstring
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import functools
import os
from absl import app
from absl import flags
from concurrent import futures
import gin
import numpy as np
from six.moves import range
from six.moves import zip
import tensorflow as tf
import tensorflow_probability as tfp
from typing import Any, Dict, List, Optional, Tuple
from neutra import utils
tfd = tfp.distributions
tfb = tfp.bijectors
FLAGS = flags.FLAGS
TRAIN_BATCH = 250
TEST_BATCH = 1000
AIS_BATCH = 50
def ReduceL2(tensor, dims):
return tf.sqrt(tf.reduce_sum(tf.square(tensor), dims))
@utils.MakeTFTemplate
def Conv2DWN(inputs,
num_filters,
kernel_size=[3, 3],
stride=[1, 1],
pad="SAME",
activation=None,
weights_initializer=utils.L2HMCInitializer(),
biases_initializer=tf.zeros_initializer(),
scope=None):
if activation is None:
activation = lambda x: x
num_inputs = int(inputs.shape[3])
with tf.variable_scope(scope, "conv_2d_wn"):
w = tf.get_variable(
"w", [kernel_size[0], kernel_size[1], num_inputs, num_filters],
initializer=weights_initializer)
if biases_initializer is not None:
b = tf.get_variable("b", [num_filters], initializer=biases_initializer)
g = tf.get_variable(
"g", initializer=tf.log(ReduceL2(w.initialized_value(), [0, 1, 2])))
g = tf.exp(g)
w = tf.reshape(g, [1, 1, 1, num_filters]) * tf.nn.l2_normalize(w, [0, 1, 2])
out = tf.nn.conv2d(inputs, w, [1, stride[0], stride[1], 1], pad)
if biases_initializer is not None:
out += tf.reshape(b, [1, 1, 1, num_filters])
return activation(out)
def GetLinearARMask(num_inputs, num_outputs, zero_diagonal=False):
assert num_inputs % num_outputs == 0 or num_outputs % num_inputs == 0, "%d vs %d" % (num_inputs, num_outputs)
mask = np.ones([num_inputs, num_outputs], dtype=np.float32)
if num_outputs >= num_inputs:
k = num_outputs // num_inputs
for i in range(num_inputs):
mask[i + 1:, i * k:(i + 1) * k] = 0
if zero_diagonal:
mask[i:i + 1, i * k:(i + 1) * k] = 0
else:
k = num_inputs // num_outputs
for i in range(num_outputs):
mask[(i + 1) * k:, i:i + 1] = 0
if zero_diagonal:
mask[i * k:(i + 1) * k:, i:i + 1] = 0
return mask
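# Example (illustrative): GetLinearARMask(3, 3) is upper triangular,
#   [[1, 1, 1],
#    [0, 1, 1],
#    [0, 0, 1]]
# so output i may depend on inputs j <= i; zero_diagonal=True also zeroes the
# diagonal, giving the strict autoregressive ordering j < i.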
def GetConvARMask(h, w, num_inputs, num_filters, zero_diagonal=False):
l = (h - 1) // 2
m = (w - 1) // 2
mask = np.ones([h, w, num_inputs, num_filters], dtype=np.float32)
mask[:l, :, :, :] = 0
mask[l, :m, :, :] = 0
mask[l, m, :, :] = GetLinearARMask(num_inputs, num_filters, zero_diagonal)
return mask
@utils.MakeTFTemplate
def Conv2DAR(inputs, num_filters,
kernel_size=[3, 3],
zero_diagonal=False,
weights_initializer=None,
biases_initializer=tf.zeros_initializer(),
scope=None):
num_inputs = int(inputs.get_shape()[3])
mask = GetConvARMask(kernel_size[0], kernel_size[1], num_inputs, num_filters, zero_diagonal)
w = tf.get_variable("w", [kernel_size[0], kernel_size[1], num_inputs, num_filters], initializer=weights_initializer)
b = tf.get_variable("b", [num_filters], initializer=biases_initializer)
g = tf.get_variable(
"g", initializer=tf.log(ReduceL2(w.initialized_value() * mask, [0, 1, 2])))
g = tf.exp(g)
w = tf.reshape(g, [1, 1, 1, num_filters]) * tf.nn.l2_normalize(w * mask, [0, 1, 2])
out = tf.nn.conv2d(inputs, w, [1, 1, 1, 1], "SAME")
return out + tf.reshape(b, [1, 1, 1, num_filters])
@utils.MakeTFTemplate
def ConvAR(x,
h=None,
real_event_shape=[],
hidden_layers=[],
**kwargs):
#input_shape = (
# np.int32(x.shape.as_list())
# if x.shape.is_fully_defined() else tf.shape(x))
#x = tf.reshape(x, [-1] + real_event_shape)
for i, units in enumerate(hidden_layers):
x = Conv2DAR("conv2d_ar_%d"%i, num_filters=units, zero_diagonal=False, **kwargs)(inputs=x)
if i == 0 and h is not None:
if h.shape[-1] != x.shape[-1]:
x += Conv2DWN("conv2d_h", num_filters=int(x.shape[-1]), kernel_size=[1, 1], stride=[1, 1])(h)
else:
x += h
x = tf.nn.elu(x)
shift = Conv2DAR(
"conv2d_shift",
num_filters=real_event_shape[-1],
zero_diagonal=True,
**kwargs)(
inputs=x)
log_scale = Conv2DAR(
"conv2d_scale",
num_filters=real_event_shape[-1],
zero_diagonal=True,
**kwargs)(
inputs=x)
#shift = tf.reshape(shift, input_shape)
#log_scale = tf.reshape(log_scale, input_shape)
return shift, log_scale
@utils.MakeTFTemplate
def DenseWN(inputs,
num_outputs,
activation=None,
weights_initializer=utils.L2HMCInitializer(),
biases_initializer=tf.zeros_initializer(),
scope=None):
if activation is None:
activation = lambda x: x
num_inputs = int(inputs.get_shape()[1])
with tf.variable_scope(scope, "dense_wn"):
w = tf.get_variable(
"w", [num_inputs, num_outputs], initializer=weights_initializer)
if biases_initializer is not None:
b = tf.get_variable("b", [num_outputs], initializer=biases_initializer)
g = tf.get_variable(
"g", initializer=tf.log(ReduceL2(w.initialized_value(), [0])))
g = tf.exp(g)
w = g * tf.nn.l2_normalize(w, [0])
out = tf.matmul(inputs, w)
if biases_initializer is not None:
out += tf.expand_dims(b, 0)
return activation(out)
@utils.MakeTFTemplate
def ResConv2D(inputs,
num_filters,
kernel_size,
stride,
activation=tf.nn.elu,
output_init_factor=1.0):
x = Conv2DWN(
"conv2d_in",
num_filters=num_filters,
kernel_size=kernel_size,
stride=stride,
activation=activation)(
inputs=inputs)
non_linear = Conv2DWN(
"conv2d_nl",
num_filters=num_filters,
kernel_size=kernel_size,
stride=[1, 1],
weights_initializer=utils.L2HMCInitializer(factor=output_init_factor))(
inputs=x)
skip = Conv2DWN(
"conv2d_skip",
num_filters=num_filters,
kernel_size=kernel_size,
stride=stride,
weights_initializer=utils.L2HMCInitializer(factor=output_init_factor))(
inputs=inputs)
return non_linear + skip
@utils.MakeTFTemplate
def ResDense(inputs, num_dims, activation=None):
x = DenseWN("dense_in", num_outputs=num_dims, activation=activation)(inputs)
non_linear = DenseWN("dense_nl", num_outputs=num_dims)(x)
skip = DenseWN("dense_skip", num_outputs=num_dims)(x)
return non_linear + skip
@gin.configurable("conv_hier_encoder")
@utils.MakeTFTemplate
def ConvHierEncoder(images, depth = 2, num_blocks = 2, z_dims = 32, h_dims=160):
x = Conv2DWN("conv2d_in", num_filters=h_dims, stride=[2, 2], kernel_size=[5, 5])(inputs=images - 0.5)
means = []
raw_scales = []
contexts = []
for i in range(depth):
for j in range(num_blocks):
downsample = i > 0 and j == 0
if downsample:
stride = [2, 2]
else:
stride = [1, 1]
h = tf.nn.elu(x)
h = Conv2DWN("conv2d_in_%d_%d"%(i, j), num_filters=2*z_dims + 2 * h_dims, stride=stride, kernel_size=[3, 3])(inputs=h)
mean, raw_scale, context, h = tf.split(h, [z_dims, z_dims, h_dims, h_dims], -1)
means.append(mean)
raw_scales.append(raw_scale)
contexts.append(context)
h = tf.nn.elu(h)
h = Conv2DWN("conv2d_h_%d_%d"%(i, j), num_filters=h_dims, stride=[1, 1], kernel_size=[3, 3])(inputs=h)
if downsample:
x = tf.image.resize_nearest_neighbor(x, [int(x.shape[1]) // 2, int(x.shape[2]) // 2])
x += 0.1 * h
return means, raw_scales, contexts
@gin.configurable("conv_hier_prior_post")
@utils.MakeTFTemplate
def ConvHierPriorPost(images=None,
encoder=None,
z=None,
batch=None,
depth = 2,
num_blocks = 2,
z_dims = 32,
h_dims = 160,
image_width = 32):
is_q = encoder is not None
if is_q:
means, raw_scales, up_contexts = encoder(images)
if batch is None:
if images is not None:
batch = tf.shape(images)[0]
else:
batch = tf.shape(z[0])[0]
h = tf.get_variable("h_top", [h_dims], initializer=tf.zeros_initializer())
h = tf.reshape(h, [1, 1, 1, -1])
top_width = image_width // 2 ** num_blocks
h = tf.tile(h, [batch, top_width, top_width, 1])
x = h
ret_z = []
ret_log_pz = []
for i in reversed(list(range(depth))):
for j in reversed(list(range(num_blocks))):
downsample = i > 0 and j == 0
h = tf.nn.elu(x)
h_p = Conv2DWN(
"conv2d_p_%d_%d" % (i, j),
num_filters=2 * h_dims + 2 * z_dims,
stride=[1, 1],
kernel_size=[3, 3])(
inputs=h)
p_mean, p_raw_scale, down_context, h_det = tf.split(
h_p, [z_dims, z_dims, h_dims, h_dims], -1)
p_z = tfd.Independent(
tfd.Normal(loc=p_mean, scale=tf.nn.softplus(p_raw_scale)),
reinterpreted_batch_ndims=3)
if is_q:
h_q = Conv2DWN(
"conv2d_q_%d_%d" % (i, j),
num_filters=2 * z_dims,
stride=[1, 1],
kernel_size=[3, 3])(
inputs=h)
q_mean, q_raw_scale = tf.split(h_q, [z_dims, z_dims], -1)
context = down_context + up_contexts.pop()
q_mean += means.pop()
q_raw_scale += raw_scales.pop()
num_flat_dims = np.prod(q_mean.shape.as_list()[1:])
_maf_template = ConvAR(
"iaf_%d_%d" % (i, j),
real_event_shape=q_mean.shape.as_list()[1:],
hidden_layers=[h_dims, h_dims],
h=context,
weights_initializer=utils.L2HMCInitializer(factor=0.01))
def maf_template(x, t=_maf_template):
# TODO: I don't understand why the shape gets lost.
#x.set_shape([None, num_flat_dims])
x.set_shape([None] + q_mean.shape.as_list()[1:])
return t(x)
bijectors = []
#bijectors.append(tfb.Reshape(tf.shape(q_mean)[1:], [num_flat_dims]))
bijectors.append(
tfb.Invert(
tfb.MaskedAutoregressiveFlow(shift_and_log_scale_fn=maf_template)))
#bijectors.append(tfb.Reshape([num_flat_dims], tf.shape(q_mean)[1:]))
# Do the shift/scale explicitly, so that we can use bijector to map the
# distribution to the standard normal, which is helpful for HMC.
bijectors.append(tfb.AffineScalar(shift=q_mean, scale=tf.nn.softplus(q_raw_scale)))
bijector = tfb.Chain(bijectors)
mvn = tfd.Independent(
tfd.Normal(loc=tf.zeros_like(q_mean), scale=tf.ones_like(q_raw_scale)),
reinterpreted_batch_ndims=3)
q_z = tfd.TransformedDistribution(mvn, bijector)
if is_q:
dist = q_z
else:
dist = p_z
if z is None:
z_val = dist.sample()
else:
z_val = z[0]
z = z[1:]
ret_z.append(z_val)
ret_log_pz.append(dist.log_prob(z_val))
h = tf.concat([z_val, h_det], -1)
if downsample:
new_shape = [2 * int(x.shape[1]), 2 * int(x.shape[2])]
x = tf.image.resize_nearest_neighbor(x, new_shape)
h = tf.image.resize_nearest_neighbor(h, new_shape)
h = Conv2DWN("deconv2d_%d_%d" % (i, j), num_filters=h_dims, stride=[1, 1], kernel_size=[3, 3])(inputs=h)
x = x + 0.1 * h
x = tf.image.resize_nearest_neighbor(x, [2 * int(x.shape[1]), 2 * int(x.shape[2])])
x = Conv2DWN("conv2d_out", num_filters=3, stride=[1, 1], kernel_size=[5, 5])(inputs=x)
return ret_z, ret_log_pz, x
@gin.configurable("conv_encoder")
@utils.MakeTFTemplate
def ConvEncoder(images, num_outputs, hidden_dims = 450,
filter_scale = 1, fully_convolutional = False):
x = images
x = ResConv2D("res_1", num_filters=filter_scale * 16, kernel_size=[3, 3], stride=[2, 2])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_2", num_filters=filter_scale * 16, kernel_size=[3, 3], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_3", num_filters=filter_scale * 16, kernel_size=[3, 3], stride=[2, 2])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_4", num_filters=filter_scale * 32, kernel_size=[3, 3], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_5", num_filters=filter_scale * 32, kernel_size=[3, 3], stride=[2, 2])(x)
x = tf.nn.elu(x)
if fully_convolutional:
return ResConv2D("res_out", num_filters=num_outputs, kernel_size=[3, 3], stride=[1, 1])(x)
else:
x = tf.reshape(x, [-1, filter_scale * 32 * 4 * 4])
x = ResDense("dense_h", num_dims=hidden_dims, activation=tf.nn.elu)(x)
return DenseWN(
"dense_out",
num_outputs=num_outputs,
weights_initializer=utils.L2HMCInitializer())(
x)
@gin.configurable("conv_decoder")
@utils.MakeTFTemplate
def ConvDecoder(encoding,
output_shape,
filter_scale = 1,
hidden_dims = 450,
fully_convolutional = False):
if isinstance(encoding, (list, tuple)):
encoding = encoding[0]
if fully_convolutional:
tf.logging.info("Encoding shape: %s", encoding.shape)
x = ResConv2D("res_in", num_filters=filter_scale * 32, kernel_size=[3, 3], stride=[1, 1])(encoding)
else:
x = ResDense("dense_in", num_dims=hidden_dims, activation=tf.nn.elu)(encoding)
x = ResDense("dense_h", num_dims=filter_scale * 32 * 4 * 4, activation=tf.nn.elu)(x)
x = tf.reshape(x, [-1, 4, 4, filter_scale * 32])
x = tf.image.resize_nearest_neighbor(x, [8, 8])
x = ResConv2D("res_5", num_filters=32 * filter_scale, kernel_size=[3, 3], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_4", num_filters=32 * filter_scale, kernel_size=[3, 3], stride=[1, 1])(x)
x = tf.nn.elu(x)
if output_shape[1] == 28:
# 8x8 -> 7x7
x = x[:, 1:, 1:, :]
x = tf.image.resize_nearest_neighbor(x, [output_shape[0] // 2, output_shape[1] // 2])
x = ResConv2D("res_3", num_filters=16 * filter_scale, kernel_size=[3, 3], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_2", num_filters=16 * filter_scale, kernel_size=[3, 3], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = tf.image.resize_nearest_neighbor(x, [output_shape[0], output_shape[1]])
x = ResConv2D(
"res_1",
num_filters=output_shape[-1],
kernel_size=[3, 3],
stride=[1, 1],
output_init_factor=0.01)(
x)
return tf.reshape(x, [-1] + output_shape)
@gin.configurable("conv_encoder2")
@utils.MakeTFTemplate
def ConvEncoder2(images, num_outputs, filter_scale = 1):
x = images
x = Conv2DWN("conv_1", num_filters=filter_scale * 16, kernel_size=[5, 5], stride=[2, 2], activation=tf.nn.elu)(x)
x = Conv2DWN("conv_2", num_filters=filter_scale * 16, kernel_size=[5, 5], stride=[1, 1], activation=tf.nn.elu)(x)
x = Conv2DWN("conv_3", num_filters=filter_scale * 16, kernel_size=[5, 5], stride=[2, 2], activation=tf.nn.elu)(x)
x = Conv2DWN("conv_4", num_filters=filter_scale * 32, kernel_size=[5, 5], stride=[1, 1], activation=tf.nn.elu)(x)
x = Conv2DWN("conv_5", num_filters=filter_scale * 32, kernel_size=[5, 5], stride=[2, 2], activation=tf.nn.elu)(x)
return ResConv2D("conv_out", num_filters=num_outputs, kernel_size=[3, 3], stride=[1, 1])(x)
@gin.configurable("conv_decoder2")
@utils.MakeTFTemplate
def ConvDecoder2(encoding,
output_shape,
filter_scale = 1):
if isinstance(encoding, (list, tuple)):
encoding = encoding[0]
x = Conv2DWN("conv_in", num_filters=filter_scale * 32, kernel_size=[3, 3], stride=[1, 1])(encoding)
x = tf.image.resize_nearest_neighbor(x, [8, 8])
x = Conv2DWN("conv_5", num_filters=32 * filter_scale, kernel_size=[5, 5], stride=[1, 1], activation=tf.nn.elu)(x)
x = Conv2DWN("conv_4", num_filters=32 * filter_scale, kernel_size=[5, 5], stride=[1, 1], activation=tf.nn.elu)(x)
if output_shape[1] == 28:
# 8x8 -> 7x7
x = x[:, 1:, 1:, :]
x = tf.image.resize_nearest_neighbor(x, [output_shape[0] // 2, output_shape[1] // 2])
x = Conv2DWN("conv_3", num_filters=16 * filter_scale, kernel_size=[5, 5], stride=[1, 1], activation=tf.nn.elu)(x)
x = Conv2DWN("conv_2", num_filters=16 * filter_scale, kernel_size=[5, 5], stride=[1, 1], activation=tf.nn.elu)(x)
x = tf.image.resize_nearest_neighbor(x, [output_shape[0], output_shape[1]])
x = Conv2DWN(
"conv_1",
num_filters=output_shape[-1],
kernel_size=[5, 5],
stride=[1, 1],
weights_initializer=utils.L2HMCInitializer(0.01))(
x)
return tf.reshape(x, [-1] + output_shape)
@gin.configurable("conv_encoder3")
@utils.MakeTFTemplate
def ConvEncoder3(images, num_outputs, hidden_dims = 450,
filter_scale = 1):
# This comes from VLAE paper.
x = images
x = ResConv2D("res_1", num_filters=filter_scale * 48, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_2", num_filters=filter_scale * 48, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = Conv2DWN("conv_3", num_filters=filter_scale * 48, kernel_size=[5, 5], stride=[2, 2])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_4", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_5", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = Conv2DWN("conv_6", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[2, 2])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_7", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_8", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_9", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
return Conv2DWN("conv_10", num_filters=num_outputs, kernel_size=[1, 1], stride=[1, 1])(x)
@gin.configurable("conv_decoder3")
@utils.MakeTFTemplate
def ConvDecoder3(encoding,
output_shape,
filter_scale = 1):
if isinstance(encoding, (list, tuple)):
encoding = encoding[0]
x = encoding
x = Conv2DWN("conv_1", num_filters=filter_scale * 96, kernel_size=[1, 1], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_2", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_3", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_4", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = tf.image.resize_nearest_neighbor(x, [output_shape[0] // 2, output_shape[1] // 2])
x = Conv2DWN("conv_5", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_6", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_7", num_filters=filter_scale * 96, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = tf.image.resize_nearest_neighbor(x, [output_shape[0], output_shape[1]])
x = Conv2DWN("conv_8", num_filters=filter_scale * 48, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_9", num_filters=filter_scale * 48, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = ResConv2D("res_10", num_filters=filter_scale * 48, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = Conv2DWN(
"conv_out",
num_filters=output_shape[-1],
kernel_size=[5, 5],
stride=[1, 1],
weights_initializer=utils.L2HMCInitializer(0.01))(
x)
return tf.reshape(x, [-1] + output_shape)
@gin.configurable("conv_encoder4")
@utils.MakeTFTemplate
def ConvEncoder4(images, num_outputs,
filter_scale = 1,
fully_convolutional = False):
x = images
x = Conv2DWN("conv_1", num_filters=filter_scale * 64, kernel_size=[5, 5], stride=[2, 2])(x)
x = tf.nn.elu(x)
x = Conv2DWN("conv_2", num_filters=filter_scale * 64, kernel_size=[5, 5], stride=[2, 2])(x)
x = tf.nn.elu(x)
if fully_convolutional:
return Conv2DWN("conv_out", num_filters=num_outputs, kernel_size=[1, 1], stride=[1, 1])(x)
else:
return DenseWN("dense_out", num_outputs=num_outputs)(tf.layers.flatten(x))
@gin.configurable("conv_decoder4")
@utils.MakeTFTemplate
def ConvDecoder4(encoding,
output_shape,
filter_scale = 1,
fully_convolutional = False):
if isinstance(encoding, (list, tuple)):
encoding = encoding[0]
x = encoding
if not fully_convolutional:
x = tf.reshape(DenseWN("dense_in", num_outputs=8*8*16)(x), [-1, 8, 8, 16])
x = tf.image.resize_nearest_neighbor(x, [output_shape[0] // 2, output_shape[1] // 2])
x = Conv2DWN("conv_1", num_filters=filter_scale * 64, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = tf.image.resize_nearest_neighbor(x, [output_shape[0], output_shape[1]])
x = Conv2DWN("conv_2", num_filters=filter_scale * 64, kernel_size=[5, 5], stride=[1, 1])(x)
x = tf.nn.elu(x)
x = Conv2DWN(
"conv_out",
num_filters=output_shape[-1],
kernel_size=[1, 1],
stride=[1, 1],
weights_initializer=utils.L2HMCInitializer(0.01))(
x)
return tf.reshape(x, [-1] + output_shape)
@gin.configurable("dense_encoder")
@utils.MakeTFTemplate
def DenseEncoder(images,
num_outputs,
hidden_layer_sizes = [1024, 1024],
activation=tf.nn.elu):
x = tf.layers.flatten(images)
# Center the data, assuming it goes from [0, 1] initially.
# x = 2.0 * x - 1.0
for size in hidden_layer_sizes:
x = tf.layers.dense(
x, size, activation=activation, kernel_initializer=utils.L2HMCInitializer())
return tf.layers.dense(x, num_outputs, kernel_initializer=utils.L2HMCInitializer())
@gin.configurable("dense_decoder")
@utils.MakeTFTemplate
def DenseDecoder(encoding,
output_shape,
hidden_layer_sizes = [1024, 1024],
activation=tf.nn.elu):
if isinstance(encoding, (list, tuple)):
encoding = encoding[0]
x = tf.layers.flatten(encoding)
for size in hidden_layer_sizes:
x = tf.layers.dense(
x, size, activation=activation, kernel_initializer=utils.L2HMCInitializer())
num_outputs = np.prod(output_shape)
return tf.reshape(
tf.layers.dense(
x, num_outputs, kernel_initializer=utils.L2HMCInitializer(factor=0.01)),
[-1] + output_shape)
def IndependentBernouli3D(logits):
return tfd.Independent(
tfd.Bernoulli(logits=logits), reinterpreted_batch_ndims=3)
def IndependentDiscreteLogistic3D(locations,
scales):
dist = tfd.TransformedDistribution(
distribution=tfd.Logistic(loc=locations, scale=scales),
bijector=tfb.AffineScalar(scale=255.0))
dist = tfd.QuantizedDistribution(distribution=dist, low=0., high=255.0)
dist = tfd.Independent(dist, reinterpreted_batch_ndims=3)
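# ScaleHack adapts the quantized distribution, defined over [0, 255], to the
# [0, 1] pixel range used by the rest of the model: samples are divided by 255
# and inputs to log_prob are rescaled (and clipped) back to [0, 255].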
class ScaleHack(object):
def __init__(self, dist):
self._dist = dist
def sample(self, *args, **kwargs):
return self._dist.sample(*args, **kwargs) / 255.0
def log_prob(self, x, *args, **kwargs):
return self._dist.log_prob(tf.clip_by_value(x * 255.0, 0.0, 255.0), *args, **kwargs)
return ScaleHack(dist)
def IndependentDiscreteLogistic3D2(locations,
scales):
class IndependentDiscreteLogistic(object):
def __init__(self, loc, scale):
self._loc = loc
self._scale = scale
def sample(self, *args, **kwargs):
dist = tfd.Logistic(loc=self._loc, scale=self._scale)
return tf.clip_by_value(dist.sample(*args, **kwargs), 0.0, 1.0)
def log_prob(self, x, *args, **kwargs):
sample = x
mean = self._loc
scales = self._scale
binsize=1.0 / 256.0
sample = (tf.floor(sample / binsize) * binsize - mean) / scales
return tf.reduce_sum(
tf.log(
tf.sigmoid(sample + binsize / scales) - tf.sigmoid(sample) + 1e-7),
[-1, -2, -3])
return IndependentDiscreteLogistic(locations, scales)
@gin.configurable("dense_recognition")
@utils.MakeTFTemplate
def DenseRecognition(images, encoder, z=None, sigma_activation="exp"
):
"""Models Q(z | encoder(x))"""
encoding = encoder(images)
num_dims = int(encoding.shape[-1]) // 2
encoding_parts = tf.unstack(
tf.reshape(encoding, [-1, num_dims, 2]), num=2, axis=-1)
mu = encoding_parts[0]
if sigma_activation == "exp":
sigma = tf.exp(0.5 * encoding_parts[1])
elif sigma_activation == "softplus":
sigma = tf.nn.softplus(encoding_parts[1])
bijector = tfb.Affine(shift=mu, scale_diag=sigma)
mvn = tfd.MultivariateNormalDiag(
loc=tf.zeros_like(mu), scale_diag=tf.ones_like(sigma))
dist = tfd.TransformedDistribution(mvn, bijector)
if z is None:
z = [dist.sample()]
tf.logging.info("bijector z shape: %s", z[0].shape)
return z, [dist.log_prob(z[0])], [bijector] # pytype: disable=bad-return-type
@gin.configurable("dense_recognition_affine")
@utils.MakeTFTemplate
def DenseRecognitionAffine(images, encoder, z=None,
z_dims=None):
"""Models Q(z | encoder(x))"""
encoding = encoder(images)
mu = encoding[:, :z_dims]
tril_raw = tfd.fill_triangular(encoding[:, z_dims:])
sigma = tf.nn.softplus(tf.matrix_diag_part(tril_raw))
tril = tf.linalg.set_diag(tril_raw, sigma)
bijector = tfb.Affine(shift=mu, scale_tril=tril)
mvn = tfd.MultivariateNormalDiag(
loc=tf.zeros_like(mu), scale_diag=tf.ones_like(sigma))
dist = tfd.TransformedDistribution(mvn, bijector)
if z is None:
z = [dist.sample()]
return z, [dist.log_prob(z[0])], [bijector] # pytype: disable=bad-return-type
@gin.configurable("dense_recognition_affine_lr")
@utils.MakeTFTemplate
def DenseRecognitionAffineLR(images, encoder, z=None,
z_dims=None, rank=1):
"""Models Q(z | encoder(x))"""
encoding = encoder(images)
mu = encoding[:, :z_dims]
sigma = encoding[:, z_dims:2*z_dims]
perturb = encoding[:, 2*z_dims:]
perturb = tf.reshape(perturb, [-1, z_dims, rank])
sigma = tf.nn.softplus(sigma)
bijector = tfb.Affine(shift=mu, scale_diag=sigma,
scale_perturb_factor=perturb)
mvn = tfd.MultivariateNormalDiag(
loc=tf.zeros_like(mu), scale_diag=tf.ones_like(sigma))
dist = tfd.TransformedDistribution(mvn, bijector)
if z is None:
z = [dist.sample()]
return z, [dist.log_prob(z[0])], [bijector] # pytype: disable=bad-return-type
@gin.configurable("dense_recognition_rnvp")
@utils.MakeTFTemplate
def DenseRecognitionRNVP(
images,
encoder,
z=None,
num_bijectors=3,
condition_bijector=False,
layer_sizes=[128, 128],
sigma_activation="exp"):
"""Models Q(z | encoder(x)), z = f(w, encoder)"""
encoding = encoder(images)
if condition_bijector:
num_parts = 3
else:
num_parts = 2
num_dims = int(encoding.shape[-1]) // num_parts
encoding_parts = tf.unstack(
tf.reshape(encoding, [-1, num_dims, num_parts]), num=num_parts, axis=-1)
if condition_bijector:
h = encoding_parts[2]
else:
h = None
swap = tfb.Permute(permutation=np.arange(num_dims - 1, -1, -1))
bijectors = []
for i in range(num_bijectors):
_rnvp_template = utils.DenseShiftLogScale(
"rnvp_%d" % i,
h=h,
hidden_layers=layer_sizes,
activation=tf.nn.softplus,
kernel_initializer=utils.L2HMCInitializer(factor=0.01))
def rnvp_template(x, output_units, t=_rnvp_template):
# TODO: I don't understand why the shape gets lost.
x.set_shape([None, num_dims - output_units])
return t(x, output_units)
bijectors.append(
tfb.Invert(
tfb.RealNVP(
num_masked=num_dims // 2,
shift_and_log_scale_fn=rnvp_template)))
bijectors.append(swap)
# Drop the last swap.
bijectors = bijectors[:-1]
# Do the shift/scale explicitly, so that we can use bijector to map the
# distribution to the standard normal, which is helpful for HMC.
mu = encoding_parts[0]
if sigma_activation == "exp":
sigma = tf.exp(0.5 * encoding_parts[1])
elif sigma_activation == "softplus":
sigma = tf.nn.softplus(encoding_parts[1])
bijectors.append(tfb.Affine(shift=mu, scale_diag=sigma))
bijector = tfb.Chain(bijectors)
mvn = tfd.MultivariateNormalDiag(
loc=tf.zeros_like(mu), scale_diag=tf.ones_like(sigma))
dist = tfd.TransformedDistribution(mvn, bijector)
if z is None:
z = [dist.sample()]
return z, [dist.log_prob(z[0])], [bijector] # pytype: disable=bad-return-type
@gin.configurable("dense_recognition_iaf")
@utils.MakeTFTemplate
def DenseRecognitionIAF(
images,
encoder,
z=None,
num_iaf_layers=2,
iaf_layer_sizes=[128, 128],
condition_iaf=False,
sigma_activation="exp"):
"""Models Q(z | encoder(x)), z = f(w, encoder)"""
encoding = encoder(images)
if condition_iaf:
num_parts = 3
else:
num_parts = 2
num_dims = int(encoding.shape[-1]) // num_parts
encoding_parts = tf.unstack(
tf.reshape(encoding, [-1, num_dims, num_parts]), num=num_parts, axis=-1)
if condition_iaf:
h = encoding_parts[2]
else:
h = None
swap = tfb.Permute(permutation=np.arange(num_dims - 1, -1, -1))
bijectors = []
for i in range(num_iaf_layers):
#_maf_template = tfb.masked_autoregressive_default_template(
# hidden_layers=iaf_layer_sizes,
# activation=tf.nn.softplus,
# kernel_initializer=utils.L2HMCInitializer(factor=0.01))
_maf_template = utils.DenseAR(
"maf_%d" % i,
hidden_layers=iaf_layer_sizes,
h=h,
activation=tf.nn.softplus,
kernel_initializer=utils.L2HMCInitializer(factor=0.01))
def maf_template(x, t=_maf_template):
# TODO: I don't understand why the shape gets lost.
x.set_shape([None, num_dims])
return t(x)
bijectors.append(
tfb.Invert(
tfb.MaskedAutoregressiveFlow(shift_and_log_scale_fn=maf_template)))
bijectors.append(swap)
# Drop the last swap.
bijectors = bijectors[:-1]
# Do the shift/scale explicitly, so that we can use bijector to map the
# distribution to the standard normal, which is helpful for HMC.
mu = encoding_parts[0]
if sigma_activation == "exp":
sigma = tf.exp(0.5 * encoding_parts[1])
elif sigma_activation == "softplus":
sigma = tf.nn.softplus(encoding_parts[1])
bijectors.append(tfb.Affine(shift=mu, scale_diag=sigma))
bijector = tfb.Chain(bijectors)
mvn = tfd.MultivariateNormalDiag(
loc=tf.zeros_like(mu), scale_diag=tf.ones_like(sigma))
dist = tfd.TransformedDistribution(mvn, bijector)
if z is None:
z = [dist.sample()]
return z, [dist.log_prob(z[0])], [bijector] # pytype: disable=bad-return-type
class FlipImageBijector(tfb.Bijector):
def __init__(self, validate_args=False, name=None):
"""Creates the `Permute` bijector.
Args:
permutation: An `int`-like vector-shaped `Tensor` representing the
permutation to apply to the rightmost dimension of the transformed
`Tensor`.
validate_args: Python `bool` indicating whether arguments should be
checked for correctness.
name: Python `str`, name given to ops managed by this object.
Raises:
TypeError: if `not permutation.dtype.is_integer`.
ValueError: if `permutation` does not contain exactly one of each of
`{0, 1, ..., d}`.
"""
super(FlipImageBijector, self).__init__(
forward_min_event_ndims=3,
is_constant_jacobian=True,
validate_args=validate_args,
name=name or "flip_image")
def _forward(self, x):
return tf.image.flip_left_right(tf.image.flip_up_down(x))
def _inverse(self, y):
return tf.image.flip_up_down(tf.image.flip_left_right(y))
def _inverse_log_det_jacobian(self, y):
# is_constant_jacobian = True for this bijector, hence the
# `log_det_jacobian` need only be specified for a single input, as this will
# be tiled to match `event_ndims`.
return tf.constant(0., dtype=y.dtype.base_dtype)
def _forward_log_det_jacobian(self, x):
return tf.constant(0., dtype=x.dtype.base_dtype)
@gin.configurable("conv_iaf")
@utils.MakeTFTemplate
def ConvIAF(
images,
encoder,
z=None,
num_iaf_layers=2,
iaf_layer_sizes=[128, 128],
condition_iaf=False,
sigma_activation="softplus"):
"""Models Q(z | encoder(x)), z = f(w, encoder)"""
encoding = encoder(images)
if encoding.shape.ndims != 4:
raise ValueError("ConvIAF requires a convolutional encoder. %s", encoding.shape)
if condition_iaf:
num_parts = 3
else:
num_parts = 2
num_dims = int(encoding.shape[-1]) // num_parts
encoding_parts = tf.unstack(
tf.reshape(encoding, [-1] + encoding.shape.as_list()[1:-1] + [num_dims, num_parts]), num=num_parts, axis=-1)
if condition_iaf:
h = encoding_parts[2]
else:
h = None
bijectors = []
for i in range(num_iaf_layers):
_maf_template = ConvAR(
"iaf_%d" % i,
real_event_shape=encoding_parts[0].shape.as_list()[1:],
hidden_layers=iaf_layer_sizes,
h=h,
weights_initializer=utils.L2HMCInitializer(factor=0.01))
def maf_template(x, t=_maf_template):
# TODO: I don't understand why the shape gets lost.
x.set_shape([None] + encoding_parts[0].shape.as_list()[1:])
return t(x)
bijectors.append(
tfb.Invert(
tfb.MaskedAutoregressiveFlow(shift_and_log_scale_fn=maf_template)))
bijectors.append(FlipImageBijector())
# Drop the last swap.
bijectors = bijectors[:-1]
# Do the shift/scale explicitly, so that we can use bijector to map the
# distribution to the standard normal, which is helpful for HMC.
mu = encoding_parts[0]
if sigma_activation == "exp":
sigma = tf.exp(0.5 * encoding_parts[1])
elif sigma_activation == "softplus":
sigma = tf.nn.softplus(encoding_parts[1])
bijectors.append(tfb.AffineScalar(shift=mu, scale=sigma))
bijector = tfb.Chain(bijectors)
mvn = tfd.Independent(
tfd.Normal(loc=tf.zeros_like(mu), scale=tf.ones_like(sigma)),
reinterpreted_batch_ndims=3)
dist = tfd.TransformedDistribution(mvn, bijector)
if z is None:
z = [dist.sample()]
return z, [dist.log_prob(z[0])], [bijector] # pytype: disable=bad-return-type
@gin.configurable("conv_shift_scale")
@utils.MakeTFTemplate
def ConvShiftScale(
images,
encoder,
z=None,
sigma_activation="softplus"):
"""Models Q(z | encoder(x)), z = f(w, encoder)"""
encoding = encoder(images)
if encoding.shape.ndims != 4:
raise ValueError("ConvIAF requires a convolutional encoder. %s", encoding.shape)
num_parts = 2
num_dims = int(encoding.shape[-1]) // num_parts
encoding_parts = tf.unstack(
tf.reshape(encoding, [-1] + encoding.shape.as_list()[1:-1] + [num_dims, num_parts]), num=num_parts, axis=-1)
# Do the shift/scale explicitly, so that we can use bijector to map the
# distribution to the standard normal, which is helpful for HMC.
mu = encoding_parts[0]
if sigma_activation == "exp":
sigma = tf.exp(0.5 * encoding_parts[1])
elif sigma_activation == "softplus":
sigma = tf.nn.softplus(encoding_parts[1])
bijector = tfb.AffineScalar(shift=mu, scale=sigma)
mvn = tfd.Independent(
tfd.Normal(loc=tf.zeros_like(mu), scale=tf.ones_like(sigma)),
reinterpreted_batch_ndims=3)
dist = tfd.TransformedDistribution(mvn, bijector)
if z is None:
z = [dist.sample()]
return z, [dist.log_prob(z[0])], [bijector] # pytype: disable=bad-return-type
@utils.MakeTFTemplate
def SimplePrior(z=None, batch=None,
num_dims=None):
"""Models P(z)"""
mvn = tfd.MultivariateNormalDiag(
loc=tf.zeros(num_dims), scale_diag=tf.ones(num_dims))
if z is None:
z = [mvn.sample(batch)]
return z, [mvn.log_prob(z[0])] # pytype: disable=bad-return-type
@utils.MakeTFTemplate
def Simple3DPrior(z=None, batch=None,
shape=None):
"""Models P(z)"""
mvn = tfd.Independent(tfd.Normal(loc=tf.zeros(shape), scale=tf.ones(shape)), reinterpreted_batch_ndims=3)
if z is None:
z = [mvn.sample(batch)]
return z, [mvn.log_prob(z[0])] # pytype: disable=bad-return-type
@utils.MakeTFTemplate
def DenseMNISTNoise(x=None, z=None, decoder=None, return_means=True):
"""Models P(x | decoder(z))"""
decoding = decoder(z)
bernoulli = IndependentBernouli3D(decoding)
if x is None:
if return_means:
x = bernoulli.mean()
else:
x = tf.to_float(bernoulli.sample())
return x, bernoulli.log_prob(x)
@gin.configurable("cifar10_noise")
@utils.MakeTFTemplate
def DenseCIFAR10TNoise(x=None, z=None, decoder=None, return_means=True, uniform_scale=False, logistic_impl="mine"):
"""Models P(x | decoder(z))"""
decoding = decoder(z)
if uniform_scale:
scale = tf.get_variable("scale", initializer=1.0)
scales = tf.reshape(scale, [1, 1, 1])
else:
scales = tf.get_variable(
"scales", [32, 32, 3], initializer=tf.ones_initializer())
if logistic_impl == "mine":
disc_logistic = IndependentDiscreteLogistic3D(decoding, tf.nn.softplus(scales))
elif logistic_impl == "kingma":
disc_logistic = IndependentDiscreteLogistic3D2(decoding, tf.nn.softplus(scales))
if x is None:
x = tf.to_float(disc_logistic.sample())
return x, disc_logistic.log_prob(x)
@gin.configurable("learning_rate")
def LearningRate(train_size, global_step, schedule = "hoffman", warmup_steps=0):
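  # Piecewise-constant learning-rate decay keyed to the number of epochs seen
  # so far (train_size // TRAIN_BATCH steps per epoch), with an optional
  # linear warmup over the first warmup_steps steps.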
if schedule == "hoffman":
base = tf.train.piecewise_constant(
global_step, [train_size * 500 // TRAIN_BATCH], [1e-3, 1e-4])
elif schedule == "new":
base = tf.train.piecewise_constant(
global_step,
[train_size * 500 // TRAIN_BATCH, train_size * 800 // TRAIN_BATCH],
[1e-3, 1e-4, 1e-5])
elif schedule == "new_gentle":
base = tf.train.piecewise_constant(
global_step,
[train_size * 500 // TRAIN_BATCH, train_size * 800 // TRAIN_BATCH],
[0.5e-3, 1e-4, 1e-5])
elif schedule == "fast":
base = tf.train.piecewise_constant(
global_step,
[train_size * 800 // TRAIN_BATCH],
[1e-2, 1e-5])
else:
raise ValueError("Invalid schedule: " + schedule)
if warmup_steps == 0:
return base
else:
return tf.minimum(base * tf.to_float(global_step) / warmup_steps, base)
VAEOutputs = collections.namedtuple(
"VAEOutputs", "log_p_x_z, elbo, sample_means, recon_means, klqp, total_klqp, post_z, prior_z")
AISOutputs = collections.namedtuple(
"AISOutputs",
"log_p, p_accept, z_fin, recon"
)
def MakeVAE(images, recognition, prior, noise, beta, num_samples,
min_kl):
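  # Standard VAE objective: the per-layer KL(q(z|x) || p(z)) terms are floored
  # at min_kl ("free bits") and scaled by beta before being subtracted from
  # the reconstruction term. Prior samples are also decoded into image means
  # for monitoring.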
z, log_q_z = recognition(images)
_, log_p_z = prior(z)
_, log_p_x_z = noise(images, z)
post_z = z
log_q_z = [tf.reduce_mean(layer_log_q_z) for layer_log_q_z in log_q_z]
log_p_z = [tf.reduce_mean(layer_log_p_z) for layer_log_p_z in log_p_z]
log_p_x_z = tf.reduce_mean(log_p_x_z)
klqp = [layer_log_q_z - layer_log_p_z for layer_log_q_z, layer_log_p_z in zip(log_q_z, log_p_z)]
klqp = [tf.maximum(min_kl, layer_klqp) for layer_klqp in klqp]
total_klqp = tf.add_n(klqp)
elbo = log_p_x_z - beta * total_klqp
recon_means, _ = noise(None, z)
z, _ = prior(batch=num_samples)
sample_means, _ = noise(None, z)
return VAEOutputs(
log_p_x_z=log_p_x_z,
elbo=elbo,
sample_means=sample_means,
recon_means=recon_means,
klqp=klqp,
total_klqp=total_klqp,
post_z=post_z,
prior_z=z)
DLGMOutputs = collections.namedtuple(
"DLGMOutputs",
"elbo, sample_means, mcmc_log_p, recon_means, p_accept, post_z, post_z_chain, q_z, xentpq"
)
@gin.configurable("dlgm")
class DLGM(object):
def __init__(self,
z_dims=64,
beta=1.0,
beta_steps=0,
step_size=0.2,
num_leapfrog_steps=5,
num_hmc_steps=2,
use_neutra=True,
condition_bijector=False,
bijector_type="iaf",
encoder_type="dense",
q_loss_type="klqp",
min_kl=0.0,
symm_factor=0.5,
save_chain_state=False,
chain_warmup_epochs=5,
use_q_z_for_gen=False,
no_gen_train_steps=0,
dataset=None,
use_bijector_for_ais=False,
prior_type="simple",
adapt_step_size=False,
step_size_gain=1e-3,
use_q_z_for_ais=False,
affine_rank=1,
step_size_warmup=0):
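    # Deep latent Gaussian model refined with MCMC in latent space: the
    # recognition network provides the initial state and, when use_neutra is
    # set, the NeuTra bijector for HMC; the generator is trained on the
    # post-HMC samples (or on the ELBO for the first no_gen_train_steps
    # steps), and the recognition network is trained with a KL(q||p),
    # KL(p||q) or symmetrized loss depending on q_loss_type.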
self.train_size = dataset.train_size
self._use_q_z_for_ais = use_q_z_for_ais
if dataset.name == "mnist":
output_shape = [28, 28, 1]
elif dataset.name == "cifar10":
output_shape = [32, 32, 3]
self._z_dims = z_dims
self._use_bijector_for_ais = use_bijector_for_ais
if beta_steps > 0:
frac = tf.to_float(
tf.train.get_or_create_global_step()) / tf.to_float(beta_steps)
frac = tf.minimum(frac, 1.0)
self._beta = frac * beta
else:
self._beta = tf.constant(beta)
self._min_kl = tf.to_float(min_kl)
self._use_neutra = use_neutra
self._num_leapfrog_steps = num_leapfrog_steps
self._num_hmc_steps = num_hmc_steps
self._q_loss_type = q_loss_type
self._symm_factor = symm_factor
self._save_chain_state = save_chain_state
self._chain_warmup_epochs = chain_warmup_epochs
self._use_q_z_for_gen = use_q_z_for_gen
self._no_gen_train_steps = no_gen_train_steps
self._step_size_gain = step_size_gain
self._adapt_step_size = adapt_step_size
self._step_size_warmup = step_size_warmup
self._init_step_size = step_size
if self._adapt_step_size:
self._step_size = tf.get_variable("step_size", initializer=step_size)
else:
self._step_size = tf.constant(step_size)
if self._save_chain_state:
self._chain_state = tf.get_variable(
"train_chain_state", [self.train_size, z_dims], trainable=False)
if bijector_type == "affine":
# TriL + shift
num_outputs = (z_dims * (z_dims + 1)) // 2 + z_dims
elif bijector_type == "affine_lr":
num_outputs = z_dims * 2 + z_dims * affine_rank
elif condition_bijector and bijector_type not in ["conv_shift_scale", "shift_scale"]:
num_outputs = 3 * z_dims
else:
num_outputs = 2 * z_dims
if encoder_type == "hier_conv":
#assert dataset.name == "cifar10"
#self._encoder = ConvHierEncoder("encoder")
#self._prior_posterior = ConvHierPriorPost("prior_post")
#self._decoder = lambda z: self._prior_posterior(z=z)[2]
#self._prior = lambda z=None, batch=None: self._prior_posterior(z=z, batch=batch)[:2]
#self._recog = lambda images, z=None: self._prior_posterior(images=images, z=z, encoder=self._encoder)[:2]
pass
else:
if encoder_type == "dense":
self._encoder = DenseEncoder(
"encoder", num_outputs=num_outputs, activation=tf.nn.softplus)
self._decoder = DenseDecoder(
"decoder", output_shape=output_shape, activation=tf.nn.softplus)
elif encoder_type == "conv":
self._encoder = ConvEncoder("encoder", num_outputs=num_outputs)
self._decoder = ConvDecoder("decoder", output_shape=output_shape)
conv_z_shape = [4, 4, self._z_dims]
elif encoder_type == "conv2":
self._encoder = ConvEncoder2("encoder", num_outputs=num_outputs)
self._decoder = ConvDecoder2("decoder", output_shape=output_shape)
conv_z_shape = [4, 4, self._z_dims]
elif encoder_type == "conv3":
self._encoder = ConvEncoder3("encoder", num_outputs=num_outputs)
self._decoder = ConvDecoder3("decoder", output_shape=output_shape)
conv_z_shape = [8, 8, self._z_dims]
elif encoder_type == "conv4":
self._encoder = ConvEncoder4("encoder", num_outputs=num_outputs)
self._decoder = ConvDecoder4("decoder", output_shape=output_shape)
conv_z_shape = [8, 8, self._z_dims]
if prior_type == "simple":
self._prior = SimplePrior("prior", num_dims=self._z_dims)
elif prior_type == "simple_3d":
self._prior = Simple3DPrior("prior", shape=conv_z_shape)
if bijector_type == "iaf":
recog = DenseRecognitionIAF(
"recog", encoder=self._encoder, condition_iaf=condition_bijector)
elif bijector_type == "rnvp":
recog = DenseRecognitionRNVP(
"recog",
encoder=self._encoder,
condition_bijector=condition_bijector)
elif bijector_type == "shift_scale":
recog = DenseRecognition(
"recog",
encoder=self._encoder)
elif bijector_type == "conv_shift_scale":
recog = ConvShiftScale("recog", encoder=self._encoder)
elif bijector_type == "affine":
recog = DenseRecognitionAffine("recog", encoder=self._encoder, z_dims=z_dims)
elif bijector_type == "affine_lr":
recog = DenseRecognitionAffineLR("recog", encoder=self._encoder, z_dims=z_dims, rank=affine_rank)
elif bijector_type == "conv_iaf":
recog = ConvIAF("recog", encoder=self._encoder, condition_iaf=condition_bijector)
self._recog = recog
if dataset.name == "mnist":
self._noise = DenseMNISTNoise("noise", decoder=self._decoder)
else:
self._noise = DenseCIFAR10TNoise("noise", decoder=self._decoder)
def AdjustedStepSize(self):
if self._step_size_warmup > 0:
global_step = tf.train.get_or_create_global_step()
max_step = self._init_step_size * tf.to_float(
global_step) / self._step_size_warmup
return tf.where(global_step > self._step_size_warmup, self._step_size,
tf.minimum(max_step, self._step_size))
else:
return self._step_size
def RecogVars(self):
return self._encoder.variables + self._recog.variables
def GenVars(self):
return (
self._prior.variables + self._decoder.variables + self._noise.variables)
def MakeDLGM(self,
images,
other_z_init=None,
use_other_z_init=None,
num_samples=64):
z, log_q_z, bijector = self._recog(images)
_, log_p_z = self._prior(z)
_, log_p_x_z = self._noise(images, z)
post_z = z
q_z = z
if use_other_z_init is not None:
z_init = [tf.cond(use_other_z_init, lambda: tf.identity(other_layer_z),
lambda: tf.identity(layer_z)) for other_layer_z, layer_z in zip(z, other_z_init)]
z_init = z
log_q_z = [tf.reduce_mean(layer_log_q_z) for layer_log_q_z in log_q_z]
log_p_z = [tf.reduce_mean(layer_log_p_z) for layer_log_p_z in log_p_z]
log_p_x_z = tf.reduce_mean(log_p_x_z)
klqp = [layer_log_q_z - layer_log_p_z for layer_log_q_z, layer_log_p_z in zip(log_q_z, log_p_z)]
klqp = [tf.maximum(self._min_kl, layer_klqp) for layer_klqp in klqp]
total_klqp = tf.add_n(klqp)
elbo = log_p_x_z - self._beta * total_klqp
def TargetLogProbFn(*z):
for post_z_e, z_e in zip(post_z, z):
tf.logging.info("Shape here: %s %s", post_z_e.shape, z_e.shape)
z_e.set_shape(post_z_e.shape)
_, log_p_z = self._prior(z)
_, log_p_x_z = self._noise(images, z)
return tf.add_n(log_p_z) + log_p_x_z
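    # Refine the variational sample with a few HMC steps targeting
    # p(z) p(x | z). With use_neutra=True the chain runs in the warped space
    # defined by the recognition bijector (TransformedTransitionKernel),
    # which is the NeuTra trick for neutralizing poor posterior geometry.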
kernel = tfp.mcmc.HamiltonianMonteCarlo(
target_log_prob_fn=TargetLogProbFn,
step_size=self.AdjustedStepSize(),
num_leapfrog_steps=self._num_leapfrog_steps)
if self._use_neutra:
kernel = tfp.mcmc.TransformedTransitionKernel(
inner_kernel=kernel, bijector=bijector)
states, kernel_results = tfp.mcmc.sample_chain(
num_results=self._num_hmc_steps, current_state=z, kernel=kernel)
z = [tf.stop_gradient(s[-1, Ellipsis]) for s in states]
post_z = z
_, log_q_z, _ = self._recog(images, z=z)
xentpq = -tf.add_n([tf.reduce_mean(layer_log_q_z) for layer_log_q_z in log_q_z])
if self._use_q_z_for_gen:
z = q_z
recon_means, _ = self._noise(None, z)
_, log_p_z = self._prior(z)
_, log_p_x_z = self._noise(images, z)
mcmc_log_p = tf.reduce_mean(tf.add_n(log_p_z) + log_p_x_z)
if self._use_neutra:
log_accept_ratio = kernel_results.inner_results.log_accept_ratio
else:
log_accept_ratio = kernel_results.log_accept_ratio
p_accept = tf.reduce_mean(tf.exp(tf.minimum(log_accept_ratio, 0.)))
z, _ = self._prior(batch=num_samples)
sample_means, _ = self._noise(None, z)
return DLGMOutputs(
elbo=elbo,
sample_means=sample_means,
mcmc_log_p=mcmc_log_p,
recon_means=recon_means,
p_accept=p_accept,
post_z=post_z,
post_z_chain=states,
q_z=z_init,
xentpq=xentpq)
def GetPosterior(self, images):
outputs = self.MakeDLGM(images)
return outputs.post_z
def TrainOp(self, data_idx, images):
global_step = tf.train.get_or_create_global_step()
learning_rate = LearningRate(self.train_size, global_step)
if self._save_chain_state:
other_z_init = tf.gather(self._chain_state, data_idx)
use_other_z_init = (
global_step > self._chain_warmup_epochs * self.train_size // TRAIN_BATCH)
else:
other_z_init = None
use_other_z_init = None
outputs = self.MakeDLGM(
images, other_z_init=other_z_init, use_other_z_init=use_other_z_init)
opt = tf.train.AdamOptimizer(learning_rate=learning_rate)
#gen_opt = tf.train.AdamOptimizer(learning_rate=learning_rate)
utils.LogAndSummarizeMetrics({
"learning_rate": learning_rate,
"elbo": outputs.elbo,
"mcmc_log_p": outputs.mcmc_log_p,
"mcmc_p_accept": outputs.p_accept,
"step_size": self.AdjustedStepSize(),
}, False)
tf.summary.image(
"sample_means", utils.StitchImages(outputs.sample_means))
if self._save_chain_state:
with tf.control_dependencies([outputs.post_z]):
chain_state_update_op = tf.scatter_update(self._chain_state, data_idx,
outputs.post_z)
else:
chain_state_update_op = tf.no_op()
if self._adapt_step_size:
new_step_size = self._step_size + self._step_size_gain * (outputs.p_accept - 0.651)
new_step_size = tf.clip_by_value(new_step_size, 1e-3, 0.5)
step_size_op = self._step_size.assign(
tf.where(global_step > self._step_size_warmup, new_step_size,
self._step_size))
else:
step_size_op = tf.no_op()
with tf.name_scope("recog_train"):
if self._q_loss_type == "klqp":
loss = -outputs.elbo
elif self._q_loss_type == "symm":
loss = (
self._symm_factor * -outputs.elbo +
(1.0 - self._symm_factor) * outputs.xentpq)
elif self._q_loss_type == "klpq":
loss = outputs.xentpq
if self._save_chain_state:
# Not super efficient...
loss = tf.cond(use_other_z_init, lambda: tf.identity(loss),
lambda: tf.identity(-outputs.elbo))
recog_train_op = tf.contrib.training.create_train_op(
loss,
opt,
summarize_gradients=True,
variables_to_train=self.RecogVars(),
transform_grads_fn=utils.ProcessGradients)
with tf.name_scope("gen_train"):
gen_loss = tf.cond(global_step < self._no_gen_train_steps,
lambda: -outputs.elbo, lambda: -outputs.mcmc_log_p)
gen_train_op = tf.contrib.training.create_train_op(
gen_loss,
opt,
None,
summarize_gradients=True,
variables_to_train=self.GenVars(),
transform_grads_fn=utils.ProcessGradients)
return tf.group(recog_train_op, gen_train_op, chain_state_update_op, step_size_op)
def EvalOp(self, data_idx, images):
outputs = self.MakeDLGM(images)
tf.summary.image("data", utils.StitchImages(images[:64]))
tf.summary.image(
"recon_means", utils.StitchImages(outputs.recon_means[:64]))
return utils.LogAndSummarizeMetrics({
"elbo": outputs.elbo,
"xentpq": outputs.xentpq,
"mcmc_log_p": outputs.mcmc_log_p,
"mcmc_p_accept": outputs.p_accept,
})
def AIS(self, images, num_chains):
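    # Estimates log p(x) with annealed importance sampling: chains start from
    # the prior (or from q(z | x) when use_q_z_for_ais=True), anneal towards
    # p(z) p(x | z), and the per-image estimates are combined across
    # num_chains chains with a log-mean-exp.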
def ProposalLogProbFn(*z):
if self._use_q_z_for_ais:
_, log_p_z, _ = self._recog(images, z=z)
else:
_, log_p_z = self._prior(z)
return tf.add_n(log_p_z)
def TargetLogProbFn(*z):
_, log_p_z = self._prior(z)
_, log_p_x_z = self._noise(images, z)
return tf.add_n(log_p_z) + log_p_x_z
images = tf.tile(images, [num_chains, 1, 1, 1])
if self._use_q_z_for_ais:
z_init, _, _ = self._recog(images)
else:
z_init, _ = self._prior(batch=tf.shape(images)[0])
if self._use_bijector_for_ais:
_, _, bijector = self._recog(images)
else:
bijector = None
ais_outputs = utils.AIS(ProposalLogProbFn, TargetLogProbFn, z_init, bijector=bijector)
recons, _ = self._noise(None, ais_outputs.z_fin)
tf.summary.image("data", utils.StitchImages(images[:64]))
tf.summary.image("recon_means", utils.StitchImages(recons[:64]))
tf.summary.scalar("p_accept", tf.reduce_mean(ais_outputs.p_accept))
return AISOutputs(
log_p=tf.reduce_logsumexp(
tf.reshape(ais_outputs.log_p, [num_chains, -1]) - tf.log(
tf.to_float(num_chains)), 0),
p_accept=ais_outputs.p_accept,
recon=recons,
z_fin=ais_outputs.z_fin)
@gin.configurable("vae")
class VAE(object):
def __init__(self,
z_dims=64,
condition_bijector=False,
bijector_type="iaf",
encoder_type="dense",
beta=1.0,
beta_steps=0,
min_kl=0,
use_q_z_for_ais=False,
dataset=None,
prior_type="simple",
affine_rank=1):
self.train_size = dataset.train_size
if dataset.name == "mnist":
output_shape = [28, 28, 1]
elif dataset.name == "cifar10":
output_shape = [32, 32, 3]
self._z_dims = z_dims
self._beta = beta
self._use_q_z_for_ais = use_q_z_for_ais
if beta_steps > 0:
frac = tf.to_float(
tf.train.get_or_create_global_step()) / tf.to_float(beta_steps)
frac = tf.minimum(frac, 1.0)
self._beta = frac * beta
else:
self._beta = tf.constant(beta)
self._min_kl = tf.to_float(min_kl)
if bijector_type == "affine":
# TriL + shift
num_outputs = (z_dims * (z_dims + 1)) // 2 + z_dims
elif bijector_type == "affine_lr":
num_outputs = z_dims * 2 + z_dims * affine_rank
elif condition_bijector and bijector_type not in ["conv_shift_scale", "shift_scale"]:
num_outputs = 3 * z_dims
else:
num_outputs = 2 * z_dims
if encoder_type == "hier_conv":
assert dataset.name == "cifar10"
self._encoder = ConvHierEncoder("encoder")
self._prior_posterior = ConvHierPriorPost("prior_post")
self._decoder = lambda z: self._prior_posterior(z=z)[2]
self._prior = lambda z=None, batch=None: self._prior_posterior(z=z, batch=batch)[:2]
self._recog = lambda images, z=None: self._prior_posterior(images=images, z=z, encoder=self._encoder)[:2]
else:
if encoder_type == "dense":
self._encoder = DenseEncoder(
"encoder", num_outputs=num_outputs, activation=tf.nn.softplus)
self._decoder = DenseDecoder(
"decoder", output_shape=output_shape, activation=tf.nn.softplus)
elif encoder_type == "conv":
self._encoder = ConvEncoder("encoder", num_outputs=num_outputs)
self._decoder = ConvDecoder("decoder", output_shape=output_shape)
conv_z_shape = [4, 4, self._z_dims]
elif encoder_type == "conv2":
self._encoder = ConvEncoder2("encoder", num_outputs=num_outputs)
self._decoder = ConvDecoder2("decoder", output_shape=output_shape)
conv_z_shape = [4, 4, self._z_dims]
elif encoder_type == "conv3":
self._encoder = ConvEncoder3("encoder", num_outputs=num_outputs)
self._decoder = ConvDecoder3("decoder", output_shape=output_shape)
conv_z_shape = [8, 8, self._z_dims]
elif encoder_type == "conv4":
self._encoder = ConvEncoder4("encoder", num_outputs=num_outputs)
self._decoder = ConvDecoder4("decoder", output_shape=output_shape)
conv_z_shape = [8, 8, self._z_dims]
if prior_type == "simple":
self._prior = SimplePrior("prior", num_dims=self._z_dims)
elif prior_type == "simple_3d":
self._prior = Simple3DPrior("prior", shape=conv_z_shape)
if bijector_type == "iaf":
recog = DenseRecognitionIAF(
"recog", encoder=self._encoder, condition_iaf=condition_bijector)
elif bijector_type == "rnvp":
recog = DenseRecognitionRNVP(
"recog",
encoder=self._encoder,
condition_bijector=condition_bijector)
elif bijector_type == "shift_scale":
recog = DenseRecognition("recog", encoder=self._encoder)
elif bijector_type == "conv_shift_scale":
recog = ConvShiftScale("recog", encoder=self._encoder)
elif bijector_type == "affine":
recog = DenseRecognitionAffine("recog", encoder=self._encoder, z_dims=z_dims)
elif bijector_type == "conv_iaf":
recog = ConvIAF("recog", encoder=self._encoder, condition_iaf=condition_bijector)
elif bijector_type == "affine_lr":
recog = DenseRecognitionAffineLR("recog", encoder=self._encoder, z_dims=z_dims, rank=affine_rank)
# Drop the bijector return.
self._recog = lambda *args, **kwargs: recog(*args, **kwargs)[:2]
if dataset.name == "mnist":
self._noise = DenseMNISTNoise("noise", decoder=self._decoder)
else:
self._noise = DenseCIFAR10TNoise("noise", decoder=self._decoder)
def MakeVAE(self, images, beta_override=None, num_samples=64):
if beta_override is not None:
beta = beta_override
else:
beta = self._beta
return MakeVAE(images, self._recog, self._prior, self._noise, beta,
num_samples, self._min_kl)
def TrainOp(self, data_idx, images):
outputs = self.MakeVAE(images)
global_step = tf.train.get_or_create_global_step()
learning_rate = LearningRate(self.train_size, global_step)
opt = tf.train.AdamOptimizer(learning_rate=learning_rate)
metrics = {
"learning_rate": learning_rate,
"log_p_x_z": outputs.log_p_x_z,
"elbo": outputs.elbo,
"klqp": outputs.total_klqp,
"beta": self._beta,
}
for i, layer_klqp in enumerate(outputs.klqp):
metrics["klqp_%d"%i] = layer_klqp
utils.LogAndSummarizeMetrics(metrics, False)
tf.summary.image(
"sample_means", utils.StitchImages(outputs.sample_means))
return tf.contrib.training.create_train_op(
-outputs.elbo,
opt,
summarize_gradients=True,
transform_grads_fn=utils.ProcessGradients)
def GetPosterior(self, images):
outputs = self.MakeVAE(images)
return outputs.post_z
def EvalOp(self, data_idx, images):
outputs = self.MakeVAE(images, 1.0)
tf.summary.image("data", utils.StitchImages(images[:64]))
tf.summary.image(
"recon_means", utils.StitchImages(outputs.recon_means[:64]))
metrics = {
"elbo": outputs.elbo,
"klqp": outputs.total_klqp,
}
for i, layer_klqp in enumerate(outputs.klqp):
metrics["klqp_%d"%i] = layer_klqp
return utils.LogAndSummarizeMetrics(metrics)
def AIS(self, images, num_chains):
outputs = self.MakeVAE(images)
def ProposalLogProbFn(*z):
if self._use_q_z_for_ais:
_, log_p_z = self._recog(images, z=z)
else:
_, log_p_z = self._prior(z)
return tf.add_n(log_p_z)
def TargetLogProbFn(*z):
_, log_p_z = self._prior(z)
_, log_p_x_z = self._noise(images, z)
return tf.add_n(log_p_z) + log_p_x_z
images = tf.tile(images, [num_chains, 1, 1, 1])
if self._use_q_z_for_ais:
z_init, _ = self._recog(images)
else:
z_init, _ = self._prior(batch=tf.shape(images)[0])
ais_outputs = utils.AIS(ProposalLogProbFn, TargetLogProbFn, z_init)
recons, _ = self._noise(None, ais_outputs.z_fin)
tf.summary.image("data", utils.StitchImages(images[:64]))
tf.summary.image("recon_means", utils.StitchImages(recons[:64]))
tf.summary.scalar("p_accept", tf.reduce_mean(ais_outputs.p_accept))
return AISOutputs(
log_p=tf.reduce_logsumexp(
tf.reshape(ais_outputs.log_p, [num_chains, -1]) - tf.log(
tf.to_float(num_chains)), 0),
p_accept=ais_outputs.p_accept,
recon=recons,
z_fin=ais_outputs.z_fin)
@gin.configurable("train")
def Train(model, dataset, train_dir, master, epochs=600, polyak_averaging=0.0, warmstart_ckpt=""):
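  # Builds the training op for the configured model and runs it with
  # tf.contrib.training.train, optionally applying Polyak (EMA) averaging to
  # the weights and warm-starting all variables from warmstart_ckpt.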
data_idx, images = dataset.TrainBatch(TRAIN_BATCH, epochs)
train_op = model.TrainOp(data_idx, images)
if polyak_averaging > 0.0:
tf.logging.info("Using polyak averaging")
ema = tf.train.ExponentialMovingAverage(decay=polyak_averaging)
with tf.control_dependencies([train_op]):
train_op = ema.apply()
utils.LogAndSaveHParams()
tf.Session.reset(master)
if warmstart_ckpt:
tf.init_from_checkpoint(warmstart_ckpt, {"/": "/"})
hooks = [
tf.train.StopAtStepHook(last_step=dataset.train_size * epochs //
TRAIN_BATCH),
tf.train.LoggingTensorHook(utils.GetLoggingOutputs(), every_n_secs=60)
]
tf.contrib.training.train(
train_op,
logdir=train_dir,
master=master,
hooks=hooks,
save_checkpoint_secs=120,
save_summaries_steps=60)
def Eval(model, dataset, train_dir, eval_dir, master,
use_polyak_averaging=False, max_number_of_evaluations=None):
data_idx, images = dataset.TestBatch(TEST_BATCH)
eval_op = model.EvalOp(data_idx, images)
utils.LogAndSaveHParams()
tf.train.get_or_create_global_step()
if use_polyak_averaging:
tf.logging.info("Using polyak averaging")
ema = tf.train.ExponentialMovingAverage(decay=0.99)
saver = tf.train.Saver(ema.variables_to_restore())
else:
saver = tf.train.Saver()
scaffold = tf.train.Scaffold(saver=saver)
tf.Session.reset(master)
hooks = [
# Just for logging.
tf.contrib.training.StopAfterNEvalsHook(dataset.test_size // TEST_BATCH),
tf.contrib.training.SummaryAtEndHook(eval_dir),
tf.train.LoggingTensorHook(utils.GetLoggingOutputs(), at_end=True)
]
tf.contrib.training.evaluate_repeatedly(
train_dir,
eval_ops=eval_op,
hooks=hooks,
# LOL...
eval_interval_secs=120,
max_number_of_evaluations=max_number_of_evaluations,
master=master,
scaffold=scaffold)
def AISEvalShard(shard, master, num_workers, num_chains, dataset, use_polyak_averaging, writer, train_dir, model_fn, batch):
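  # Runs AIS evaluation for one shard of the test set: restores the latest
  # checkpoint, iterates the shard until its iterator is exhausted while
  # accumulating per-image log p estimates, and restarts from scratch if the
  # (possibly remote) session is aborted.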
tf.logging.info("Thread started")
model = model_fn()
tf.logging.info("Built model")
shard_idx = tf.placeholder(tf.int64, [])
tf.logging.info("built data")
data_iterator = dataset.AISIterator(batch, shard_idx, num_workers)
images, _ = data_iterator.get_next()
tf.logging.info("Built mA")
ais_outputs = model.AIS(images, num_chains)
log_p = ais_outputs.log_p
p_accept = ais_outputs.p_accept
tf.logging.info("Built mB")
if shard == 1:
utils.LogAndSaveHParams()
summary_op = tf.summary.merge_all()
global_step = tf.train.get_or_create_global_step()
if use_polyak_averaging:
tf.logging.info("Using polyak averaging")
ema = tf.train.ExponentialMovingAverage(decay=0.99)
saver = tf.train.Saver(ema.variables_to_restore())
else:
saver = tf.train.Saver()
tf.logging.info("Built mC")
global_step_val = []
tf.logging.info("Starting shard %d, %s", shard, master)
#with tf.MonitoredSession(
# tf.train.ChiefSessionCreator(
# master=master,
# checkpoint_dir=train_dir)) as sess:
while True:
try:
tf.Session.reset(master)
with tf.Session(master) as sess:
all_log_p = np.zeros([0])
saver.restore(sess, tf.train.latest_checkpoint(train_dir))
sess.run(data_iterator.initializer, {shard_idx: shard})
try:
step_num = 0
while True:
fetch = {
"log_p": log_p,
"global_step": global_step,
"p_accept": p_accept
}
if shard == 0:
fetch["summary"] = summary_op
tf.logging.info("Shard %d step %d started.", shard, step_num)
fetch = sess.run(fetch)
tf.logging.info("Shard %d step %d done.", shard, step_num)
tf.logging.info("Shard %d log_p %.2f, p_accept: %.2f", shard,
np.mean(fetch["log_p"]),
np.mean(fetch["p_accept"]))
all_log_p = np.hstack([all_log_p, fetch["log_p"]])
if shard == 0 and step_num == 0:
global_step_val.append(fetch["global_step"])
writer.add_summary(fetch["summary"], global_step_val[0])
step_num += 1
except tf.errors.OutOfRangeError:
tf.logging.info("Shard %d done.", shard)
pass
return all_log_p
except tf.errors.AbortedError:
pass
def AISEval(model_fn, dataset, train_dir, eval_dir, worker_master_pattern,
num_workers, num_chains, use_polyak_averaging=False):
tf.reset_default_graph()
log_p_ph = tf.placeholder(tf.float32, [None])
log_p_summary = tf.summary.scalar("log_p", tf.reduce_mean(log_p_ph))
writer = tf.summary.FileWriter(eval_dir)
with futures.ThreadPoolExecutor(max_workers=num_workers) as executor:
results = []
for shard in range(num_workers):
tf.logging.info("Submitting shard %d", shard)
master = worker_master_pattern.format(shard)
results.append(
executor.submit(AISEvalShard, shard, master, num_workers, num_chains,
dataset, use_polyak_averaging, writer, train_dir,
model_fn, AIS_BATCH))
all_log_p = np.zeros([0])
for result in results:
log_p = result.result()
all_log_p = np.hstack([all_log_p, log_p])
log_p = np.mean(all_log_p)
tf.logging.info("Log P: %.2f", log_p)
with tf.Session() as sess:
writer.add_summary(
sess.run(log_p_summary, {log_p_ph: all_log_p}), 0)
writer.flush()
return log_p
MODEL_TO_CLASS = {"vae": VAE, "dlgm": DLGM}
def main(argv):
del argv # Unused.
utils.BindHParams(FLAGS.hparams)
if FLAGS.data_type == "mnist":
dataset = utils.MNISTDataset(FLAGS.mnist_data_dir, FLAGS.test_is_valid)
elif FLAGS.data_type == "fashion_mnist":
dataset = utils.MNISTDataset(FLAGS.fashion_mnist_data_dir, FLAGS.test_is_valid)
elif FLAGS.data_type == "cifar10":
dataset = utils.CIFAR10Dataset(FLAGS.cifar10_data_dir, FLAGS.test_is_valid)
elif FLAGS.data_type == "fake":
dataset = utils.FakeMNISTDataset()
if FLAGS.mode == "train":
model = MODEL_TO_CLASS[FLAGS.model](dataset=dataset)
Train(model, dataset, FLAGS.train_dir, FLAGS.master,
polyak_averaging=FLAGS.polyak_averaging)
elif FLAGS.mode == "eval":
model = MODEL_TO_CLASS[FLAGS.model](dataset=dataset)
Eval(model, dataset, FLAGS.train_dir, FLAGS.eval_dir,
FLAGS.master,
use_polyak_averaging=FLAGS.polyak_averaging > 0.0)
elif FLAGS.mode == "ais_eval":
replica_log_p = []
if FLAGS.ais_replicas:
replicas = FLAGS.ais_replicas
else:
replicas = list(range(FLAGS.ais_num_replicas))
for i in replicas:
train_dir = FLAGS.train_dir.format(i)
eval_dir = FLAGS.eval_dir.format(i)
model_fn = lambda: MODEL_TO_CLASS[FLAGS.model](dataset=dataset)
log_p = AISEval(model_fn, dataset, train_dir, eval_dir,
FLAGS.ais_worker_pattern, FLAGS.ais_num_workers,
FLAGS.ais_num_chains,
use_polyak_averaging=FLAGS.polyak_averaging > 0.0)
replica_log_p.append(log_p)
log_p = np.mean(replica_log_p)
std_log_p = np.std(replica_log_p)
tf.logging.info("Log P: %.2f +- %.2f", log_p,
std_log_p / np.sqrt(len(replicas)))
tf.logging.info("All log_p: %s", replica_log_p)
elif FLAGS.mode == "ais_eval2":
if FLAGS.ais_replicas:
replicas = FLAGS.ais_replicas
else:
replicas = list(range(FLAGS.ais_num_replicas))
for i in replicas:
tf.reset_default_graph()
train_dir = FLAGS.train_dir.format(i)
eval_dir = FLAGS.eval_dir.format(i)
model_fn = lambda: MODEL_TO_CLASS[FLAGS.model](dataset=dataset)
sentinel_filename = os.path.join(eval_dir, "ais_shard_%d_done" % FLAGS.ais_shard)
if tf.gfile.Exists(sentinel_filename):
continue
batch = FLAGS.ais_batch_size
assert (dataset.test_size // FLAGS.ais_num_workers) % batch == 0
writer = tf.summary.FileWriter(eval_dir)
log_p = AISEvalShard(FLAGS.ais_shard, "", FLAGS.ais_num_workers, FLAGS.ais_num_chains,
dataset, FLAGS.polyak_averaging > 0.0, writer, train_dir, model_fn, batch)
tf.gfile.MakeDirs(eval_dir)
with tf.gfile.Open(os.path.join(eval_dir, "ais_shard_%d" % FLAGS.ais_shard), "w") as f:
np.savetxt(f, log_p)
with tf.gfile.Open(sentinel_filename, "w") as f:
f.write("done")
if __name__ == "__main__":
flags.DEFINE_string("mnist_data_dir", "", "")
flags.DEFINE_string("fashion_mnist_data_dir", "", "")
flags.DEFINE_string("cifar10_data_dir", "", "")
flags.DEFINE_string("data_type", "mnist", "")
flags.DEFINE_enum("mode", "train", ["train", "eval", "ais_eval", "ais_eval2"], "")
flags.DEFINE_enum("model", "vae", list(MODEL_TO_CLASS.keys()), "")
flags.DEFINE_string("train_dir", "/tmp/vae/train", "")
flags.DEFINE_string("eval_dir", "/tmp/vae/eval", "")
flags.DEFINE_string("master", "", "")
flags.DEFINE_string("ais_worker_pattern", "", "")
flags.DEFINE_integer("ais_shard", 0, "")
flags.DEFINE_integer("ais_num_workers", 1, "")
flags.DEFINE_integer("ais_num_chains", 1, "")
flags.DEFINE_integer("ais_num_replicas", 1, "")
flags.DEFINE_list("ais_replicas", "", "Manual listing of replicas")
flags.DEFINE_integer("ais_batch_size", 25, "")
flags.DEFINE_float("polyak_averaging", 0.0, "")
flags.DEFINE_boolean("test_is_valid", False, "")
flags.DEFINE(utils.YAMLDictParser(), "hparams", "", "")
app.run(main)
| 35.057241 | 124 | 0.651744 | 23,491 | 0.327826 | 0 | 0 | 56,082 | 0.782645 | 0 | 0 | 8,428 | 0.117616 |
48e948236c66512a216844a7ad0e87904606f55a | 2034 | py | Python | flask_oauth2_login/base.py | BasicBeluga/flask-oauth2-login | 5a12ec70bcea72b2de079c072213be54f29b70b7 | ["MIT"] | 42 | 2015-01-13T08:51:04.000Z | 2022-01-14T04:15:31.000Z | flask_oauth2_login/base.py | BasicBeluga/flask-oauth2-login | 5a12ec70bcea72b2de079c072213be54f29b70b7 | ["MIT"] | 5 | 2015-04-29T19:31:11.000Z | 2020-03-28T19:37:43.000Z | flask_oauth2_login/base.py | BasicBeluga/flask-oauth2-login | 5a12ec70bcea72b2de079c072213be54f29b70b7 | ["MIT"] | 28 | 2015-06-16T20:30:40.000Z | 2021-04-08T15:33:10.000Z | from flask import request, session, url_for
from requests_oauthlib import OAuth2Session
class OAuth2Login(object):
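  # Base class for provider-specific OAuth2 login helpers built on
  # requests-oauthlib. init_app() reads CLIENT_ID / CLIENT_SECRET / SCOPE /
  # REDIRECT_* settings (prefixed with the subclass's config_prefix) and
  # registers the redirect endpoint. Subclasses are expected to provide
  # config_prefix, default_scope, default_redirect_path, redirect_endpoint,
  # auth_url, token_url, state_session_key and a get_profile() implementation.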
def __init__(self, app=None):
if app:
self.init_app(app)
self.app = app
def get_config(self, app, name, default_value=None):
return app.config.get(self.config_prefix + name, default_value)
def init_app(self, app):
self.client_id = self.get_config(app, "CLIENT_ID")
self.client_secret = self.get_config(app, "CLIENT_SECRET")
self.scope = self.get_config(app, "SCOPE", self.default_scope).split(",")
self.redirect_scheme = self.get_config(app, "REDIRECT_SCHEME", "https")
app.add_url_rule(
self.get_config(app, "REDIRECT_PATH", self.default_redirect_path),
self.redirect_endpoint,
self.login,
)
@property
def redirect_uri(self):
return url_for(
self.redirect_endpoint,
_external=True,
_scheme=self.redirect_scheme,
)
def session(self):
return OAuth2Session(
self.client_id,
redirect_uri=self.redirect_uri,
scope=self.scope,
)
def authorization_url(self, **kwargs):
sess = self.session()
auth_url, state = sess.authorization_url(self.auth_url, **kwargs)
session[self.state_session_key] = state
return auth_url
def login(self):
sess = self.session()
# Get token
try:
sess.fetch_token(
self.token_url,
code=request.args["code"],
client_secret=self.client_secret,
)
# TODO: Check state
except Warning:
# Ignore warnings
pass
except Exception as e:
return self.login_failure_func(e)
# Get profile
try:
profile = self.get_profile(sess)
except Exception as e:
return self.login_failure_func(e)
return self.login_success_func(sess.token, profile)
def login_success(self, f):
self.login_success_func = f
return f
def login_failure(self, f):
self.login_failure_func = f
return f
def get_profile(self, sess):
raise NotImplementedError
| 24.214286 | 77 | 0.675025 | 1,942 | 0.954769 | 0 | 0 | 149 | 0.073255 | 0 | 0 | 141 | 0.069322 |
48ea7b107947ea8206fa8a2bda41ca826b065a52 | 7530 | py | Python | segmentation/utils/transforms.py | voldemortX/DST-CBC | e392313c129f6814c1a1c0f20c0abbd5505c3d7d | ["BSD-3-Clause"] | 103 | 2020-04-21T01:25:16.000Z | 2022-03-24T07:45:45.000Z | segmentation/utils/transforms.py | voldemortX/DST-CBC | e392313c129f6814c1a1c0f20c0abbd5505c3d7d | ["BSD-3-Clause"] | 13 | 2021-03-24T06:52:21.000Z | 2022-01-18T08:17:50.000Z | segmentation/utils/transforms.py | voldemortX/DST-CBC | e392313c129f6814c1a1c0f20c0abbd5505c3d7d | ["BSD-3-Clause"] | 12 | 2020-04-29T02:33:11.000Z | 2021-12-28T07:59:20.000Z | # Mostly copied and modified from torch/vision/references/segmentation to support unlabeled data
# Copied functions from fmassa/vision-1 to support multi-dimensional masks loaded from numpy ndarray
import numpy as np
from PIL import Image
import random
import torch
import utils.functional as F
# For 2/3 dimensional tensors only
def get_tensor_image_size(img):
if img.dim() == 2:
h, w = img.size()
else:
h = img.size()[1]
w = img.size()[2]
return h, w
class Compose(object):
def __init__(self, transforms):
self.transforms = transforms
def __call__(self, image, target, *args):
for t in self.transforms:
image, target = t(image, target)
return (image, target, *args)
class Resize(object):
def __init__(self, size_image, size_label):
self.size_image = size_image
self.size_label = size_label
def __call__(self, image, target):
image = image if type(image) == str else F.resize(image, self.size_image, interpolation=Image.LINEAR)
target = target if type(target) == str else F.resize(target, self.size_label, interpolation=Image.NEAREST)
return image, target
# Pad image with zeros, yet pad target with 255 (ignore label) on bottom & right if
# given a bigger desired size (or else nothing is done at all)
class ZeroPad(object):
def __init__(self, size):
self.h, self.w = size
@staticmethod
def zero_pad(image, target, h, w):
oh, ow = get_tensor_image_size(image)
pad_h = h - oh if oh < h else 0
pad_w = w - ow if ow < w else 0
image = F.pad(image, (0, 0, pad_w, pad_h), fill=0)
target = target if type(target) == str else F.pad(target, (0, 0, pad_w, pad_h), fill=255)
return image, target
def __call__(self, image, target):
return self.zero_pad(image, target, self.h, self.w)
class RandomResize(object):
def __init__(self, min_size, max_size=None):
self.min_size = min_size
if max_size is None:
max_size = min_size
self.max_size = max_size
def __call__(self, image, target):
min_h, min_w = self.min_size
max_h, max_w = self.max_size
h = random.randint(min_h, max_h)
w = random.randint(min_w, max_w)
image = F.resize(image, (h, w), interpolation=Image.LINEAR)
target = target if type(target) == str else F.resize(target, (h, w), interpolation=Image.NEAREST)
return image, target
class RandomScale(object):
def __init__(self, min_scale, max_scale=None):
self.min_scale = min_scale
if max_scale is None:
max_scale = min_scale
self.max_scale = max_scale
def __call__(self, image, target):
scale = random.uniform(self.min_scale, self.max_scale)
h, w = get_tensor_image_size(image)
h = int(scale * h)
w = int(scale * w)
image = F.resize(image, (h, w), interpolation=Image.LINEAR)
target = target if type(target) == str else F.resize(target, (h, w), interpolation=Image.NEAREST)
return image, target
class RandomCrop(object):
def __init__(self, size):
self.size = size
@staticmethod
def get_params(img, output_size):
h, w = get_tensor_image_size(img)
th, tw = output_size
if w == tw and h == th:
return 0, 0, h, w
i = random.randint(0, h - th)
j = random.randint(0, w - tw)
return i, j, th, tw
def __call__(self, image, target):
# Pad if needed
ih, iw = get_tensor_image_size(image)
if ih < self.size[0] or iw < self.size[1]:
image, target = ZeroPad.zero_pad(image, target,
max(self.size[0], ih),
max(self.size[1], iw))
i, j, h, w = self.get_params(image, self.size)
image = F.crop(image, i, j, h, w)
target = target if type(target) == str else F.crop(target, i, j, h, w)
return image, target
class RandomHorizontalFlip(object):
def __init__(self, flip_prob):
self.flip_prob = flip_prob
def __call__(self, image, target):
t = random.random()
if t < self.flip_prob:
image = F.hflip(image)
target = target if (type(target) == str or t >= self.flip_prob) else F.hflip(target)
return image, target
class ToTensor(object):
def __init__(self, keep_scale=False, reverse_channels=False):
# keep_scale = True => Images or whatever are not divided by 255
# reverse_channels = True => RGB images are changed to BGR(the default behavior of openCV & Caffe,
# let's wish them all go to heaven,
# for they wasted me days!)
self.keep_scale = keep_scale
self.reverse_channels = reverse_channels
def __call__(self, image, target):
image = image if type(image) == str else self._pil_to_tensor(image)
target = target if type(target) == str else self.label_to_tensor(target)
return image, target
@staticmethod
def label_to_tensor(pic): # 3 dimensional arrays or normal segmentation masks
if isinstance(pic, np.ndarray):
return torch.as_tensor(pic.transpose((2, 0, 1)), dtype=torch.float32)
else:
return torch.as_tensor(np.asarray(pic).copy(), dtype=torch.int64)
def _pil_to_tensor(self, pic):
# Convert a PIL Image to tensor(a direct copy)
if pic.mode == 'I':
img = torch.from_numpy(np.array(pic, np.int32, copy=False))
elif pic.mode == 'I;16':
img = torch.from_numpy(np.array(pic, np.int16, copy=False))
elif pic.mode == 'F':
img = torch.from_numpy(np.array(pic, np.float32, copy=False))
elif pic.mode == '1':
img = 255 * torch.from_numpy(np.array(pic, np.uint8, copy=False))
else:
img = torch.ByteTensor(torch.ByteStorage.from_buffer(pic.tobytes()))
# PIL image mode: L, LA, P, I, F, RGB, YCbCr, RGBA, CMYK
if pic.mode == 'YCbCr':
nchannel = 3
elif pic.mode == 'I;16':
nchannel = 1
else:
nchannel = len(pic.mode)
img = img.view(pic.size[1], pic.size[0], nchannel)
if self.reverse_channels: # Beware this only works with 3 channels(can't use -1 with tensors)
img = img[:, :, [2, 1, 0]]
# put it from HWC to CHW format
# yikes, this transpose takes 80% of the loading time/CPU
img = img.transpose(0, 1).transpose(0, 2).contiguous()
if isinstance(img, torch.ByteTensor):
if self.keep_scale:
return img.float()
else:
return img.float().div(255)
else:
return img
class Normalize(object):
def __init__(self, mean, std):
self.mean = mean
self.std = std
def __call__(self, image, target):
image = F.normalize(image, mean=self.mean, std=self.std)
return image, target
# Init with a python list as the map(mainly for cityscapes's id -> train_id)
class LabelMap(object):
def __init__(self, label_id_map):
self.label_id_map = torch.tensor(label_id_map)
def __call__(self, image, target):
target = target if type(target) == str else self.label_id_map[target]
return image, target
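# Example composition (illustrative only: the exact order, crop size and
# normalization statistics below are assumptions, not part of this module).
# String targets, used to mark unlabeled images, pass through every transform
# untouched.
#
#   joint_transforms = Compose([
#       ToTensor(),
#       RandomScale(min_scale=0.5, max_scale=1.5),
#       RandomCrop(size=(321, 321)),
#       RandomHorizontalFlip(flip_prob=0.5),
#       Normalize(mean=(0.485, 0.456, 0.406), std=(0.229, 0.224, 0.225)),
#       LabelMap(label_id_map)
#   ])
#   image, label = joint_transforms(pil_image, pil_label)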
| 34.541284 | 114 | 0.6 | 6,784 | 0.90093 | 0 | 0 | 964 | 0.128021 | 0 | 0 | 1,140 | 0.151394 |
48ea83dadb4e88f0d593497119582f4e6d402985 | 9036 | py | Python | server.py | drunkHatch/CMPUT404-assignment-webserver | 37336241ae790509804569834e2063893d37db44 | ["Apache-2.0"] | null | null | null | server.py | drunkHatch/CMPUT404-assignment-webserver | 37336241ae790509804569834e2063893d37db44 | ["Apache-2.0"] | null | null | null | server.py | drunkHatch/CMPUT404-assignment-webserver | 37336241ae790509804569834e2063893d37db44 | ["Apache-2.0"] | null | null | null | # coding: utf-8
import socketserver
import re
import socket
import datetime
import os
import mimetypes as MT
import sys
# Copyright 2013 Abram Hindle, Eddie Antonio Santos
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Furthermore it is derived from the Python documentation examples thus
# some of the code is Copyright © 2001-2013 Python Software
# Foundation; All Rights Reserved
#
# http://docs.python.org/2/library/socketserver.html
#
# run: python freetests.py
# try: curl -v -X GET http://127.0.0.1:8080/
# status codes that can be handled
STATUS_CODE_RESPONSE = {
0: " 0 Surprise!",
200: " 200 OK",
301: " 301 Moved Permanently",
404: " 404 Not Found",
405: " 405 Method Not Allowed"
}
# request methods that can be handled
HTTP_REQUEST_METHODS = {
"GET": 1,
}
# some hard coded text
END_OF_LINE_RESPONSE = "\r\n"
PROTOCOL_RESPONSE = "HTTP/1.1"
DIRECTORY_TO_SERVE = "www"
# file-open result codes
GOODFILE = 1
ISADIRECTORY = 2
NOFILE = 3
# response generator class
class MyServerResponse:
def __init__(self, status=0, expire_time="-1", content_type="default", \
accept_ranges="none"):
self.response_header = {
"status_response": PROTOCOL_RESPONSE + STATUS_CODE_RESPONSE[status],
"date_response": "Date: " + datetime.datetime.now().\
strftime('%A, %d %b %Y %X %Z'),
"expires": "Expires: " + expire_time,
"content_type": "Content-Type: " + content_type,
"accept_ranges": "Accept-Ranges: " + accept_ranges,
"redirect_address": "Location: http://",
"allow_header": "ALlow: GET"
}
# send header via various status_code
def send_header(self, conn, status_code):
tmp = self.response_header["status_response"] + END_OF_LINE_RESPONSE
conn.sendall(tmp.encode("utf-8"))
if status_code == 200:
tmp = self.response_header["expires"] + END_OF_LINE_RESPONSE
conn.sendall(tmp.encode("utf-8"))
tmp = self.response_header["content_type"] + END_OF_LINE_RESPONSE
conn.sendall(tmp.encode("utf-8"))
elif status_code == 301:
tmp = self.response_header["redirect_address"] + \
END_OF_LINE_RESPONSE
conn.sendall(tmp.encode("utf-8"))
elif status_code == 405:
tmp = self.response_header["allow_header"] + END_OF_LINE_RESPONSE
conn.sendall(tmp.encode("utf-8"))
def set_status_response(self, status_code):
self.response_header["status_response"] = \
PROTOCOL_RESPONSE + STATUS_CODE_RESPONSE[status_code]
# request object for storing received request attributes
class MyServerRequest:
def __init__(self):
self.method = None
self.url = None
def method_is_valid(self):
if self.method in HTTP_REQUEST_METHODS:
return True
else:
return False
# add more implementation here
def url_is_valid(self):
return True
class MyWebServer(socketserver.BaseRequestHandler):
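    # Request flow: read the raw request until "\r\n", parse the method and
    # URL, answer non-GET methods with 405, map the URL onto ./www (404 when
    # the file is missing or the path escapes ./www, 301 for directories
    # requested without a trailing slash), then send the status line, headers
    # and file body over the socket.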
def handle(self):
rest_protocol_flag = False
standard_rest_cmd = "GET / HTTP/1.1"
# init the socket
self.request.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
full_data = b""
with self.request as conn:
# declaration here
new_request = MyServerRequest()
status_code = 0
open_file = True
file = None
content_type = "void of magic"
file_name = "none"
type_of_file = "default"
open_result = -100
new_response = MyServerResponse()
# recv all data
while True:
data = conn.recv(1024)
if not data: break
full_data += data
if b"\r\n" in data:
break
if b"utf" in full_data:
print(full_data)
pass
str_full_data = full_data.decode("utf-8")
splited_commands = re.split('[\r|\n]+', str_full_data)
whole_request = splited_commands[0].split(' ')
# if we can find request from recved data
if len(whole_request) > 0:
new_request.method = whole_request[0] # try to pick methods
new_request.url = whole_request[1] # try to pick url
# if method we get could not be handled
if not new_request.method_is_valid():
status_code = 405
open_file = False
content_type = "none"
new_response.set_status_response(status_code)
# if no errors occured and then try to open requested url
if open_file:
open_result, file, file_name = openRequestedFile(new_request.url)
# try opening requested file, and return corresponding status_code
status_code = checkErrorsOfOpenedFile\
(status_code, open_result, file, file_name)
# SECURITY: check permission of opened file
status_code = checkPermissionOfRequestedFile\
(status_code, open_result, file, file_name)
new_response.set_status_response(status_code)
if status_code == 200 and file_name != None:
type_of_file = MT.guess_type(file_name, False)[0]
elif status_code == 301:
new_response.response_header["redirect_address"] += \
self.server.server_address[0] + ":" + \
str(self.server.server_address[1]) + \
new_request.url + "/"
new_response.set_status_response(status_code)
if open_result == GOODFILE and type_of_file != None:
new_response.response_header["content_type"] = "Content-Type: "
new_response.response_header["content_type"] += type_of_file
new_response.send_header(conn, status_code)
self.request.sendall(b"\r\n")
# then open file/directory and send it
if file:
self.request.sendfile(file)
#self.request.sendall(b"\r\n")
conn.close()
# argument: requested url
# return value: open file result, opened file object, local path
def openRequestedFile(client_request_url):
cru = client_request_url
if cru[-1] == r'/':
cru += "index.html"
complete_path = DIRECTORY_TO_SERVE + cru
try:
result = open(complete_path, 'rb')
content_type = cru.split(".")
return GOODFILE, result, cru
except IsADirectoryError as e:
return ISADIRECTORY, None, None
except FileNotFoundError as n:
return NOFILE, None, None
# check type and error of opened file
def checkErrorsOfOpenedFile(status_code,open_result, file, file_name):
if open_result == GOODFILE:
status_code = 200
type_of_file = MT.guess_type(file_name, False)[0]
elif open_result == ISADIRECTORY:
status_code = 301
elif open_result == NOFILE:
status_code = 404
return status_code
# SECURITY: check the permission of opened file
def checkPermissionOfRequestedFile(status_code,open_result, file, file_name):
if file_name == None:
return status_code
abs_path_of_serving_dir = os.getcwd()
abs_path_of_serving_dir += "/www/"
length_of_serving_dir = len(abs_path_of_serving_dir)
abs_path_of_request = os.path.abspath(file.name)
length_of_requested_object = len(abs_path_of_request)
if length_of_serving_dir > length_of_requested_object:
status_code = 404
elif abs_path_of_serving_dir != abs_path_of_request[:length_of_serving_dir]:
status_code = 404
return status_code
if __name__ == "__main__":
HOST, PORT = "localhost", 8080
socketserver.TCPServer.allow_reuse_address = True
# Create the server, binding to localhost on port 8080
server = socketserver.TCPServer((HOST, PORT), MyWebServer)
# https://stackoverflow.com/questions/15260558/python-tcpserver-address-already-in-use-but-i-close-the-server-and-i-use-allow
# Activate the server; this will keep running until you
# interrupt the program with Ctrl-C
try:
server.serve_forever()
except KeyboardInterrupt: # exit if ctrl+C
sys.exit(0)
| 34.888031 | 129 | 0.623174 | 5,355 | 0.592564 | 0 | 0 | 0 | 0 | 0 | 0 | 2,669 | 0.295341 |
48ebc333c8d0ba26cd1d7f0f9c59510601ab4ec4 | 1788 | py | Python | cloudkitty/rating/hash/controllers/root.py | wanghuiict/cloudkitty | 11ff713042eb0354f497f7051130630c46860735 | ["Apache-2.0"] | 97 | 2015-10-18T02:53:17.000Z | 2022-03-07T05:15:39.000Z | cloudkitty/rating/hash/controllers/root.py | shanafang9/cloudkitty | 911c90569ccb09ecf0d7aa11a5a707c8ebda09cf | ["Apache-2.0"] | 1 | 2017-11-29T15:39:27.000Z | 2017-11-29T15:39:27.000Z | cloudkitty/rating/hash/controllers/root.py | shanafang9/cloudkitty | 911c90569ccb09ecf0d7aa11a5a707c8ebda09cf | ["Apache-2.0"] | 54 | 2015-10-27T10:55:02.000Z | 2022-02-18T08:23:19.000Z | # -*- coding: utf-8 -*-
# Copyright 2015 Objectif Libre
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
from wsme import types as wtypes
import wsmeext.pecan as wsme_pecan
from cloudkitty import rating
from cloudkitty.rating.hash.controllers import field as field_api
from cloudkitty.rating.hash.controllers import group as group_api
from cloudkitty.rating.hash.controllers import mapping as mapping_api
from cloudkitty.rating.hash.controllers import service as service_api
from cloudkitty.rating.hash.controllers import threshold as threshold_api
from cloudkitty.rating.hash.datamodels import mapping as mapping_models
class HashMapConfigController(rating.RatingRestControllerBase):
"""Controller exposing all management sub controllers."""
_custom_actions = {
'types': ['GET']
}
services = service_api.HashMapServicesController()
fields = field_api.HashMapFieldsController()
groups = group_api.HashMapGroupsController()
mappings = mapping_api.HashMapMappingsController()
thresholds = threshold_api.HashMapThresholdsController()
@wsme_pecan.wsexpose([wtypes.text])
def get_types(self):
"""Return the list of every mapping type available.
"""
return mapping_models.MAP_TYPE.values
| 38.042553 | 78 | 0.758949 | 636 | 0.355705 | 0 | 0 | 179 | 0.100112 | 0 | 0 | 750 | 0.419463 |
48ec30ea94720d1931e1f3786be697d0ca01359f | 8431 | py | Python | .test/test/task2/Aufgabe1/python-lib/cuddlefish/docs/webdocs.py | sowinski/testsubtree | d09b72e6b366e8e29e038445a1fa6987b2456625 | ["MIT"] | null | null | null | .test/test/task2/Aufgabe1/python-lib/cuddlefish/docs/webdocs.py | sowinski/testsubtree | d09b72e6b366e8e29e038445a1fa6987b2456625 | ["MIT"] | null | null | null | .test/test/task2/Aufgabe1/python-lib/cuddlefish/docs/webdocs.py | sowinski/testsubtree | d09b72e6b366e8e29e038445a1fa6987b2456625 | ["MIT"] | null | null | null | import os, re, errno
import markdown
import cgi
from cuddlefish import packaging
from cuddlefish.docs import apirenderer
from cuddlefish._version import get_versions
INDEX_PAGE = '/doc/static-files/base.html'
BASE_URL_INSERTION_POINT = '<base '
VERSION_INSERTION_POINT = '<div id="version">'
THIRD_PARTY_PACKAGE_SUMMARIES = '<ul id="third-party-package-summaries">'
HIGH_LEVEL_PACKAGE_SUMMARIES = '<ul id="high-level-package-summaries">'
LOW_LEVEL_PACKAGE_SUMMARIES = '<ul id="low-level-package-summaries">'
CONTENT_ID = '<div id="main-content">'
TITLE_ID = '<title>'
DEFAULT_TITLE = 'Add-on SDK Documentation'
def get_documentation(package_name, modules_json, doc_path):
documented_modules = []
for root, dirs, files in os.walk(doc_path):
subdir_path = root.split(os.sep)[len(doc_path.split(os.sep)):]
for filename in files:
if filename.endswith(".md"):
modname = filename[:-len(".md")]
modpath = subdir_path + [modname]
documented_modules.append(modpath)
return documented_modules
def tag_wrap(text, tag, attributes={}):
result = '\n<' + tag
for name in attributes.keys():
result += ' ' + name + '=' + '"' + attributes[name] + '"'
result +='>' + text + '</'+ tag + '>\n'
return result
def is_third_party(package_json):
return (not is_high_level(package_json)) and \
(not(is_low_level(package_json)))
def is_high_level(package_json):
return 'jetpack-high-level' in package_json.get('keywords', [])
def is_low_level(package_json):
return 'jetpack-low-level' in package_json.get('keywords', [])
def insert_after(target, insertion_point_id, text_to_insert):
insertion_point = target.find(insertion_point_id) + len(insertion_point_id)
return target[:insertion_point] + text_to_insert + target[insertion_point:]
class WebDocs(object):
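    # Renders the SDK documentation site: the constructor builds a base HTML
    # page with package summaries grouped into third-party / high-level /
    # low-level sections, and the create_*_page() methods splice rendered
    # guide, module or package content into that template.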
def __init__(self, root, base_url = None):
self.root = root
self.pkg_cfg = packaging.build_pkg_cfg(root)
self.packages_json = packaging.build_pkg_index(self.pkg_cfg)
self.base_page = self._create_base_page(root, base_url)
def create_guide_page(self, path):
path, ext = os.path.splitext(path)
md_path = path + '.md'
md_content = unicode(open(md_path, 'r').read(), 'utf8')
guide_content = markdown.markdown(md_content)
return self._create_page(guide_content)
def create_module_page(self, path):
path, ext = os.path.splitext(path)
md_path = path + '.md'
module_content = apirenderer.md_to_div(md_path)
return self._create_page(module_content)
def create_package_page(self, package_name):
package_content = self._create_package_detail(package_name)
return self._create_page(package_content)
def _create_page(self, page_content):
page = self._insert_title(self.base_page, page_content)
page = insert_after(page, CONTENT_ID, page_content)
return page.encode('utf8')
def _create_module_list(self, package_json):
package_name = package_json['name']
libs = package_json['files'][1]['lib'][1]
doc_path = package_json.get('doc', None)
if not doc_path:
return ''
modules = get_documentation(package_name, libs, doc_path)
modules.sort()
module_items = ''
relative_doc_path = doc_path[len(self.root) + 1:]
relative_doc_path_pieces = relative_doc_path.split(os.sep)
del relative_doc_path_pieces[-1]
relative_doc_URL = "/".join(relative_doc_path_pieces)
for module in modules:
module_link = tag_wrap('/'.join(module), 'a', \
{'href': relative_doc_URL + '/' + '/'.join(module) + '.html'})
module_items += module_link
return module_items
def _create_package_summaries(self, packages_json, include):
packages = ''
for package_name in packages_json.keys():
package_json = packages_json[package_name]
if not include(package_json):
continue
package_path = self.pkg_cfg["packages"][package_name]["root_dir"]
package_directory = package_path[len(self.root) + 1:]
package_directory = "/".join(package_directory.split(os.sep))
package_link = tag_wrap(package_name, 'a', {'href': \
package_directory + "/" \
+ 'index.html'})
text = tag_wrap(package_link, 'h4')
text += self._create_module_list(package_json)
packages += tag_wrap(text, 'li', {'class':'package-summary', \
'style':'display: block;'})
return packages
def _create_base_page(self, root, base_url):
base_page = unicode(open(root + INDEX_PAGE, 'r').read(), 'utf8')
if base_url:
base_tag = 'href="' + base_url + '"'
base_page = insert_after(base_page, BASE_URL_INSERTION_POINT, base_tag)
sdk_version = get_versions()["version"]
base_page = insert_after(base_page, VERSION_INSERTION_POINT, "Version " + sdk_version)
third_party_summaries = \
self._create_package_summaries(self.packages_json, is_third_party)
base_page = insert_after(base_page, \
THIRD_PARTY_PACKAGE_SUMMARIES, third_party_summaries)
high_level_summaries = \
self._create_package_summaries(self.packages_json, is_high_level)
base_page = insert_after(base_page, \
HIGH_LEVEL_PACKAGE_SUMMARIES, high_level_summaries)
low_level_summaries = \
self._create_package_summaries(self.packages_json, is_low_level)
base_page = insert_after(base_page, \
LOW_LEVEL_PACKAGE_SUMMARIES, low_level_summaries)
return base_page
def _create_package_detail_row(self, field_value, \
field_descriptor, field_name):
meta = tag_wrap(tag_wrap(field_descriptor, 'span', \
{'class':'meta-header'}), 'td')
value = tag_wrap(tag_wrap(field_value, 'span', \
{'class':field_name}), 'td')
return tag_wrap(meta + value, 'tr')
def _create_package_detail_table(self, package_json):
table_contents = ''
if package_json.get('author', None):
table_contents += self._create_package_detail_row(\
cgi.escape(package_json['author']), 'Author', 'author')
if package_json.get('version', None):
table_contents += self._create_package_detail_row(\
package_json['version'], 'Version', 'version')
if package_json.get('license', None):
table_contents += self._create_package_detail_row(\
package_json['license'], 'License', 'license')
if package_json.get('dependencies', None):
table_contents += self._create_package_detail_row(\
', '.join(package_json['dependencies']), \
'Dependencies', 'dependencies')
table_contents += self._create_package_detail_row(\
self._create_module_list(package_json), 'Modules', 'modules')
return tag_wrap(tag_wrap(table_contents, 'tbody'), 'table', \
{'class':'meta-table'})
def _create_package_detail(self, package_name):
package_json = self.packages_json.get(package_name, None)
if not package_json:
raise IOError(errno.ENOENT, 'Package not found')
# pieces of the package detail: 1) title, 2) table, 3) description
package_title = tag_wrap(package_name, 'h1')
table = self._create_package_detail_table(package_json)
description = ''
if package_json.get('readme', None):
description += tag_wrap(tag_wrap(\
markdown.markdown(\
package_json['readme']), 'p'), 'div', {'class':'docs'})
return tag_wrap(package_title + table + description, 'div', \
{'class':'package-detail'})
def _insert_title(self, target, content):
match = re.search('<h1>.*</h1>', content)
if match:
title = match.group(0)[len('<h1>'):-len('</h1>')] + ' - ' + \
DEFAULT_TITLE
else:
title = DEFAULT_TITLE
target = insert_after(target, TITLE_ID, title)
return target
| 44.373684 | 94 | 0.632428 | 6,570 | 0.779267 | 0 | 0 | 0 | 0 | 0 | 0 | 1,014 | 0.12027 |
48eca2b30f95acacb8513624eb0235e73603734b | 183 | py | Python | src/c3nav/site/templatetags/route_render.py | johnjohndoe/c3nav | a17f863a3512e305595c16b0300796b6bae81241 | [
"Apache-2.0"
]
| 132 | 2016-11-12T01:45:23.000Z | 2022-03-08T15:17:10.000Z | src/c3nav/site/templatetags/route_render.py | johnjohndoe/c3nav | a17f863a3512e305595c16b0300796b6bae81241 | [
"Apache-2.0"
]
| 66 | 2016-09-29T09:46:19.000Z | 2022-03-11T23:26:18.000Z | src/c3nav/site/templatetags/route_render.py | johnjohndoe/c3nav | a17f863a3512e305595c16b0300796b6bae81241 | [
"Apache-2.0"
]
| 42 | 2016-09-29T08:34:57.000Z | 2022-03-08T15:17:15.000Z | from django import template
register = template.Library()
@register.filter
def negate(value):
return -value
@register.filter
def subtract(value, arg):
return value - arg
| 13.071429 | 29 | 0.726776 | 0 | 0 | 0 | 0 | 118 | 0.644809 | 0 | 0 | 0 | 0 |
48edc6b7f87e0875d85de78f96a9bd1a71a88a84 | 9,827 | py | Python | coax/experience_replay/_prioritized.py | sleepy-owl/coax | 37c3e667b81537768beb25bb59d0f05124624128 | [
"MIT"
]
| null | null | null | coax/experience_replay/_prioritized.py | sleepy-owl/coax | 37c3e667b81537768beb25bb59d0f05124624128 | [
"MIT"
]
| null | null | null | coax/experience_replay/_prioritized.py | sleepy-owl/coax | 37c3e667b81537768beb25bb59d0f05124624128 | [
"MIT"
]
| null | null | null | # ------------------------------------------------------------------------------------------------ #
# MIT License #
# #
# Copyright (c) 2020, Microsoft Corporation #
# #
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software #
# and associated documentation files (the "Software"), to deal in the Software without #
# restriction, including without limitation the rights to use, copy, modify, merge, publish, #
# distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the #
# Software is furnished to do so, subject to the following conditions: #
# #
# The above copyright notice and this permission notice shall be included in all copies or #
# substantial portions of the Software. #
# #
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING #
# BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND #
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, #
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, #
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. #
# ------------------------------------------------------------------------------------------------ #
import jax
import numpy as onp
import chex
from ..reward_tracing import TransitionBatch
from ..utils import SumTree
from ._base import BaseReplayBuffer
__all__ = (
'PrioritizedReplayBuffer',
)
class PrioritizedReplayBuffer(BaseReplayBuffer):
r"""
A simple ring buffer for experience replay, with prioritized sampling.
This class uses *proportional* sampling, which means that the transitions are sampled with
relative probability :math:`p_i` defined as:
.. math::
p_i\ =\ \frac
{\left(|\mathcal{A}_i| + \epsilon\right)^\alpha}
{\sum_{j=1}^N \left(|\mathcal{A}_j| + \epsilon\right)^\alpha}
Here :math:`\mathcal{A}_i` are advantages provided at insertion time and :math:`N` is the
capacity of the buffer, which may be quite large. The :math:`\mathcal{A}_i` are typically just
TD errors collected from a value-function updater, e.g. :func:`QLearning.td_error
<coax.td_learning.QLearning.td_error>`.
Since the prioritized samples are biased, the :attr:`sample` method also produces non-trivial
importance weights (stored in the :class:`TransitionBatch.W
<coax.reward_tracing.TransitionBatch>` attribute). The logic for constructing these weights for
a sample of batch size :math:`n` is:
.. math::
w_i\ =\ \frac{\left(Np_i\right)^{-\beta}}{\max_{j=1}^n \left(Np_j\right)^{-\beta}}
See section 3.4 of https://arxiv.org/abs/1511.05952 for more details.
Parameters
----------
capacity : positive int
The capacity of the experience replay buffer.
alpha : positive float, optional
The sampling temperature :math:`\alpha>0`.
beta : positive float, optional
The importance-weight exponent :math:`\beta>0`.
epsilon : positive float, optional
The small regulator :math:`\epsilon>0`.
random_seed : int, optional
To get reproducible results.
"""
def __init__(self, capacity, alpha=1.0, beta=1.0, epsilon=1e-4, random_seed=None):
if not (isinstance(capacity, int) and capacity > 0):
raise TypeError(f"capacity must be a positive int, got: {capacity}")
if not (isinstance(alpha, (float, int)) and alpha > 0):
raise TypeError(f"alpha must be a positive float, got: {alpha}")
if not (isinstance(beta, (float, int)) and beta > 0):
raise TypeError(f"beta must be a positive float, got: {beta}")
if not (isinstance(epsilon, (float, int)) and epsilon > 0):
raise TypeError(f"epsilon must be a positive float, got: {epsilon}")
self._capacity = int(capacity)
self._alpha = float(alpha)
self._beta = float(beta)
self._epsilon = float(epsilon)
self._random_seed = random_seed
self._rnd = onp.random.RandomState(random_seed)
self.clear() # sets: self._deque, self._index
@property
def capacity(self):
return self._capacity
@property
def alpha(self):
return self._alpha
@alpha.setter
def alpha(self, new_alpha):
if not (isinstance(new_alpha, (float, int)) and new_alpha > 0):
raise TypeError(f"alpha must be a positive float, got: {new_alpha}")
if onp.isclose(new_alpha, self._alpha, rtol=0.01):
return # noop if new value is too close to old value (not worth the computation cost)
new_values = onp.where(
self._sumtree.values <= 0, 0., # only change exponents for positive values
onp.exp(onp.log(onp.maximum(self._sumtree.values, 1e-15)) * (new_alpha / self._alpha)))
self._sumtree.set_values(..., new_values)
self._alpha = float(new_alpha)
@property
def beta(self):
return self._beta
@beta.setter
def beta(self, new_beta):
if not (isinstance(new_beta, (float, int)) and new_beta > 0):
raise TypeError(f"beta must be a positive float, got: {new_beta}")
self._beta = float(new_beta)
@property
def epsilon(self):
return self._epsilon
@epsilon.setter
def epsilon(self, new_epsilon):
if not (isinstance(new_epsilon, (float, int)) and new_epsilon > 0):
raise TypeError(f"epsilon must be a positive float, got: {new_epsilon}")
self._epsilon = float(new_epsilon)
def add(self, transition_batch, Adv):
r"""
Add a transition to the experience replay buffer.
Parameters
----------
transition_batch : TransitionBatch
A :class:`TransitionBatch <coax.reward_tracing.TransitionBatch>` object.
Adv : ndarray
A batch of advantages, used to construct the priorities :math:`p_i`.
"""
if not isinstance(transition_batch, TransitionBatch):
raise TypeError(
f"transition_batch must be a TransitionBatch, got: {type(transition_batch)}")
transition_batch.idx = self._index + onp.arange(transition_batch.batch_size)
idx = transition_batch.idx % self.capacity # wrap around
chex.assert_equal_shape([idx, Adv])
self._storage[idx] = list(transition_batch.to_singles())
self._sumtree.set_values(idx, onp.power(onp.abs(Adv) + self.epsilon, self.alpha))
self._index += transition_batch.batch_size
def sample(self, batch_size=32):
r"""
Get a batch of transitions to be used for bootstrapped updates.
Parameters
----------
batch_size : positive int, optional
The desired batch size of the sample.
Returns
-------
transitions : TransitionBatch
A :class:`TransitionBatch <coax.reward_tracing.TransitionBatch>` object.
"""
idx = self._sumtree.sample(n=batch_size)
P = self._sumtree.values[idx] / self._sumtree.root_value # prioritized, biased propensities
W = onp.power(P * len(self), -self.beta) # inverse propensity weights (β≈1)
W /= W.max() # for stability, ensure only down-weighting (see sec. 3.4 of arxiv:1511.05952)
transition_batch = _concatenate_leaves(self._storage[idx])
chex.assert_equal_shape([transition_batch.W, W])
transition_batch.W *= W
return transition_batch
def update(self, idx, Adv):
r"""
Update the priority weights of transitions previously added to the buffer.
Parameters
----------
idx : 1d array of ints
The identifiers of the transitions to be updated.
Adv : ndarray
The corresponding updated advantages.
"""
idx = onp.asarray(idx, dtype='int32')
Adv = onp.asarray(Adv, dtype='float32')
chex.assert_equal_shape([idx, Adv])
chex.assert_rank([idx, Adv], 1)
idx_lookup = idx % self.capacity # wrap around
new_values = onp.where(
_get_transition_batch_idx(self._storage[idx_lookup]) == idx, # only update if ids match
onp.power(onp.abs(Adv) + self.epsilon, self.alpha),
self._sumtree.values[idx_lookup])
self._sumtree.set_values(idx_lookup, new_values)
def clear(self):
r""" Clear the experience replay buffer. """
self._storage = onp.full(shape=(self.capacity,), fill_value=None, dtype='object')
self._sumtree = SumTree(capacity=self.capacity)
self._index = 0
def __len__(self):
return min(self.capacity, self._index)
def __bool__(self):
return bool(len(self))
def __iter__(self):
return iter(self._storage[:len(self)])
def _concatenate_leaves(pytrees):
return jax.tree_multimap(lambda *leaves: onp.concatenate(leaves, axis=0), *pytrees)
@onp.vectorize
def _get_transition_batch_idx(transition):
return transition.idx
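# Minimal usage sketch (added for illustration, not part of the original file).
# `transition_batch` is assumed to come from a coax reward tracer and `td_error`
# from some value-function updater; both names are placeholders here.
#
#   buffer = PrioritizedReplayBuffer(capacity=10000, alpha=0.6, beta=0.4)
#   buffer.add(transition_batch, Adv=td_error)    # priorities ~ (|Adv| + epsilon) ** alpha
#   batch = buffer.sample(batch_size=32)          # batch.W is scaled by importance weights
#   buffer.update(batch.idx, Adv=new_td_error)    # re-prioritize after the learning step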
| 38.996032 | 100 | 0.596418 | 7,396 | 0.752391 | 0 | 0 | 1,467 | 0.149237 | 0 | 0 | 5,585 | 0.568159 |
48edd7f48e568a644eaeb1b10b708e137aa7c9cf | 433 | py | Python | src/OTLMOW/OEFModel/Classes/Wilddet.py | davidvlaminck/OTLClassPython | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
]
| 2 | 2022-02-01T08:58:11.000Z | 2022-02-08T13:35:17.000Z | src/OTLMOW/OEFModel/Classes/Wilddet.py | davidvlaminck/OTLMOW | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
]
| null | null | null | src/OTLMOW/OEFModel/Classes/Wilddet.py | davidvlaminck/OTLMOW | 71330afeb37c3ea6d9981f521ff8f4a3f8b946fc | [
"MIT"
]
| null | null | null | # coding=utf-8
from OTLMOW.OEFModel.EMObject import EMObject
# Generated with OEFClassCreator. To modify: extend, do not edit
class Wilddet(EMObject):
"""Een wilddetectiesysteem zal de weggebruikers waarschuwen bij de aanwezigheid van eventueel overstekend wild"""
typeURI = 'https://lgc.data.wegenenverkeer.be/ns/installatie#Wilddet'
label = 'Wilddetectiesysteem'
def __init__(self):
super().__init__()
| 28.866667 | 117 | 0.745958 | 303 | 0.699769 | 0 | 0 | 0 | 0 | 0 | 0 | 271 | 0.625866 |
48eeffaa35d544f23807d7f9663c5e18d1819a1f | 16,332 | py | Python | test/python/testworkflow.py | kokizzu/txtai | 1a3848bac006e9963ad2eef466405f8da644fecb | [
"Apache-2.0"
]
| null | null | null | test/python/testworkflow.py | kokizzu/txtai | 1a3848bac006e9963ad2eef466405f8da644fecb | [
"Apache-2.0"
]
| 47 | 2021-10-02T22:48:03.000Z | 2021-12-29T02:36:20.000Z | test/python/testworkflow.py | kokizzu/txtai | 1a3848bac006e9963ad2eef466405f8da644fecb | [
"Apache-2.0"
]
| null | null | null | """
Workflow module tests
"""
import contextlib
import glob
import io
import os
import tempfile
import sys
import unittest
import numpy as np
import torch
from txtai.api import API
from txtai.embeddings import Documents, Embeddings
from txtai.pipeline import Nop, Segmentation, Summary, Translation, Textractor
from txtai.workflow import Workflow, Task, ConsoleTask, ExportTask, FileTask, ImageTask, RetrieveTask, StorageTask, WorkflowTask
# pylint: disable = C0411
from utils import Utils
# pylint: disable=R0904
class TestWorkflow(unittest.TestCase):
"""
Workflow tests.
"""
@classmethod
def setUpClass(cls):
"""
Initialize test data.
"""
# Default YAML workflow configuration
cls.config = """
# Embeddings index
writable: true
embeddings:
scoring: bm25
path: google/bert_uncased_L-2_H-128_A-2
content: true
# Text segmentation
segmentation:
sentences: true
# Workflow definitions
workflow:
index:
tasks:
- action: segmentation
- action: index
search:
tasks:
- search
transform:
tasks:
- transform
"""
def testBaseWorkflow(self):
"""
Tests a basic workflow
"""
translate = Translation()
        # Workflow that translates text to Spanish
workflow = Workflow([Task(lambda x: translate(x, "es"))])
results = list(workflow(["The sky is blue", "Forest through the trees"]))
self.assertEqual(len(results), 2)
def testChainWorkflow(self):
"""
Tests a chain of workflows
"""
workflow1 = Workflow([Task(lambda x: [y * 2 for y in x])])
workflow2 = Workflow([Task(lambda x: [y - 1 for y in x])], batch=4)
results = list(workflow2(workflow1([1, 2, 4, 8, 16, 32])))
self.assertEqual(results, [1, 3, 7, 15, 31, 63])
def testComplexWorkflow(self):
"""
Tests a complex workflow
"""
textractor = Textractor(paragraphs=True, minlength=150, join=True)
summary = Summary("t5-small")
embeddings = Embeddings({"path": "sentence-transformers/nli-mpnet-base-v2"})
documents = Documents()
def index(x):
documents.add(x)
return x
# Extract text and summarize articles
articles = Workflow([FileTask(textractor), Task(lambda x: summary(x, maxlength=15))])
# Complex workflow that extracts text, runs summarization then loads into an embeddings index
tasks = [WorkflowTask(articles, r".\.pdf$"), Task(index, unpack=False)]
data = ["file://" + Utils.PATH + "/article.pdf", "Workflows can process audio files, documents and snippets"]
# Convert file paths to data tuples
data = [(x, element, None) for x, element in enumerate(data)]
# Execute workflow, discard results as they are streamed
workflow = Workflow(tasks)
data = list(workflow(data))
# Build the embeddings index
embeddings.index(documents)
# Cleanup temporary storage
documents.close()
# Run search and validate result
index, _ = embeddings.search("search text", 1)[0]
self.assertEqual(index, 0)
self.assertEqual(data[0][1], "txtai builds an AI-powered index over sections")
def testConcurrentWorkflow(self):
"""
Tests running concurrent task actions
"""
nop = Nop()
workflow = Workflow([Task([nop, nop], concurrency="thread")])
results = list(workflow([2, 4]))
self.assertEqual(results, [(2, 2), (4, 4)])
workflow = Workflow([Task([nop, nop], concurrency="process")])
results = list(workflow([2, 4]))
self.assertEqual(results, [(2, 2), (4, 4)])
workflow = Workflow([Task([nop, nop], concurrency="unknown")])
results = list(workflow([2, 4]))
self.assertEqual(results, [(2, 2), (4, 4)])
def testConsoleWorkflow(self):
"""
Tests a console task
"""
        # Console output
workflow = Workflow([ConsoleTask()])
output = io.StringIO()
with contextlib.redirect_stdout(output):
list(workflow([{"id": 1, "text": "Sentence 1"}, {"id": 2, "text": "Sentence 2"}]))
self.assertIn("Sentence 2", output.getvalue())
def testExportWorkflow(self):
"""
Tests an export task
"""
# Excel export
path = os.path.join(tempfile.gettempdir(), "export.xlsx")
workflow = Workflow([ExportTask(output=path)])
list(workflow([{"id": 1, "text": "Sentence 1"}, {"id": 2, "text": "Sentence 2"}]))
self.assertGreater(os.path.getsize(path), 0)
# Export CSV
path = os.path.join(tempfile.gettempdir(), "export.csv")
workflow = Workflow([ExportTask(output=path)])
list(workflow([{"id": 1, "text": "Sentence 1"}, {"id": 2, "text": "Sentence 2"}]))
self.assertGreater(os.path.getsize(path), 0)
# Export CSV with timestamp
path = os.path.join(tempfile.gettempdir(), "export-timestamp.csv")
workflow = Workflow([ExportTask(output=path, timestamp=True)])
list(workflow([{"id": 1, "text": "Sentence 1"}, {"id": 2, "text": "Sentence 2"}]))
# Find timestamped file and ensure it has data
path = glob.glob(os.path.join(tempfile.gettempdir(), "export-timestamp*.csv"))[0]
self.assertGreater(os.path.getsize(path), 0)
def testExtractWorkflow(self):
"""
Tests column extraction tasks
"""
workflow = Workflow([Task(lambda x: x, unpack=False, column=0)], batch=1)
results = list(workflow([(0, 1)]))
self.assertEqual(results[0], 0)
results = list(workflow([(0, (1, 2), None)]))
self.assertEqual(results[0], (0, 1, None))
results = list(workflow([1]))
self.assertEqual(results[0], 1)
def testImageWorkflow(self):
"""
Tests an image task
"""
workflow = Workflow([ImageTask()])
results = list(workflow([Utils.PATH + "/books.jpg"]))
self.assertEqual(results[0].size, (1024, 682))
def testInvalidWorkflow(self):
"""
Tests task with invalid parameters
"""
with self.assertRaises(TypeError):
Task(invalid=True)
def testMergeWorkflow(self):
"""
Tests merge tasks
"""
task = Task([lambda x: [pow(y, 2) for y in x], lambda x: [pow(y, 3) for y in x]], merge="hstack")
# Test hstack (column-wise) merge
workflow = Workflow([task])
results = list(workflow([2, 4]))
self.assertEqual(results, [(4, 8), (16, 64)])
# Test vstack (row-wise) merge
task.merge = "vstack"
results = list(workflow([2, 4]))
self.assertEqual(results, [4, 8, 16, 64])
# Test concat (values joined into single string) merge
task.merge = "concat"
results = list(workflow([2, 4]))
self.assertEqual(results, ["4. 8", "16. 64"])
# Test no merge
task.merge = None
results = list(workflow([2, 4, 6]))
self.assertEqual(results, [[4, 16, 36], [8, 64, 216]])
# Test generated (id, data, tag) tuples are properly returned
workflow = Workflow([Task(lambda x: [(0, y, None) for y in x])])
results = list(workflow([(1, "text", "tags")]))
self.assertEqual(results[0], (0, "text", None))
def testMergeUnbalancedWorkflow(self):
"""
        Test merge tasks with unbalanced outputs (i.e. one action produces more output than another for the same input).
"""
nop = Nop()
segment1 = Segmentation(sentences=True)
task = Task([nop, segment1])
# Test hstack
workflow = Workflow([task])
results = list(workflow(["This is a test sentence. And another sentence to split."]))
self.assertEqual(
results, [("This is a test sentence. And another sentence to split.", ["This is a test sentence.", "And another sentence to split."])]
)
# Test vstack
task.merge = "vstack"
workflow = Workflow([task])
results = list(workflow(["This is a test sentence. And another sentence to split."]))
self.assertEqual(
results, ["This is a test sentence. And another sentence to split.", "This is a test sentence.", "And another sentence to split."]
)
def testNumpyWorkflow(self):
"""
Tests a numpy workflow
"""
task = Task([lambda x: np.power(x, 2), lambda x: np.power(x, 3)], merge="hstack")
# Test hstack (column-wise) merge
workflow = Workflow([task])
results = list(workflow(np.array([2, 4])))
self.assertTrue(np.array_equal(np.array(results), np.array([[4, 8], [16, 64]])))
# Test vstack (row-wise) merge
task.merge = "vstack"
results = list(workflow(np.array([2, 4])))
self.assertEqual(results, [4, 8, 16, 64])
# Test no merge
task.merge = None
results = list(workflow(np.array([2, 4, 6])))
self.assertTrue(np.array_equal(np.array(results), np.array([[4, 16, 36], [8, 64, 216]])))
def testRetrieveWorkflow(self):
"""
Tests a retrieve task
"""
# Test retrieve with generated temporary directory
workflow = Workflow([RetrieveTask()])
results = list(workflow(["file://" + Utils.PATH + "/books.jpg"]))
self.assertTrue(results[0].endswith("books.jpg"))
# Test retrieve with specified temporary directory
workflow = Workflow([RetrieveTask(directory=os.path.join(tempfile.gettempdir(), "retrieve"))])
results = list(workflow(["file://" + Utils.PATH + "/books.jpg"]))
self.assertTrue(results[0].endswith("books.jpg"))
def testScheduleWorkflow(self):
"""
Tests workflow schedules
"""
# Test workflow schedule with Python
workflow = Workflow([Task()])
workflow.schedule("* * * * * *", ["test"], 1)
self.assertEqual(len(workflow.tasks), 1)
# Test workflow schedule with YAML
workflow = """
segmentation:
sentences: true
workflow:
segment:
schedule:
cron: '* * * * * *'
elements:
- a sentence to segment
iterations: 1
tasks:
- action: segmentation
task: console
"""
output = io.StringIO()
with contextlib.redirect_stdout(output):
app = API(workflow)
app.wait()
self.assertIn("a sentence to segment", output.getvalue())
def testScheduleErrorWorkflow(self):
"""
Tests workflow schedules with errors
"""
def action(elements):
raise FileNotFoundError
# Test workflow proceeds after exception raised
with self.assertLogs() as logs:
workflow = Workflow([Task(action=action)])
workflow.schedule("* * * * * *", ["test"], 1)
self.assertIn("FileNotFoundError", " ".join(logs.output))
def testStorageWorkflow(self):
"""
Tests a storage task
"""
workflow = Workflow([StorageTask()])
results = list(workflow(["local://" + Utils.PATH, "test string"]))
self.assertEqual(len(results), 19)
def testTensorTransformWorkflow(self):
"""
Tests a tensor workflow with list transformations
"""
# Test one-one list transformation
task = Task(lambda x: x.tolist())
workflow = Workflow([task])
results = list(workflow(np.array([2])))
self.assertEqual(results, [2])
# Test one-many list transformation
task = Task(lambda x: [x.tolist() * 2])
workflow = Workflow([task])
results = list(workflow(np.array([2])))
self.assertEqual(results, [2, 2])
def testTorchWorkflow(self):
"""
Tests a torch workflow
"""
# pylint: disable=E1101,E1102
task = Task([lambda x: torch.pow(x, 2), lambda x: torch.pow(x, 3)], merge="hstack")
# Test hstack (column-wise) merge
workflow = Workflow([task])
results = np.array([x.numpy() for x in workflow(torch.tensor([2, 4]))])
self.assertTrue(np.array_equal(results, np.array([[4, 8], [16, 64]])))
# Test vstack (row-wise) merge
task.merge = "vstack"
results = list(workflow(torch.tensor([2, 4])))
self.assertEqual(results, [4, 8, 16, 64])
# Test no merge
task.merge = None
results = np.array([x.numpy() for x in workflow(torch.tensor([2, 4, 6]))])
self.assertTrue(np.array_equal(np.array(results), np.array([[4, 16, 36], [8, 64, 216]])))
def testYamlFunctionWorkflow(self):
"""
Tests YAML workflow with a function action
"""
# Create function and add to module
def action(elements):
return [x * 2 for x in elements]
sys.modules[__name__].action = action
workflow = """
workflow:
run:
tasks:
- testworkflow.action
"""
app = API(workflow)
self.assertEqual(list(app.workflow("run", [1, 2])), [2, 4])
def testYamlIndexWorkflow(self):
"""
Tests reading a YAML index workflow in Python.
"""
app = API(self.config)
self.assertEqual(
list(app.workflow("index", ["This is a test sentence. And another sentence to split."])),
["This is a test sentence.", "And another sentence to split."],
)
# Read from file
path = os.path.join(tempfile.gettempdir(), "workflow.yml")
with open(path, "w", encoding="utf-8") as f:
f.write(self.config)
app = API(path)
self.assertEqual(
list(app.workflow("index", ["This is a test sentence. And another sentence to split."])),
["This is a test sentence.", "And another sentence to split."],
)
# Read from YAML object
app = API(API.read(self.config))
self.assertEqual(
list(app.workflow("index", ["This is a test sentence. And another sentence to split."])),
["This is a test sentence.", "And another sentence to split."],
)
def testYamlSearchWorkflow(self):
"""
Test reading a YAML search workflow in Python.
"""
# Test search
app = API(self.config)
list(app.workflow("index", ["This is a test sentence. And another sentence to split."]))
self.assertEqual(
list(app.workflow("search", ["another"]))[0]["text"],
"And another sentence to split.",
)
def testYamlWorkflowTask(self):
"""
Tests YAML workflow with a workflow task
"""
# Create function and add to module
def action(elements):
return [x * 2 for x in elements]
sys.modules[__name__].action = action
workflow = """
workflow:
run:
tasks:
- testworkflow.action
flow:
tasks:
- run
"""
app = API(workflow)
self.assertEqual(list(app.workflow("flow", [1, 2])), [2, 4])
def testYamlTransformWorkflow(self):
"""
Test reading a YAML transform workflow in Python.
"""
        # Test transform
app = API(self.config)
self.assertEqual(len(list(app.workflow("transform", ["text"]))[0]), 128)
def testYamlError(self):
"""
Tests reading a YAML workflow with errors.
"""
# Read from string
config = """
# Workflow definitions
workflow:
error:
tasks:
- action: error
"""
with self.assertRaises(KeyError):
API(config)
| 30.873346 | 146 | 0.558903 | 15,811 | 0.968099 | 0 | 0 | 749 | 0.045861 | 0 | 0 | 6,007 | 0.367806 |
48f141e3c4e406a1ed8e50060eb75658e2cb4aab | 202 | py | Python | apps/summary/urls.py | sotkonstantinidis/testcircle | 448aa2148fbc2c969e60f0b33ce112d4740a8861 | [
"Apache-2.0"
]
| 3 | 2019-02-24T14:24:43.000Z | 2019-10-24T18:51:32.000Z | apps/summary/urls.py | sotkonstantinidis/testcircle | 448aa2148fbc2c969e60f0b33ce112d4740a8861 | [
"Apache-2.0"
]
| 17 | 2017-03-14T10:55:56.000Z | 2022-03-11T23:20:19.000Z | apps/summary/urls.py | sotkonstantinidis/testcircle | 448aa2148fbc2c969e60f0b33ce112d4740a8861 | [
"Apache-2.0"
]
| 2 | 2016-02-01T06:32:40.000Z | 2019-09-06T04:33:50.000Z | from django.conf.urls import url
from .views import SummaryPDFCreateView
urlpatterns = [
url(r'^(?P<id>[\d]+)/$',
SummaryPDFCreateView.as_view(),
name='questionnaire_summary'),
]
| 18.363636 | 39 | 0.658416 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 42 | 0.207921 |
48f3e0cd5e4cb55eec34f20d3487909f95548f7a | 1,418 | py | Python | utipy/array/blend.py | LudvigOlsen/utipy | c287f7eed15b3591118bba49ecdfc2b2605f59a0 | [
"MIT"
]
| null | null | null | utipy/array/blend.py | LudvigOlsen/utipy | c287f7eed15b3591118bba49ecdfc2b2605f59a0 | [
"MIT"
]
| 1 | 2022-02-16T15:24:33.000Z | 2022-02-16T15:24:33.000Z | utipy/array/blend.py | LudvigOlsen/utipy | c287f7eed15b3591118bba49ecdfc2b2605f59a0 | [
"MIT"
]
| null | null | null | """
@author: ludvigolsen
"""
from typing import Union
import numpy as np
import pandas as pd
from utipy.utils.check_instance import check_instance
from utipy.utils.convert_to_type import convert_to_type
def blend(x1: Union[list, np.ndarray, pd.Series], x2: Union[list, np.ndarray, pd.Series], amount: float = 0.5) -> Union[list, np.ndarray, pd.Series]:
"""
Blend two arrays
Parameters
----------
x1 : list, np.ndarray, pd.Series
The first array.
x2 : list, np.ndarray, pd.Series
The second array.
amount : float
Blend rate.
        Fraction between 0 and 1.
0: Keep only x1.
1: Keep only x2.
0.1: 10% x2 / 90% x1.
A value in-between 0-1 will result in integers becoming floats.
Returns
-------
list, np.ndarray, pd.Series
Blended array with type of the original (x1)
Examples
--------
Uncomment code to run.
# x1 = [1,2,3,4,5]
# x2 = [4,5,6,7,8]
# blend(x1, x2, amount = 0.5)
returns [2.5,3.5,4.5,5.5,6.5]
"""
# Get instance types (np.ndarray, list, pd.Series)
instance_type = check_instance(x1)
x1_weighted = np.multiply(x1, (1 - amount))
x2_weighted = np.multiply(x2, amount)
blended = x1_weighted + x2_weighted
# Convert to original type (np.ndarray, list, pd.Series)
return convert_to_type(blended, instance_type)
| 24.448276 | 149 | 0.608604 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 851 | 0.600141 |
48f4b8e6c0c1a95b21e6fbc67429a32685a3063d | 126 | py | Python | output/models/ms_data/regex/hangul_compatibility_jamo_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
]
| 1 | 2021-08-14T17:59:21.000Z | 2021-08-14T17:59:21.000Z | output/models/ms_data/regex/hangul_compatibility_jamo_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
]
| 4 | 2020-02-12T21:30:44.000Z | 2020-04-15T20:06:46.000Z | output/models/ms_data/regex/hangul_compatibility_jamo_xsd/__init__.py | tefra/xsdata-w3c-tests | b6b6a4ac4e0ab610e4b50d868510a8b7105b1a5f | [
"MIT"
]
| null | null | null | from output.models.ms_data.regex.hangul_compatibility_jamo_xsd.hangul_compatibility_jamo import Doc
__all__ = [
"Doc",
]
| 21 | 99 | 0.801587 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 5 | 0.039683 |
48f68e5109bfeba6516e554517563cbef752a170 | 519 | py | Python | ex082.py | favitoria/python123 | 99074c309b700f48ddc6aa0811a1891145281af7 | [
"MIT"
]
| null | null | null | ex082.py | favitoria/python123 | 99074c309b700f48ddc6aa0811a1891145281af7 | [
"MIT"
]
| null | null | null | ex082.py | favitoria/python123 | 99074c309b700f48ddc6aa0811a1891145281af7 | [
"MIT"
]
| null | null | null | resposta = 'Ss'
numeros = 0
listaTODOS = []
listaPAR = []
listaIMPAR = []
while resposta != 'N':
numeros = int(input('Digite um número: '))
resposta = str(input('Deseja continuar [S/N]? '))
if numeros % 2 == 0:
listaPAR.append(numeros)
elif numeros % 2 == 1:
listaIMPAR.append(numeros)
listaTODOS.append(numeros)
print(f'Os valores PARES digitados foram: {listaPAR}')
print(f'Os valores IMPARES digitados foram: {listaIMPAR}')
listaTODOS.sort()
print(f'No TOTAL foram: {listaTODOS}')
 | 30.529412 | 58 | 0.660886 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.351923 |
48f6af2a7976b7669c6376018cbf7149ae87451d | 2,218 | py | Python | CodingInterview2/29_PrintMatrix/print_matrix.py | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
]
| 10 | 2020-07-06T11:00:58.000Z | 2022-01-29T09:25:24.000Z | CodingInterview2/29_PrintMatrix/print_matrix.py | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
]
| null | null | null | CodingInterview2/29_PrintMatrix/print_matrix.py | hscspring/TheAlgorithms-Python | 5c2faea1d2d25a9a81a4786e053b0cc58ab46c6f | [
"MIT"
]
| 3 | 2020-07-13T06:39:23.000Z | 2020-08-15T16:29:48.000Z | """
Interview question 29: print a matrix in clockwise order.
Problem: given a matrix, print every number in clockwise (spiral) order, working from the outside in.
"""
def make_matrix(rows: int, cols: int) -> list:
res = []
k = 0
for i in range(rows):
tmp = []
for j in range(cols):
k += 1
tmp.append(k)
res.append(tmp)
return res
def print_matrix_clockwisely(matrix: list) -> list:
"""
Print the given matrix clockwesely.
Parameters
-----------
matrix: list[list]
the given matrix.
Returns
---------
out: list
the clockwise order of the matrix.
Notes
------
"""
if not matrix:
return []
if not matrix[0]:
return []
res = []
start = 0
rows, cols = len(matrix), len(matrix[0])
while rows > 2 * start and cols > 2 * start:
print_circle2(matrix, rows, cols, start, res)
start += 1
return res
def print_circle(matrix: list, rows: int, cols: int, start: int, res: list):
endx = cols - 1 - start
endy = rows - 1 - start
# left -> right
for i in range(start, endx+1):
res.append(matrix[start][i])
# up -> below
if start < endy:
for i in range(start+1, endy+1):
res.append(matrix[i][endx])
# right -> left
if start < endx and start < endy:
for i in reversed(range(start, endx)):
res.append(matrix[endy][i])
# below -> up
if start < endx and start < endy - 1:
for i in reversed(range(start+1, endy)):
res.append(matrix[i][start])
def print_circle2(matrix: list, rows: int, cols: int, start: int, res: list):
endx = cols - 1 - start
endy = rows - 1 - start
# left -> right
for i in range(start, endx+1):
res.append(matrix[start][i])
# up -> below
for i in range(start+1, endy+1):
res.append(matrix[i][endx])
# right -> left
if start < endy:
for i in reversed(range(start, endx)):
res.append(matrix[endy][i])
# below -> up
if start < endx:
for i in reversed(range(start+1, endy)):
res.append(matrix[i][start])
if __name__ == '__main__':
m = make_matrix(1,5)
print(m)
res = print_matrix_clockwisely(m)
print(res)
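# Worked example (added for clarity, not in the original file): for the 3x4 matrix
# produced by make_matrix(3, 4),
#   [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12]]
# print_matrix_clockwisely returns the spiral order
#   [1, 2, 3, 4, 8, 12, 11, 10, 9, 5, 6, 7]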
| 21.533981 | 77 | 0.540126 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 509 | 0.220537 |
48f6c64933693697a368fb1d2ae925d6fe4cb255 | 1,170 | py | Python | migrations/versions/ee5315dcf3e1_.py | wildintellect/tasking-manager | 373fb231404628e6ae9a1838539b9c3cb23ad73c | [
"BSD-2-Clause"
]
| 3 | 2018-04-24T08:12:31.000Z | 2020-09-02T18:11:21.000Z | migrations/versions/ee5315dcf3e1_.py | wildintellect/tasking-manager | 373fb231404628e6ae9a1838539b9c3cb23ad73c | [
"BSD-2-Clause"
]
| 28 | 2019-01-04T17:39:00.000Z | 2021-05-06T23:06:24.000Z | migrations/versions/ee5315dcf3e1_.py | wildintellect/tasking-manager | 373fb231404628e6ae9a1838539b9c3cb23ad73c | [
"BSD-2-Clause"
]
| 3 | 2020-02-29T20:46:09.000Z | 2020-11-20T19:44:04.000Z | """empty message
Revision ID: ee5315dcf3e1
Revises: 9f5b73af01db
Create Date: 2017-05-24 10:39:46.586986
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ee5315dcf3e1'
down_revision = '9f5b73af01db'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('users', sa.Column('email_address', sa.String(), nullable=True))
op.add_column('users', sa.Column('facebook_id', sa.String(), nullable=True))
op.add_column('users', sa.Column('is_email_verified', sa.Boolean(), nullable=True))
op.add_column('users', sa.Column('linkedin_id', sa.String(), nullable=True))
op.add_column('users', sa.Column('twitter_id', sa.String(), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('users', 'twitter_id')
op.drop_column('users', 'linkedin_id')
op.drop_column('users', 'is_email_verified')
op.drop_column('users', 'facebook_id')
op.drop_column('users', 'email_address')
# ### end Alembic commands ###
| 31.621622 | 87 | 0.694017 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 574 | 0.490598 |
48f9216cd7b0c9c64c3f0cc8145822d20126a1a1 | 572 | py | Python | python/random-videogame-generator.py | iamashiq/Hacktoberfest2021-2 | 9823996e9e97a25fcf70abc6fd6c55e4b60da568 | [
"MIT"
]
| 6 | 2021-10-04T07:57:24.000Z | 2021-11-15T13:35:21.000Z | python/random-videogame-generator.py | iamashiq/Hacktoberfest2021-2 | 9823996e9e97a25fcf70abc6fd6c55e4b60da568 | [
"MIT"
]
| 2 | 2021-10-14T16:55:50.000Z | 2021-10-31T12:17:20.000Z | python/random-videogame-generator.py | iamashiq/Hacktoberfest2021-2 | 9823996e9e97a25fcf70abc6fd6c55e4b60da568 | [
"MIT"
]
| 33 | 2021-10-03T05:00:58.000Z | 2021-11-05T19:49:19.000Z | print("Are you trying to find new videogames to play?")
print("let me help you!")
print("do you like shooting games, yes or no")
shooting=input()
if shooting == "yes":
    print("do you like battle royale games?")
    br=input()
    if br == "yes":
        print("you should try out call of duty!")
    elif br == "no":
        print("you should try overwatch!")
elif shooting == "no":
    print("do you like sports games, yes or no")
    sports=input()
    if sports == "yes":
        print("try out Fifa or NBA2k!")
    elif sports == "no":
        print("I know, try out rocket league!")
| 30.105263 | 55 | 0.636364 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 320 | 0.559441 |
48f9edbd6a5a7ba5a520ddc41c7a0b91f9666bf5 | 1,382 | py | Python | cosmic_ray/operators/unary_operator_replacement.py | rob-smallshire/cosmic-ray | 4fd751b38eee30568f8366e09452d7aa60be4e26 | [
"MIT"
]
| null | null | null | cosmic_ray/operators/unary_operator_replacement.py | rob-smallshire/cosmic-ray | 4fd751b38eee30568f8366e09452d7aa60be4e26 | [
"MIT"
]
| null | null | null | cosmic_ray/operators/unary_operator_replacement.py | rob-smallshire/cosmic-ray | 4fd751b38eee30568f8366e09452d7aa60be4e26 | [
"MIT"
]
| null | null | null | """Implementation of the unary-operator-replacement operator.
"""
import ast
from .operator import Operator
from ..util import build_mutations
# None indicates we want to delete the operator
OPERATORS = (ast.UAdd, ast.USub, ast.Invert, ast.Not, None)
def _to_ops(from_op):
"""
The sequence of operators which `from_op` could be mutated to.
"""
for to_op in OPERATORS:
if to_op and isinstance(from_op, ast.Not):
# 'not' can only be removed but not replaced with
# '+', '-' or '~' b/c that may lead to strange results
pass
elif isinstance(from_op, ast.UAdd) and (to_op is None):
# '+1' => '1' yields equivalent mutations
pass
else:
yield to_op
class MutateUnaryOperator(Operator):
"""An operator that modifies unary operators."""
def visit_UnaryOp(self, node): # pylint: disable=invalid-name
"""
http://greentreesnakes.readthedocs.io/en/latest/nodes.html#UnaryOp
"""
return self.visit_mutation_site(
node,
len(build_mutations([node.op], _to_ops)))
def mutate(self, node, idx):
"Perform the `idx`th mutation on node."
_, to_op = build_mutations([node.op], _to_ops)[idx]
if to_op:
node.op = to_op()
return node
return node.operand
| 28.791667 | 78 | 0.607815 | 615 | 0.445007 | 507 | 0.36686 | 0 | 0 | 0 | 0 | 549 | 0.39725 |
48fa5657a82772ca80f844d0c1f8bca709ceaf35 | 2,069 | py | Python | src/icolos/core/workflow_steps/calculation/rmsd.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
]
| 11 | 2022-01-30T14:36:13.000Z | 2022-03-22T09:40:57.000Z | src/icolos/core/workflow_steps/calculation/rmsd.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
]
| 2 | 2022-03-23T07:56:49.000Z | 2022-03-24T12:01:42.000Z | src/icolos/core/workflow_steps/calculation/rmsd.py | jharrymoore/Icolos | c60cc00c34208ab7011d41d52a74651763673e7a | [
"Apache-2.0"
]
| 8 | 2022-01-28T10:32:31.000Z | 2022-03-22T09:40:59.000Z | from typing import List
from pydantic import BaseModel
from icolos.core.containers.compound import Conformer, unroll_conformers
from icolos.utils.enums.step_enums import StepRMSDEnum, StepDataManipulationEnum
from icolos.core.workflow_steps.step import _LE
from icolos.core.workflow_steps.calculation.base import StepCalculationBase
_SR = StepRMSDEnum()
_SDM = StepDataManipulationEnum()
class StepRMSD(StepCalculationBase, BaseModel):
def __init__(self, **data):
super().__init__(**data)
# extend parameters
if _SR.METHOD not in self.settings.additional.keys():
self.settings.additional[_SR.METHOD] = _SR.METHOD_ALIGNMOL
def _calculate_RMSD(self, conformers: List[Conformer]):
for conf in conformers:
rmsd_matrix = self._calculate_rms_matrix(
conformers=[conf] + conf.get_extra_data()[_SDM.KEY_MATCHED],
rms_method=self._get_rms_method(),
)
# use the specified tag name if it is the first value and append an index in case there are more
for idx, col in enumerate(rmsd_matrix.columns[1:]):
combined_tag = "".join([_SR.RMSD_TAG, "" if idx == 0 else str(idx)])
rmsd_value = rmsd_matrix.iloc[[0]][col][0]
conf.get_molecule().SetProp(combined_tag, str(rmsd_value))
conf.get_extra_data()[_SDM.KEY_MATCHED][idx].get_molecule().SetProp(
combined_tag, str(rmsd_value)
)
def execute(self):
# this assumes that the conformers that are to be matched for the calculation of the RMSD matrix, are attached
# as a list in a generic data field with a specified key
conformers = unroll_conformers(compounds=self.get_compounds())
self._calculate_RMSD(conformers=conformers)
self._logger.log(
f"Annotated {len(conformers)} conformers with RMSD values (tag: {_SR.RMSD_TAG}).",
_LE.INFO,
)
# TODO: add a nice pandas DF with the RMSD values to a generic data field
| 43.104167 | 118 | 0.669889 | 1,676 | 0.810053 | 0 | 0 | 0 | 0 | 0 | 0 | 439 | 0.21218 |
48fb1aa9e5e10603d8a878537cb85772b452f285 | 468 | py | Python | iot/iot_portal/doctype/iot_homepage/iot_homepage.py | srdgame/symlink_iot | 6ec524498cccaf2f49f7264a3b284a8956bd430c | [
"MIT"
]
| 4 | 2017-09-26T09:21:19.000Z | 2021-12-22T10:26:36.000Z | iot/iot_portal/doctype/iot_homepage/iot_homepage.py | srdgame/symlink_iot | 6ec524498cccaf2f49f7264a3b284a8956bd430c | [
"MIT"
]
| 1 | 2017-11-21T20:53:10.000Z | 2017-12-11T02:17:06.000Z | iot/iot_portal/doctype/iot_homepage/iot_homepage.py | srdgame/symlink_iot | 6ec524498cccaf2f49f7264a3b284a8956bd430c | [
"MIT"
]
| 9 | 2017-03-17T04:12:22.000Z | 2022-03-21T09:33:11.000Z | # -*- coding: utf-8 -*-
# Copyright (c) 2017, Dirk Chang and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
from frappe.website.utils import delete_page_cache
class IOTHomepage(Document):
def validate(self):
if not self.description:
self.description = frappe._("This is an example website auto-generated from IOT")
delete_page_cache('iot_home')
| 31.2 | 84 | 0.782051 | 193 | 0.412393 | 0 | 0 | 0 | 0 | 0 | 0 | 183 | 0.391026 |
48fb52f8c130468ec6ba0fdb93a761de09a44b65 | 368 | py | Python | src/garage/envs/env_spec.py | Maltimore/garage | a3f44b37eeddca37d157766a9a72e8772f104bcd | [
"MIT"
]
| 2 | 2020-03-15T14:35:15.000Z | 2021-02-15T16:38:00.000Z | src/garage/envs/env_spec.py | Maltimore/garage | a3f44b37eeddca37d157766a9a72e8772f104bcd | [
"MIT"
]
| null | null | null | src/garage/envs/env_spec.py | Maltimore/garage | a3f44b37eeddca37d157766a9a72e8772f104bcd | [
"MIT"
]
| 1 | 2020-02-24T03:04:23.000Z | 2020-02-24T03:04:23.000Z | """EnvSpec class."""
class EnvSpec:
"""EnvSpec class.
Args:
observation_space (akro.Space): The observation space of the env.
action_space (akro.Space): The action space of the env.
"""
def __init__(self, observation_space, action_space):
self.observation_space = observation_space
self.action_space = action_space
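# Rough usage sketch (illustrative only; the akro spaces and the consumer below
# are assumptions, not taken from this file):
#
#   spec = EnvSpec(observation_space=akro.Box(low=-1, high=1, shape=(4,)),
#                  action_space=akro.Discrete(2))
#   policy = SomePolicy(env_spec=spec)   # hypothetical consumer of the spec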
| 23 | 73 | 0.668478 | 344 | 0.934783 | 0 | 0 | 0 | 0 | 0 | 0 | 195 | 0.529891 |
48fc04ddecaf2a0349002da2c688a1f9e69caacb | 105 | py | Python | exercises/exe41 - 50/exe047.py | thomas-rohde/Classes-Python | f862995510b7aabf68bc14aecf815f597034d8a1 | [
"MIT"
]
| null | null | null | exercises/exe41 - 50/exe047.py | thomas-rohde/Classes-Python | f862995510b7aabf68bc14aecf815f597034d8a1 | [
"MIT"
]
| null | null | null | exercises/exe41 - 50/exe047.py | thomas-rohde/Classes-Python | f862995510b7aabf68bc14aecf815f597034d8a1 | [
"MIT"
]
| null | null | null | t = int(input('Digite um nº: '))
for t0 in range(1, 11):
print('{} X {} = {}'.format(t, t0, t * t0))
| 26.25 | 47 | 0.495238 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 31 | 0.292453 |
48fe1f175aa02923066c86fda95e2c0081a49955 | 98,484 | py | Python | pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 8 | 2019-05-09T17:04:00.000Z | 2021-06-09T06:50:51.000Z | pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 4 | 2019-05-31T16:42:59.000Z | 2020-01-31T21:57:17.000Z | pysnmp-with-texts/CISCO-DIAMETER-BASE-PROTOCOL-MIB.py | agustinhenze/mibs.snmplabs.com | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | [
"Apache-2.0"
]
| 10 | 2019-04-30T05:51:36.000Z | 2022-02-16T03:33:41.000Z | #
# PySNMP MIB module CISCO-DIAMETER-BASE-PROTOCOL-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/CISCO-DIAMETER-BASE-PROTOCOL-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:54:20 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsUnion, ValueRangeConstraint, ConstraintsIntersection, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsUnion", "ValueRangeConstraint", "ConstraintsIntersection", "SingleValueConstraint")
ciscoExperiment, = mibBuilder.importSymbols("CISCO-SMI", "ciscoExperiment")
InetAddressType, InetAddress = mibBuilder.importSymbols("INET-ADDRESS-MIB", "InetAddressType", "InetAddress")
SnmpAdminString, = mibBuilder.importSymbols("SNMP-FRAMEWORK-MIB", "SnmpAdminString")
ModuleCompliance, ObjectGroup, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "ObjectGroup", "NotificationGroup")
Gauge32, ObjectIdentity, Unsigned32, NotificationType, iso, MibIdentifier, Counter64, Counter32, Bits, Integer32, ModuleIdentity, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "Gauge32", "ObjectIdentity", "Unsigned32", "NotificationType", "iso", "MibIdentifier", "Counter64", "Counter32", "Bits", "Integer32", "ModuleIdentity", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks")
RowStatus, StorageType, TruthValue, DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "StorageType", "TruthValue", "DisplayString", "TextualConvention")
ciscoDiameterBasePMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 9, 10, 133))
ciscoDiameterBasePMIB.setRevisions(('2006-08-24 00:01',))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setRevisionsDescriptions(('Initial version of this MIB module.',))
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setLastUpdated('200608240001Z')
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setOrganization('Cisco Systems, Inc.')
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setContactInfo('Cisco Systems Customer Service Postal: 170 W Tasman Drive San Jose, CA 95134 USA Tel: +1 800 553-NETS E-mail: [email protected]')
if mibBuilder.loadTexts: ciscoDiameterBasePMIB.setDescription("The MIB module for entities implementing the Diameter Base Protocol. Initial Cisco'ized version of the IETF draft draft-zorn-dime-diameter-base-protocol-mib-00.txt.")
ciscoDiameterBasePMIBNotifs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 0))
ciscoDiameterBasePMIBObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1))
ciscoDiameterBasePMIBConform = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2))
cdbpLocalCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1))
cdbpLocalStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2))
cdbpPeerCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3))
cdbpPeerStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4))
cdbpRealmCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5))
cdbpRealmStats = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6))
cdbpTrapCfgs = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7))
ciscoDiaBaseProtEnableProtocolErrorNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 1), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnableProtocolErrorNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnableProtocolErrorNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtProtocolErrorNotif notification.')
ciscoDiaBaseProtProtocolErrorNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsProtocolErrors"))
if mibBuilder.loadTexts: ciscoDiaBaseProtProtocolErrorNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtProtocolErrorNotif.setDescription('An ciscoDiaBaseProtProtocolErrorNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnableProtocolErrorNotif is true(1) 2) the value of cdbpPeerStatsProtocolErrors changes. It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnableTransientFailureNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 2), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnableTransientFailureNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnableTransientFailureNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtTransientFailureNotif notification.')
ciscoDiaBaseProtTransientFailureNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 2)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransientFailures"))
if mibBuilder.loadTexts: ciscoDiaBaseProtTransientFailureNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtTransientFailureNotif.setDescription('An ciscoDiaBaseProtTransientFailureNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnableTransientFailureNotif is true(1) 2) the value of cdbpPeerStatsTransientFailures changes. It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnablePermanentFailureNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 3), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePermanentFailureNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePermanentFailureNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPermanentFailureNotif notification.')
ciscoDiaBaseProtPermanentFailureNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 3)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsPermanentFailures"))
if mibBuilder.loadTexts: ciscoDiaBaseProtPermanentFailureNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtPermanentFailureNotif.setDescription('An ciscoDiaBaseProtPermanentFailureNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePermanentFailureNotif is true(1) 2) the value of cdbpPeerStatsPermanentFailures changes. It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnablePeerConnectionDownNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 4), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionDownNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionDownNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPeerConnectionDownNotif notification.')
ciscoDiaBaseProtPeerConnectionDownNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 4)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"))
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionDownNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionDownNotif.setDescription('An ciscoDiaBaseProtPeerConnectionDownNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePeerConnectionDownNotif is true(1) 2) cdbpPeerStatsState changes to closed(1). It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
ciscoDiaBaseProtEnablePeerConnectionUpNotif = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 7, 5), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionUpNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtEnablePeerConnectionUpNotif.setDescription('Setting the value of this object to true(1) enables the ciscoDiaBaseProtPeerConnectionUpNotif notification.')
ciscoDiaBaseProtPeerConnectionUpNotif = NotificationType((1, 3, 6, 1, 4, 1, 9, 10, 133, 0, 5)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"))
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionUpNotif.setStatus('current')
if mibBuilder.loadTexts: ciscoDiaBaseProtPeerConnectionUpNotif.setDescription('An ciscoDiaBaseProtPeerConnectionUpNotif notification is sent when both the following conditions are true: 1) the value of ciscoDiaBaseProtEnablePeerConnectionUpNotif is true(1) 2) the value of cdbpPeerStatsState changes to either rOpen(6)or iOpen(7). It can be utilized by an NMS to trigger logical/physical entity table maintenance polls.')
cdbpLocalId = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 1), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalId.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalId.setDescription("The implementation identification string for the Diameter software in use on the system, for example; 'diameterd'")
cdbpLocalIpAddrTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2), )
if mibBuilder.loadTexts: cdbpLocalIpAddrTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrTable.setDescription("The table listing the Diameter local host's IP Addresses.")
cdbpLocalIpAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddrIndex"))
if mibBuilder.loadTexts: cdbpLocalIpAddrEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrEntry.setDescription('A row entry representing a Diameter local host IP Address.')
cdbpLocalIpAddrIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalIpAddrIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrIndex.setDescription('A number uniquely identifying the number of IP Addresses supported by this Diameter host.')
cdbpLocalIpAddrType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalIpAddrType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddrType.setDescription('The type of internet address stored in cdbpLocalIpAddress.')
cdbpLocalIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 2, 1, 3), InetAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalIpAddress.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalIpAddress.setDescription('The IP-Address of the host, which is of the type specified in cdbpLocalIpAddrType.')
cdbpLocalTcpListenPort = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalTcpListenPort.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalTcpListenPort.setDescription("This object represents Diameter TCP 'listen' port.")
cdbpLocalSctpListenPort = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalSctpListenPort.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalSctpListenPort.setDescription("This object represents Diameter SCTP 'listen' port.")
cdbpLocalOriginHost = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 5), SnmpAdminString()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpLocalOriginHost.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalOriginHost.setDescription('This object represents the Local Origin Host.')
cdbpLocalRealm = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 6), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalRealm.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalRealm.setDescription('This object represents the Local Realm Name.')
cdbpRedundancyEnabled = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 7), TruthValue().clone('false')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpRedundancyEnabled.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyEnabled.setDescription('This parameter indicates if cisco redundancy has been enabled, it is enabled if set to true and disabled if set to false.')
cdbpRedundancyInfraState = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14))).clone(namedValues=NamedValues(("rfUnknown", 0), ("rfDisabled", 1), ("rfInitialization", 2), ("rfNegotiation", 3), ("rfStandbyCold", 4), ("rfStandbyConfig", 5), ("rfStandbyFileSys", 6), ("rfStandbyBulk", 7), ("rfStandbyHot", 8), ("rfActiveFast", 9), ("rfActiveDrain", 10), ("rfActivePreconfig", 11), ("rfActivePostconfig", 12), ("rfActive", 13), ("rfActiveExtraload", 14)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRedundancyInfraState.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyInfraState.setDescription("This parameter indicates the current state of the cisco redundancy infrastructure. rfUnknown(0) - unknown state rfDisabled(1) - RF is not functioning at this time rfInitialization(2) - co-ordinating init with platform rfNegotiation(3) - initial negotiation with peer to determine active-standby rfStandbyCold(4) - peer is active, we're cold rfStandbyConfig(5) - sync config from active to standby rfStandbyFileSys(6) - sync file sys from active to standby rfStandbyBulk(7) - clients bulk sync from active to standby rfStandbyHot(8) - standby ready-n-able to be active rfActiveFast(9) - immediate notification of standby going active rfActiveDrain(10) - drain queued messages from peer rfActivePreconfig(11) - active and before config rfActivePostconfig(12) - active and post config rfActive(13) - actively processing new calls rfActiveExtraload(14) - actively processing new calls with extra load because another processing entity has failed.")
cdbpRedundancyLastSwitchover = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 9), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRedundancyLastSwitchover.setStatus('current')
if mibBuilder.loadTexts: cdbpRedundancyLastSwitchover.setDescription('This object represents the Last Switchover Time.')
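# --- Illustrative example (not part of the generated MIB definitions) --------
# A minimal sketch of reading the scalar identity/redundancy objects defined
# above (origin host, realm, TCP listen port, redundancy flag) in a single
# GET. Hostname and community string are placeholders.
#
#     from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                               ContextData, ObjectType, ObjectIdentity, getCmd)
#
#     errInd, errStat, errIdx, varBinds = next(getCmd(
#         SnmpEngine(), CommunityData('public'),
#         UdpTransportTarget(('diameter-host.example.com', 161)), ContextData(),
#         ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB', 'cdbpLocalOriginHost', 0)),
#         ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB', 'cdbpLocalRealm', 0)),
#         ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB', 'cdbpLocalTcpListenPort', 0)),
#         ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB', 'cdbpRedundancyEnabled', 0))))
#     for varBind in varBinds:
#         print(' = '.join(x.prettyPrint() for x in varBind))
# ------------------------------------------------------------------------------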
cdbpLocalApplTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10), )
if mibBuilder.loadTexts: cdbpLocalApplTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplTable.setDescription('The table listing the Diameter applications supported by this server.')
cdbpLocalApplEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplIndex"))
if mibBuilder.loadTexts: cdbpLocalApplEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplEntry.setDescription('A row entry representing a Diameter application on this server.')
cdbpLocalApplIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalApplIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplIndex.setDescription('A number uniquely identifying a supported Diameter application. Upon reload, cdbpLocalApplIndex values may be changed.')
cdbpLocalApplStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 2), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplStorageType.setDescription('The storage type for this conceptual row. None of the columnar objects is writable when the conceptual row is permanent.')
cdbpLocalApplRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 10, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalApplRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalApplRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdsgStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpLocalApplIndex has been set. cdbpLocalApplIndex may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpLocalApplStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpLocalApplStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpLocalApplStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
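# --- Illustrative example (not part of the generated MIB definitions) --------
# A minimal sketch of creating a cdbpLocalApplTable row via RowStatus as the
# description above outlines: a single createAndGo(4) SET against a chosen,
# unused instance of cdbpLocalApplIndex. The index value 100, hostname and
# community string are hypothetical, and whether a one-varbind createAndGo
# succeeds depends on the agent.
#
#     from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                               ContextData, ObjectType, ObjectIdentity, setCmd)
#
#     APPL_INDEX = 100    # hypothetical free value for cdbpLocalApplIndex
#     errInd, errStat, errIdx, varBinds = next(setCmd(
#         SnmpEngine(), CommunityData('private'),
#         UdpTransportTarget(('diameter-host.example.com', 161)), ContextData(),
#         ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB',
#                                   'cdbpLocalApplRowStatus', APPL_INDEX),
#                    4)))                          # RowStatus: createAndGo(4)
#     if errInd or errStat:
#         raise RuntimeError(errInd or errStat.prettyPrint())
# ------------------------------------------------------------------------------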
cdbpLocalVendorTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11), )
if mibBuilder.loadTexts: cdbpLocalVendorTable.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorTable.setDescription('The table listing the vendor IDs supported by local Diameter.')
cdbpLocalVendorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorIndex"))
if mibBuilder.loadTexts: cdbpLocalVendorEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorEntry.setDescription('A row entry representing a vendor ID supported by local Diameter.')
cdbpLocalVendorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpLocalVendorIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorIndex.setDescription('A number uniquely identifying the vendor ID supported by local Diameter. Upon reload, cdbpLocalVendorIndex values may be changed.')
cdbpLocalVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 9, 10415, 12645))).clone(namedValues=NamedValues(("diameterVendorIetf", 0), ("diameterVendorCisco", 9), ("diameterVendor3gpp", 10415), ("diameterVendorVodafone", 12645))).clone('diameterVendorIetf')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorId.setDescription('The active vendor ID used for peer connections. diameterVendorIetf(0) - Diameter vendor id ietf diameterVendorCisco(9) - Diameter vendor id cisco diameterVendor3gpp(10415) - Diameter vendor id 3gpp diameterVendorVodafone(12645) - Diameter vendor id vodafone.')
cdbpLocalVendorStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpLocalVendorRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 11, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpLocalVendorRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalVendorRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpLocalVendorRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpLocalVendorId has been set. cdbpLocalVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpLocalVendorRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpLocalVendorRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpLocalVendorRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
cdbpAppAdvToPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12), )
if mibBuilder.loadTexts: cdbpAppAdvToPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerTable.setDescription('The table listing the applications advertised by this host to each peer and the types of service supported: accounting, authentication or both.')
cdbpAppAdvToPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerVendorId"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerIndex"))
if mibBuilder.loadTexts: cdbpAppAdvToPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
cdbpAppAdvToPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvToPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerVendorId.setDescription('The IANA Enterprise Code value assigned to the vendor of the Diameter device.')
cdbpAppAdvToPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvToPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerIndex.setDescription('A number uniquely identifying the Diameter applications advertised as supported by this host to each peer. Upon reload, cdbpAppAdvToPeerIndex values may be changed.')
cdbpAppAdvToPeerServices = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpAppAdvToPeerServices.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerServices.setDescription('The type of services supported for each application, accounting, authentication or both. acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.')
cdbpAppAdvToPeerStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 4), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpAppAdvToPeerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 1, 12, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpAppAdvToPeerRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvToPeerRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpAppAdvToPeerRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpAppAdvToPeerVendorId has been set. cdbpAppAdvToPeerVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpAppAdvToPeerRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpAppAdvToPeerRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpAppAdvToPeerRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
cdbpLocalStatsTotalPacketsIn = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 1), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsIn.setDescription('The total number of packets received by Diameter Base Protocol.')
cdbpLocalStatsTotalPacketsOut = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 2), Counter32()).setUnits('packets').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalPacketsOut.setDescription('The total number of packets transmitted by Diameter Base Protocol.')
cdbpLocalStatsTotalUpTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 3), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalStatsTotalUpTime.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalStatsTotalUpTime.setDescription('This object represents the total time the Diameter server has been up until now.')
cdbpLocalResetTime = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 4), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpLocalResetTime.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalResetTime.setDescription("If the server keeps persistent state (e.g., a process) and supports a 'reset' operation (e.g., can be told to re-read configuration files), this value will be the time elapsed (in hundredths of a second) since the server was 'reset'. For software that does not have persistence or does not support a 'reset' operation, this value will be zero.")
cdbpLocalConfigReset = MibScalar((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("reset", 2), ("initializing", 3), ("running", 4))).clone('other')).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpLocalConfigReset.setStatus('current')
if mibBuilder.loadTexts: cdbpLocalConfigReset.setDescription('Status/action object to reinitialize any persistent server state. When set to reset(2), any persistent server state (such as a process) is reinitialized as if the server had just been started. This value will never be returned by a read operation. When read, one of the following values will be returned: other(1) - server in some unknown state. reset(2) - command to reinitialize server state. initializing(3) - server (re)initializing. running(4) - server currently running.')
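# --- Illustrative example (not part of the generated MIB definitions) --------
# A minimal sketch of driving cdbpLocalConfigReset: write reset(2) to trigger
# re-initialization, then read the object back to observe initializing(3) or
# running(4). Hostname and community string are placeholders.
#
#     from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                               ContextData, ObjectType, ObjectIdentity,
#                               getCmd, setCmd)
#
#     target = (SnmpEngine(), CommunityData('private'),
#               UdpTransportTarget(('diameter-host.example.com', 161)), ContextData())
#     next(setCmd(*target, ObjectType(ObjectIdentity(
#         'CISCO-DIAMETER-BASE-PROTOCOL-MIB', 'cdbpLocalConfigReset', 0), 2)))  # reset(2)
#     errInd, errStat, errIdx, varBinds = next(getCmd(*target, ObjectType(ObjectIdentity(
#         'CISCO-DIAMETER-BASE-PROTOCOL-MIB', 'cdbpLocalConfigReset', 0))))
#     print(varBinds[0].prettyPrint())   # e.g. ...::cdbpLocalConfigReset.0 = running
# ------------------------------------------------------------------------------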
cdbpPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1), )
if mibBuilder.loadTexts: cdbpPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerTable.setDescription('The table listing information regarding the discovered or configured Diameter peer servers.')
cdbpPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"))
if mibBuilder.loadTexts: cdbpPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
cdbpPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIndex.setDescription('A number uniquely identifying each Diameter peer with which the host server communicates. Upon reload, cdbpPeerIndex values may be changed.')
cdbpPeerId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 2), SnmpAdminString()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerId.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerId.setDescription('The server identifier for the Diameter peer. It must be unique and non-empty.')
cdbpPeerPortConnect = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerPortConnect.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerPortConnect.setDescription('The connection port this server used to connect to the Diameter peer. If there is no active connection, this value will be zero(0).')
cdbpPeerPortListen = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 4), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535)).clone(3868)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerPortListen.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerPortListen.setDescription('The port the server is listening on.')
cdbpPeerProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("tcp", 1), ("sctp", 2))).clone('tcp')).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerProtocol.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerProtocol.setDescription('The transport protocol (tcp/sctp) the Diameter peer is using. tcp(1) - Transmission Control Protocol sctp(2) - Stream Control Transmission Protocol.')
cdbpPeerSecurity = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("tls", 2), ("ipsec", 3))).clone('other')).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerSecurity.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerSecurity.setDescription('The security the Diameter peer is using. other(1) - Unknown Security Protocol. tls(2) - Transport Layer Security Protocol. ipsec(3) - Internet Protocol Security.')
cdbpPeerFirmwareRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 7), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerFirmwareRevision.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerFirmwareRevision.setDescription('Firmware revision of peer. If no firmware revision, the revision of the Diameter software module may be reported instead.')
cdbpPeerStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 8), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpPeerStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStorageType.setDescription('The storage type for this conceptual row. Only the cdbpPeerPortListen object is writable when the conceptual row is permanent.')
cdbpPeerRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 1, 1, 9), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpPeerRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpPeerId has been set. cdbpPeerId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpPeerRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpPeerRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpPeerRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
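# --- Illustrative example (not part of the generated MIB definitions) --------
# A minimal sketch of using this compiled module offline with pysnmp's MIB
# machinery to resolve a cdbpPeerTable symbol into its OID. It assumes this
# file sits in the current directory (or any other configured MIB source).
#
#     from pysnmp.smi import builder, view, rfc1902
#
#     mibBuilder = builder.MibBuilder()
#     mibBuilder.addMibSources(builder.DirMibSource('.'))
#     mibBuilder.loadModules('CISCO-DIAMETER-BASE-PROTOCOL-MIB')
#     mibView = view.MibViewController(mibBuilder)
#
#     peerId = rfc1902.ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB',
#                                     'cdbpPeerId', 1).resolveWithMib(mibView)
#     print(peerId.getOid())        # 1.3.6.1.4.1.9.10.133.1.3.1.1.2.1
#     print(peerId.prettyPrint())   # CISCO-DIAMETER-BASE-PROTOCOL-MIB::cdbpPeerId.1
# ------------------------------------------------------------------------------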
cdbpPeerIpAddrTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2), )
if mibBuilder.loadTexts: cdbpPeerIpAddrTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddrTable.setDescription('The table listing the Diameter server IP Addresses.')
cdbpPeerIpAddrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddressIndex"))
if mibBuilder.loadTexts: cdbpPeerIpAddrEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddrEntry.setDescription('A row entry representing peer Diameter server IP Addresses.')
cdbpPeerIpAddressIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerIpAddressIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddressIndex.setDescription('A number uniquely identifying each IP address supported by a Diameter peer.')
cdbpPeerIpAddressType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 2), InetAddressType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerIpAddressType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddressType.setDescription('The type of address stored in cdbpPeerIpAddress.')
cdbpPeerIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 2, 1, 3), InetAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: cdbpPeerIpAddress.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerIpAddress.setDescription('The active IP Address(es) used for connections.')
cdbpAppAdvFromPeerTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3), )
if mibBuilder.loadTexts: cdbpAppAdvFromPeerTable.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerTable.setDescription('The table listing the applications advertised by each peer to this host and the types of service supported: accounting, authentication or both.')
cdbpAppAdvFromPeerEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerVendorId"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerIndex"))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerEntry.setDescription('A row entry representing a discovered or configured Diameter peer server.')
cdbpAppAdvFromPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerVendorId.setDescription('The IANA Enterprise Code value assigned to the vendor of the Diameter device.')
cdbpAppAdvFromPeerIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpAppAdvFromPeerIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerIndex.setDescription('A number uniquely identifying the applications advertised as supported from each Diameter peer.')
cdbpAppAdvFromPeerType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 3, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpAppAdvFromPeerType.setStatus('current')
if mibBuilder.loadTexts: cdbpAppAdvFromPeerType.setDescription('The type of services supported for each application, accounting, authentication or both. acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.')
cdbpPeerVendorTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4), )
if mibBuilder.loadTexts: cdbpPeerVendorTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorTable.setDescription('The table listing the Vendor IDs supported by the peer.')
cdbpPeerVendorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorIndex"))
if mibBuilder.loadTexts: cdbpPeerVendorEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorEntry.setDescription('A row entry representing a Vendor ID supported by the peer.')
cdbpPeerVendorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpPeerVendorIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorIndex.setDescription('A number uniquely identifying the Vendor ID supported by the peer. Upon reload, cdbpPeerVendorIndex values may be changed.')
cdbpPeerVendorId = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 9, 10415, 12645))).clone(namedValues=NamedValues(("diameterVendorIetf", 0), ("diameterVendorCisco", 9), ("diameterVendor3gpp", 10415), ("diameterVendorVodafone", 12645))).clone('diameterVendorIetf')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorId.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorId.setDescription('The active vendor ID used for peer connections. diameterVendorIetf(0) - Diameter vendor id ietf diameterVendorCisco(9) - Diameter vendor id cisco diameterVendor3gpp(10415) - Diameter vendor id 3gpp diameterVendorVodafone(12645) - Diameter vendor id vodafone.')
cdbpPeerVendorStorageType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 3), StorageType().clone('nonVolatile')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setReference('Textual Conventions for SMIv2, Section 2.')
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorStorageType.setDescription('The storage type for this conceptual row. None of the objects are writable when the conceptual row is permanent.')
cdbpPeerVendorRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 3, 4, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: cdbpPeerVendorRowStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerVendorRowStatus.setDescription("The status of this conceptual row. To create a row in this table, a manager must set this object to either createAndGo(4) or createAndWait(5). Until instances of all corresponding columns are appropriately configured, the value of the corresponding instance of the cdbpPeerVendorRowStatus column is 'notReady'. In particular, a newly created row cannot be made active until the corresponding cdbpPeerVendorId has been set. Also, a newly created row cannot be made active until the corresponding 'cdbpPeerIndex' has been set. cdbpPeerVendorId may not be modified while the value of this object is active(1): An attempt to set these objects while the value of cdbpPeerVendorRowStatus is active(1) will result in an inconsistentValue error. Entries in this table with cdbpPeerVendorRowStatus equal to active(1) remain in the table until destroyed. Entries in this table with cdbpPeerVendorRowStatus equal to values other than active(1) will be destroyed after timeout (5 minutes).")
cdbpPeerStatsTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1), )
if mibBuilder.loadTexts: cdbpPeerStatsTable.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTable.setDescription('The table listing the Diameter peer statistics.')
cdbpPeerStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIndex"))
if mibBuilder.loadTexts: cdbpPeerStatsEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsEntry.setDescription('A row entry representing a Diameter peer.')
cdbpPeerStatsState = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8))).clone(namedValues=NamedValues(("closed", 1), ("waitConnAck", 2), ("waitICEA", 3), ("elect", 4), ("waitReturns", 5), ("rOpen", 6), ("iOpen", 7), ("closing", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsState.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsState.setDescription('Connection state in the Peer State Machine of the peer with which this Diameter server is communicating. closed(1) - Connection closed with this peer. waitConnAck(2) - Waiting for an acknowledgment from this peer. waitICEA(3) - Waiting for a Capabilities-Exchange- Answer from this peer. elect(4) - When the peer and the server are both trying to bring up a connection with each other at the same time. An election process begins which determines which socket remains open. waitReturns(5) - Waiting for election returns. r-open(6) - Responder transport connection is used for communication. i-open(7) - Initiator transport connection is used for communication. closing(8) - Actively closing and doing cleanup.')
cdbpPeerStatsStateDuration = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 2), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsStateDuration.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsStateDuration.setDescription('This object represents the Peer state duration.')
cdbpPeerStatsLastDiscCause = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("rebooting", 1), ("busy", 2), ("doNotWantToTalk", 3), ("election", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsLastDiscCause.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsLastDiscCause.setDescription("The last cause for a peer's disconnection. rebooting(1) - A scheduled reboot is imminent. busy(2) - The peer's internal resources are constrained, and it has determined that the transport connection needs to be shutdown. doNotWantToTalk(3) - The peer has determined that it does not see a need for the transport connection to exist, since it does not expect any messages to be exchanged in the foreseeable future. electionLost(4) - The peer has determined that it has lost the election process and has therefore disconnected the transport connection.")
cdbpPeerStatsWhoInitDisconnect = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("host", 1), ("peer", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsWhoInitDisconnect.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsWhoInitDisconnect.setDescription('Did the host or peer initiate the disconnect? host(1) - If this server initiated the disconnect. peer(2) - If the peer with which this server was connected initiated the disconnect.')
cdbpPeerStatsDWCurrentStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("okay", 1), ("suspect", 2), ("down", 3), ("reopen", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWCurrentStatus.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWCurrentStatus.setDescription('This object indicates the connection status. okay(1) - Indicates the connection is presumed working. suspect(2) - Indicates the connection is possibly congested or down. down(3) - The peer is no longer reachable, causing the transport connection to be shutdown. reopen(4) - Three watchdog messages are exchanged with accepted round trip times, and the connection to the peer is considered stabilized.')
cdbpPeerStatsTimeoutConnAtmpts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 6), Counter32()).setUnits('attempts').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTimeoutConnAtmpts.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTimeoutConnAtmpts.setDescription('If there is no transport connection with a peer, this is the number of times the server attempts to connect to that peer. This is reset on disconnection.')
cdbpPeerStatsASRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 7), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsASRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsASRsIn.setDescription('Abort-Session-Request messages received from the peer.')
cdbpPeerStatsASRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 8), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsASRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsASRsOut.setDescription('Abort-Session-Request messages sent to the peer.')
cdbpPeerStatsASAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 9), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsASAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsASAsIn.setDescription('Number of Abort-Session-Answer messages received from the peer.')
cdbpPeerStatsASAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 10), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsASAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsASAsOut.setDescription('Number of Abort-Session-Answer messages sent to the peer.')
cdbpPeerStatsACRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 11), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsACRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsACRsIn.setDescription('Number of Accounting-Request messages received from the peer.')
cdbpPeerStatsACRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 12), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsACRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsACRsOut.setDescription('Number of Accounting-Request messages sent to the peer.')
cdbpPeerStatsACAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 13), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsACAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsACAsIn.setDescription('Number of Accounting-Answer messages received from the peer.')
cdbpPeerStatsACAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 14), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsACAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsACAsOut.setDescription('Number of Accounting-Answer messages sent to the peer.')
cdbpPeerStatsCERsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 15), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsCERsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsCERsIn.setDescription('Number of Capabilities-Exchange-Request messages received from the peer.')
cdbpPeerStatsCERsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 16), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsCERsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsCERsOut.setDescription('Number of Capabilities-Exchange-Request messages sent to the peer.')
cdbpPeerStatsCEAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 17), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsCEAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsCEAsIn.setDescription('Number of Capabilities-Exchange-Answer messages received from the peer.')
cdbpPeerStatsCEAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 18), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsCEAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsCEAsOut.setDescription('Number of Capabilities-Exchange-Answer messages sent to the peer.')
cdbpPeerStatsDWRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 19), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWRsIn.setDescription('Number of Device-Watchdog-Request messages received from the peer.')
cdbpPeerStatsDWRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 20), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWRsOut.setDescription('Number of Device-Watchdog-Request messages sent to the peer.')
cdbpPeerStatsDWAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 21), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWAsIn.setDescription('Number of Device-Watchdog-Answer messages received from the peer.')
cdbpPeerStatsDWAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 22), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWAsOut.setDescription('Number of Device-Watchdog-Answer messages sent to the peer.')
cdbpPeerStatsDPRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 23), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDPRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDPRsIn.setDescription('Number of Disconnect-Peer-Request messages received.')
cdbpPeerStatsDPRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 24), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDPRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDPRsOut.setDescription('Number of Disconnect-Peer-Request messages sent.')
cdbpPeerStatsDPAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 25), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDPAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDPAsIn.setDescription('Number of Disconnect-Peer-Answer messages received.')
cdbpPeerStatsDPAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 26), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDPAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDPAsOut.setDescription('Number of Disconnect-Peer-Answer messages sent.')
cdbpPeerStatsRARsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 27), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRARsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRARsIn.setDescription('Number of Re-Auth-Request messages received.')
cdbpPeerStatsRARsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 28), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRARsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRARsOut.setDescription('Number of Re-Auth-Request messages sent.')
cdbpPeerStatsRAAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 29), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRAAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRAAsIn.setDescription('Number of Re-Auth-Answer messages received.')
cdbpPeerStatsRAAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 30), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRAAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRAAsOut.setDescription('Number of Re-Auth-Answer messages sent.')
cdbpPeerStatsSTRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 31), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsSTRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsSTRsIn.setDescription('Number of Session-Termination-Request messages received from the peer.')
cdbpPeerStatsSTRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 32), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsSTRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsSTRsOut.setDescription('Number of Session-Termination-Request messages sent to the peer.')
cdbpPeerStatsSTAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 33), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsSTAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsSTAsIn.setDescription('Number of Session-Termination-Answer messages received from the peer.')
cdbpPeerStatsSTAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 34), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsSTAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsSTAsOut.setDescription('Number of Session-Termination-Answer messages sent to the peer.')
cdbpPeerStatsDWReqTimer = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 35), TimeTicks()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsDWReqTimer.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsDWReqTimer.setDescription('Device-Watchdog Request Timer, which is the interval between packets sent to peers.')
cdbpPeerStatsRedirectEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 36), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsRedirectEvents.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsRedirectEvents.setDescription('Redirect Event count, which is the number of redirects sent from a peer.')
cdbpPeerStatsAccDupRequests = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 37), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccDupRequests.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccDupRequests.setDescription('The number of duplicate Diameter Accounting-Request packets received.')
cdbpPeerStatsMalformedReqsts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 38), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsMalformedReqsts.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsMalformedReqsts.setDescription('The number of malformed Diameter packets received.')
cdbpPeerStatsAccsNotRecorded = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 39), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccsNotRecorded.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccsNotRecorded.setDescription('The number of Diameter Accounting-Request packets which were received and responded to but not recorded.')
cdbpPeerStatsAccRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 40), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccRetrans.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccRetrans.setDescription('The number of Diameter Accounting-Request packets retransmitted to this Diameter server.')
cdbpPeerStatsTotalRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 41), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTotalRetrans.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTotalRetrans.setDescription('The number of Diameter packets retransmitted to this Diameter server, not to include Diameter Accounting-Request packets retransmitted.')
cdbpPeerStatsAccPendReqstsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 42), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccPendReqstsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccPendReqstsOut.setDescription('The number of Diameter Accounting-Request packets sent to this peer that have not yet timed out or received a response. This variable is incremented when an Accounting-Request is sent to this server and decremented due to receipt of an Accounting-Response, a timeout or a retransmission.')
cdbpPeerStatsAccReqstsDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 43), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsAccReqstsDropped.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsAccReqstsDropped.setDescription('The number of Accounting-Requests to this server that have been dropped.')
cdbpPeerStatsHByHDropMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 44), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsHByHDropMessages.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsHByHDropMessages.setDescription('The number of answer messages received with an unknown hop-by-hop identifier. Does not include dropped accounting requests.')
cdbpPeerStatsEToEDupMessages = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 45), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsEToEDupMessages.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsEToEDupMessages.setDescription('The number of duplicate answer messages that are to be locally consumed. Does not include duplicate accounting requests received.')
cdbpPeerStatsUnknownTypes = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 46), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsUnknownTypes.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsUnknownTypes.setDescription('The number of Diameter packets of unknown type which were received.')
cdbpPeerStatsProtocolErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 47), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsProtocolErrors.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsProtocolErrors.setDescription('This object represents the Number of protocol errors returned to peer, but not including redirects.')
cdbpPeerStatsTransientFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 48), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTransientFailures.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTransientFailures.setDescription('This object represents the transient failure count.')
cdbpPeerStatsPermanentFailures = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 49), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsPermanentFailures.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsPermanentFailures.setDescription('This object represents the Number of permanent failures returned to peer.')
cdbpPeerStatsTransportDown = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 4, 1, 1, 50), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpPeerStatsTransportDown.setStatus('current')
if mibBuilder.loadTexts: cdbpPeerStatsTransportDown.setDescription('This object represents the Number of unexpected transport failures.')
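# --- Illustrative example (not part of the generated MIB definitions) --------
# A minimal sketch of polling per-peer health from cdbpPeerStatsTable: the
# peer state machine state, the Device-Watchdog status and the incoming
# Accounting-Request counter. Hostname and community string are placeholders;
# with the MIB loaded, prettyPrint() renders the enumerated states by name.
#
#     from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                               ContextData, ObjectType, ObjectIdentity, nextCmd)
#
#     for errInd, errStat, errIdx, varBinds in nextCmd(
#             SnmpEngine(), CommunityData('public'),
#             UdpTransportTarget(('diameter-host.example.com', 161)), ContextData(),
#             ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB',
#                                       'cdbpPeerStatsState')),
#             ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB',
#                                       'cdbpPeerStatsDWCurrentStatus')),
#             ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB',
#                                       'cdbpPeerStatsACRsIn')),
#             lexicographicMode=False):
#         if errInd or errStat:
#             break
#         print(' | '.join(vb.prettyPrint() for vb in varBinds))
# ------------------------------------------------------------------------------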
cdbpRealmKnownPeersTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1), )
if mibBuilder.loadTexts: cdbpRealmKnownPeersTable.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeersTable.setDescription('The table listing the Diameter realms and known peers.')
cdbpRealmKnownPeersEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteIndex"), (0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeersIndex"))
if mibBuilder.loadTexts: cdbpRealmKnownPeersEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeersEntry.setDescription('A row entry representing a Diameter realm and known peers.')
cdbpRealmKnownPeersIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpRealmKnownPeersIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeersIndex.setDescription('A number uniquely identifying a peer known to this realm. Upon reload, cdbpRealmKnownPeersIndex values may be changed.')
cdbpRealmKnownPeers = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmKnownPeers.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeers.setDescription('The index of the peer this realm knows about. This is an ordered list, where the ordering signifies the order in which the peers are tried. The value is the same as cdbpPeerIndex.')
cdbpRealmKnownPeersChosen = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("roundRobin", 1), ("loadBalance", 2), ("firstPreferred", 3), ("mostRecentFirst", 4), ("other", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmKnownPeersChosen.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmKnownPeersChosen.setDescription('How the realm chooses which peer to send packets to. roundRobin(1) - The peer used for each transaction is selected based on the order in which peers are configured. loadBalance(2) - The peer used for each transaction is based on the load metric (maybe implementation dependent) of all peers defined for the realm, with the least loaded server selected first. firstPreferred(3) - The first defined server is always used for transactions unless failover occurs. mostRecentFirst(4) - The most recently used server is used first for each transaction. other(5) - A peer selection method other than those listed above.')
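# --- Illustrative example (not part of the generated MIB definitions) --------
# A minimal sketch of rebuilding the per-realm peer ordering from
# cdbpRealmKnownPeersTable: each instance is indexed by
# (cdbpRealmMessageRouteIndex, cdbpRealmKnownPeersIndex) and its value is the
# cdbpPeerIndex of the peer, so grouping by the first sub-index yields the
# ordered peer list per realm route entry. Hostname and community string are
# placeholders.
#
#     from pysnmp.hlapi import (SnmpEngine, CommunityData, UdpTransportTarget,
#                               ContextData, ObjectType, ObjectIdentity, nextCmd)
#
#     peers_by_realm_route = {}
#     for errInd, errStat, errIdx, varBinds in nextCmd(
#             SnmpEngine(), CommunityData('public'),
#             UdpTransportTarget(('diameter-host.example.com', 161)), ContextData(),
#             ObjectType(ObjectIdentity('CISCO-DIAMETER-BASE-PROTOCOL-MIB',
#                                       'cdbpRealmKnownPeers')),
#             lexicographicMode=False):
#         if errInd or errStat:
#             break
#         for name, value in varBinds:
#             oid = name.getOid()
#             peers_by_realm_route.setdefault(int(oid[-2]), []).append(int(value))
#     print(peers_by_realm_route)   # {routeIndex: [peerIndex, ...], ...}
# ------------------------------------------------------------------------------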
cdbpRealmMessageRouteTable = MibTable((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1), )
if mibBuilder.loadTexts: cdbpRealmMessageRouteTable.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteTable.setDescription('The table listing the Diameter realm-based message route information.')
cdbpRealmMessageRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1), ).setIndexNames((0, "CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteIndex"))
if mibBuilder.loadTexts: cdbpRealmMessageRouteEntry.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteEntry.setDescription('A row entry representing a Diameter realm based message route server.')
cdbpRealmMessageRouteIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 1), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295)))
if mibBuilder.loadTexts: cdbpRealmMessageRouteIndex.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteIndex.setDescription('A number uniquely identifying each realm.')
cdbpRealmMessageRouteRealm = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 2), SnmpAdminString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRealm.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRealm.setDescription('This object represents the realm name.')
cdbpRealmMessageRouteApp = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 3), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(1, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteApp.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteApp.setDescription('Application id used to route packets to this realm.')
cdbpRealmMessageRouteType = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("acct", 1), ("auth", 2), ("both", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteType.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteType.setDescription('The types of service supported for each realm application: accounting, authentication or both. acct(1) - accounting auth(2) - authentication both(3) - both accounting and authentication.')
cdbpRealmMessageRouteAction = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("local", 1), ("relay", 2), ("proxy", 3), ("redirect", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteAction.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteAction.setDescription('The action is used to identify how a message should be treated based on the realm, application and type. local(1) - Diameter messages that resolve to a route entry with the Local Action set to Local can be satisfied locally, and do not need to be routed to another server. relay(2) - All Diameter messages that fall within this category MUST be routed to a next-hop server, without modifying any non-routing AVPs. proxy(3) - All Diameter messages that fall within this category MUST be routed to a next-hop server. redirect(4) - Diameter messages that fall within this category MUST have the identity of the home Diameter server(s) appended, and returned to the sender of the message.')
cdbpRealmMessageRouteACRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 6), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsIn.setDescription('Number of Accounting-Request messages received from the realm.')
cdbpRealmMessageRouteACRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 7), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteACRsOut.setDescription('Number of Accounting-Request messages sent to the realm.')
cdbpRealmMessageRouteACAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 8), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsIn.setDescription('Number of Accounting-Answer messages received from the realm.')
cdbpRealmMessageRouteACAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 9), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteACAsOut.setDescription('Number of Accounting-Answer messages sent to the realm.')
cdbpRealmMessageRouteRARsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 10), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsIn.setDescription('Number of Re-Auth-Request messages received from the realm.')
cdbpRealmMessageRouteRARsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 11), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRARsOut.setDescription('Number of Re-Auth-Request messages sent to the realm.')
cdbpRealmMessageRouteRAAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 12), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsIn.setDescription('Number of Re-Auth-Answer messages received from the realm.')
cdbpRealmMessageRouteRAAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 13), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteRAAsOut.setDescription('Number of Re-Auth-Answer messages sent to the realm.')
cdbpRealmMessageRouteSTRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 14), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsIn.setDescription('Number of Session-Termination-Request messages received from the realm.')
cdbpRealmMessageRouteSTRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 15), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTRsOut.setDescription('Number of Session-Termination-Request messages sent to the realm.')
cdbpRealmMessageRouteSTAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 16), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsIn.setDescription('Number of Session-Termination-Answer messages received from the realm.')
cdbpRealmMessageRouteSTAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 17), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteSTAsOut.setDescription('Number of Session-Termination-Answer messages sent to the realm.')
cdbpRealmMessageRouteASRsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 18), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsIn.setDescription('Number of Abort-Session-Request messages received from the realm.')
cdbpRealmMessageRouteASRsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 19), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteASRsOut.setDescription('Number of Abort-Session-Request messages sent to the realm.')
cdbpRealmMessageRouteASAsIn = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 20), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsIn.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsIn.setDescription('Number of Abort-Session-Answer messages received from the realm.')
cdbpRealmMessageRouteASAsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 21), Counter32()).setUnits('messages').setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteASAsOut.setDescription('Number of Abort-Session-Answer messages sent to the realm.')
cdbpRealmMessageRouteAccRetrans = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteAccRetrans.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteAccRetrans.setDescription('The number of Diameter accounting packets retransmitted to this realm.')
cdbpRealmMessageRouteAccDupReqsts = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteAccDupReqsts.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteAccDupReqsts.setDescription('The number of duplicate Diameter accounting packets sent to this realm.')
cdbpRealmMessageRoutePendReqstsOut = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 24), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRoutePendReqstsOut.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRoutePendReqstsOut.setDescription('The number of Diameter Accounting-Request packets sent to this peer that have not yet timed out or received a response. This variable is incremented when an Accounting-Request is sent to this server and decremented due to receipt of an Accounting-Response, a timeout or a retransmission.')
cdbpRealmMessageRouteReqstsDrop = MibTableColumn((1, 3, 6, 1, 4, 1, 9, 10, 133, 1, 6, 1, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: cdbpRealmMessageRouteReqstsDrop.setStatus('current')
if mibBuilder.loadTexts: cdbpRealmMessageRouteReqstsDrop.setDescription('The number of requests dropped by this realm.')
ciscoDiameterBasePMIBCompliances = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 1))
ciscoDiameterBasePMIBGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2))
ciscoDiameterBasePMIBCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 1, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerStatsGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBNotificationsGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBTrapCfgGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBLocalStatsSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBPeerStatsSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBRealmCfgSkippedGroup"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiameterBasePMIBRealmStatsSkippedGroup"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBCompliance = ciscoDiameterBasePMIBCompliance.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBCompliance.setDescription('The compliance statement for Diameter Base Protocol entities.')
ciscoDiameterBasePMIBLocalCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 1)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalRealm"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyEnabled"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyInfraState"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRedundancyLastSwitchover"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalOriginHost"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalVendorRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBLocalCfgGroup = ciscoDiameterBasePMIBLocalCfgGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalCfgGroup.setDescription('A collection of objects providing configuration common to the server.')
ciscoDiameterBasePMIBPeerCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 2)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerPortConnect"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerPortListen"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerProtocol"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerSecurity"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerFirmwareRevision"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerRowStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddressType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerIpAddress"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerVendorRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBPeerCfgGroup = ciscoDiameterBasePMIBPeerCfgGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerCfgGroup.setDescription('A collection of objects providing configuration of the Diameter peers.')
ciscoDiameterBasePMIBPeerStatsGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 3)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsState"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsStateDuration"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsLastDiscCause"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsWhoInitDisconnect"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWCurrentStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTimeoutConnAtmpts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsASAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsACAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCERsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCERsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCEAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsCEAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDPAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRARsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRARsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRAAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRAAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsSTAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWReqTimer"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRedirectEvents"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccDupRequests"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsMalformedReqsts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccsNotRecorded"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTotalRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccPendReqstsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccReqstsDropped"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsHByHDropMessages"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsEToEDupMessages"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsUnknownTypes"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsProtocolErrors"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransientFailures"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsPermanentFailures"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsTransportDown"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBPeerStatsGroup = ciscoDiameterBasePMIBPeerStatsGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerStatsGroup.setDescription('A collection of objects providing statistics of the Diameter peers.')
ciscoDiameterBasePMIBNotificationsGroup = NotificationGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 4)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtProtocolErrorNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtTransientFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPermanentFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPeerConnectionDownNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtPeerConnectionUpNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBNotificationsGroup = ciscoDiameterBasePMIBNotificationsGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBNotificationsGroup.setDescription('The set of notifications which an agent is required to implement.')
ciscoDiameterBasePMIBTrapCfgGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 5)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnableProtocolErrorNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnableTransientFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePermanentFailureNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePeerConnectionDownNotif"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "ciscoDiaBaseProtEnablePeerConnectionUpNotif"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBTrapCfgGroup = ciscoDiameterBasePMIBTrapCfgGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBTrapCfgGroup.setDescription('A collection of objects providing configuration for base protocol notifications.')
ciscoDiameterBasePMIBLocalCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 6)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalId"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddrType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalIpAddress"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalTcpListenPort"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalSctpListenPort"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalUpTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalResetTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalConfigReset"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalApplRowStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerServices"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerStorageType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvToPeerRowStatus"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBLocalCfgSkippedGroup = ciscoDiameterBasePMIBLocalCfgSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalCfgSkippedGroup.setDescription('A collection of objects providing configuration common to the server.')
ciscoDiameterBasePMIBLocalStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 7)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalPacketsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalStatsTotalUpTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalResetTime"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpLocalConfigReset"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBLocalStatsSkippedGroup = ciscoDiameterBasePMIBLocalStatsSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBLocalStatsSkippedGroup.setDescription('A collection of objects providing statistics common to the server.')
ciscoDiameterBasePMIBPeerCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 8)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpAppAdvFromPeerType"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBPeerCfgSkippedGroup = ciscoDiameterBasePMIBPeerCfgSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerCfgSkippedGroup.setDescription('A collection of objects providing configuration for Diameter peers.')
ciscoDiameterBasePMIBPeerStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 9)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWCurrentStatus"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsDWReqTimer"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsRedirectEvents"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsAccDupRequests"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpPeerStatsEToEDupMessages"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBPeerStatsSkippedGroup = ciscoDiameterBasePMIBPeerStatsSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBPeerStatsSkippedGroup.setDescription('A collection of objects providing statistics of Diameter peers.')
ciscoDiameterBasePMIBRealmCfgSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 10)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeers"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmKnownPeersChosen"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBRealmCfgSkippedGroup = ciscoDiameterBasePMIBRealmCfgSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBRealmCfgSkippedGroup.setDescription('A collection of objects providing configuration for realm message routing.')
ciscoDiameterBasePMIBRealmStatsSkippedGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 9, 10, 133, 2, 2, 11)).setObjects(("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRealm"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteApp"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteType"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAction"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteACAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRARsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRARsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRAAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteRAAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteSTAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASRsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASRsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASAsIn"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteASAsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAccRetrans"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteAccDupReqsts"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRoutePendReqstsOut"), ("CISCO-DIAMETER-BASE-PROTOCOL-MIB", "cdbpRealmMessageRouteReqstsDrop"))
if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0):
ciscoDiameterBasePMIBRealmStatsSkippedGroup = ciscoDiameterBasePMIBRealmStatsSkippedGroup.setStatus('current')
if mibBuilder.loadTexts: ciscoDiameterBasePMIBRealmStatsSkippedGroup.setDescription('A collection of objects providing statistics of realm message routing.')
mibBuilder.exportSymbols("CISCO-DIAMETER-BASE-PROTOCOL-MIB", cdbpRealmMessageRouteACRsIn=cdbpRealmMessageRouteACRsIn, cdbpRealmStats=cdbpRealmStats, ciscoDiameterBasePMIBCompliance=ciscoDiameterBasePMIBCompliance, cdbpPeerStatsSTAsOut=cdbpPeerStatsSTAsOut, cdbpPeerProtocol=cdbpPeerProtocol, cdbpPeerTable=cdbpPeerTable, ciscoDiaBaseProtPeerConnectionDownNotif=ciscoDiaBaseProtPeerConnectionDownNotif, cdbpLocalVendorIndex=cdbpLocalVendorIndex, cdbpPeerStatsDWReqTimer=cdbpPeerStatsDWReqTimer, cdbpPeerStatsACAsIn=cdbpPeerStatsACAsIn, cdbpPeerStatsDWRsOut=cdbpPeerStatsDWRsOut, ciscoDiaBaseProtEnablePeerConnectionDownNotif=ciscoDiaBaseProtEnablePeerConnectionDownNotif, cdbpPeerStatsDPAsIn=cdbpPeerStatsDPAsIn, cdbpPeerId=cdbpPeerId, cdbpAppAdvFromPeerTable=cdbpAppAdvFromPeerTable, cdbpRealmMessageRouteSTRsIn=cdbpRealmMessageRouteSTRsIn, cdbpRealmMessageRouteApp=cdbpRealmMessageRouteApp, cdbpLocalVendorEntry=cdbpLocalVendorEntry, cdbpRealmMessageRouteAccDupReqsts=cdbpRealmMessageRouteAccDupReqsts, cdbpAppAdvToPeerVendorId=cdbpAppAdvToPeerVendorId, cdbpLocalIpAddrType=cdbpLocalIpAddrType, cdbpPeerSecurity=cdbpPeerSecurity, ciscoDiaBaseProtTransientFailureNotif=ciscoDiaBaseProtTransientFailureNotif, cdbpPeerStatsAccPendReqstsOut=cdbpPeerStatsAccPendReqstsOut, ciscoDiameterBasePMIBLocalCfgGroup=ciscoDiameterBasePMIBLocalCfgGroup, cdbpRealmMessageRouteRealm=cdbpRealmMessageRouteRealm, cdbpPeerEntry=cdbpPeerEntry, cdbpRedundancyLastSwitchover=cdbpRedundancyLastSwitchover, cdbpRealmMessageRouteAction=cdbpRealmMessageRouteAction, cdbpPeerIpAddrTable=cdbpPeerIpAddrTable, cdbpPeerStatsSTAsIn=cdbpPeerStatsSTAsIn, cdbpRealmCfgs=cdbpRealmCfgs, cdbpPeerStatsTransientFailures=cdbpPeerStatsTransientFailures, cdbpRealmKnownPeersIndex=cdbpRealmKnownPeersIndex, cdbpLocalVendorTable=cdbpLocalVendorTable, cdbpPeerStorageType=cdbpPeerStorageType, cdbpAppAdvFromPeerVendorId=cdbpAppAdvFromPeerVendorId, cdbpPeerStatsRAAsOut=cdbpPeerStatsRAAsOut, cdbpLocalId=cdbpLocalId, ciscoDiameterBasePMIBNotifs=ciscoDiameterBasePMIBNotifs, ciscoDiameterBasePMIBGroups=ciscoDiameterBasePMIBGroups, cdbpPeerStats=cdbpPeerStats, cdbpRealmMessageRouteASRsOut=cdbpRealmMessageRouteASRsOut, cdbpRealmMessageRouteAccRetrans=cdbpRealmMessageRouteAccRetrans, cdbpAppAdvToPeerServices=cdbpAppAdvToPeerServices, cdbpPeerStatsACRsOut=cdbpPeerStatsACRsOut, cdbpRedundancyEnabled=cdbpRedundancyEnabled, cdbpPeerVendorRowStatus=cdbpPeerVendorRowStatus, cdbpPeerStatsUnknownTypes=cdbpPeerStatsUnknownTypes, ciscoDiameterBasePMIBCompliances=ciscoDiameterBasePMIBCompliances, cdbpPeerStatsEToEDupMessages=cdbpPeerStatsEToEDupMessages, cdbpPeerVendorEntry=cdbpPeerVendorEntry, ciscoDiaBaseProtEnableProtocolErrorNotif=ciscoDiaBaseProtEnableProtocolErrorNotif, cdbpPeerStatsTable=cdbpPeerStatsTable, cdbpPeerIpAddrEntry=cdbpPeerIpAddrEntry, ciscoDiameterBasePMIBConform=ciscoDiameterBasePMIBConform, cdbpPeerStatsSTRsOut=cdbpPeerStatsSTRsOut, cdbpRealmMessageRouteIndex=cdbpRealmMessageRouteIndex, cdbpAppAdvToPeerIndex=cdbpAppAdvToPeerIndex, ciscoDiameterBasePMIBPeerStatsGroup=ciscoDiameterBasePMIBPeerStatsGroup, ciscoDiaBaseProtEnablePeerConnectionUpNotif=ciscoDiaBaseProtEnablePeerConnectionUpNotif, cdbpLocalApplRowStatus=cdbpLocalApplRowStatus, ciscoDiaBaseProtEnablePermanentFailureNotif=ciscoDiaBaseProtEnablePermanentFailureNotif, ciscoDiameterBasePMIBPeerStatsSkippedGroup=ciscoDiameterBasePMIBPeerStatsSkippedGroup, PYSNMP_MODULE_ID=ciscoDiameterBasePMIB, ciscoDiameterBasePMIBObjects=ciscoDiameterBasePMIBObjects, cdbpLocalRealm=cdbpLocalRealm, 
cdbpLocalVendorId=cdbpLocalVendorId, cdbpLocalResetTime=cdbpLocalResetTime, ciscoDiameterBasePMIBRealmCfgSkippedGroup=ciscoDiameterBasePMIBRealmCfgSkippedGroup, cdbpPeerStatsDPRsIn=cdbpPeerStatsDPRsIn, cdbpPeerStatsEntry=cdbpPeerStatsEntry, cdbpPeerStatsAccDupRequests=cdbpPeerStatsAccDupRequests, cdbpRealmMessageRoutePendReqstsOut=cdbpRealmMessageRoutePendReqstsOut, cdbpTrapCfgs=cdbpTrapCfgs, ciscoDiameterBasePMIBTrapCfgGroup=ciscoDiameterBasePMIBTrapCfgGroup, cdbpAppAdvFromPeerType=cdbpAppAdvFromPeerType, cdbpPeerIndex=cdbpPeerIndex, cdbpPeerVendorId=cdbpPeerVendorId, cdbpAppAdvToPeerRowStatus=cdbpAppAdvToPeerRowStatus, cdbpLocalStatsTotalPacketsOut=cdbpLocalStatsTotalPacketsOut, cdbpPeerStatsHByHDropMessages=cdbpPeerStatsHByHDropMessages, cdbpRealmMessageRouteASAsIn=cdbpRealmMessageRouteASAsIn, cdbpLocalStats=cdbpLocalStats, cdbpPeerStatsRedirectEvents=cdbpPeerStatsRedirectEvents, cdbpPeerStatsASRsOut=cdbpPeerStatsASRsOut, cdbpPeerStatsTotalRetrans=cdbpPeerStatsTotalRetrans, cdbpRealmMessageRouteEntry=cdbpRealmMessageRouteEntry, cdbpPeerStatsState=cdbpPeerStatsState, cdbpPeerStatsSTRsIn=cdbpPeerStatsSTRsIn, cdbpPeerFirmwareRevision=cdbpPeerFirmwareRevision, cdbpLocalTcpListenPort=cdbpLocalTcpListenPort, cdbpPeerStatsCERsOut=cdbpPeerStatsCERsOut, cdbpLocalApplStorageType=cdbpLocalApplStorageType, cdbpPeerStatsAccRetrans=cdbpPeerStatsAccRetrans, cdbpPeerStatsPermanentFailures=cdbpPeerStatsPermanentFailures, cdbpLocalIpAddrIndex=cdbpLocalIpAddrIndex, cdbpRealmKnownPeersEntry=cdbpRealmKnownPeersEntry, cdbpPeerStatsDWAsIn=cdbpPeerStatsDWAsIn, cdbpLocalStatsTotalUpTime=cdbpLocalStatsTotalUpTime, cdbpPeerStatsDPAsOut=cdbpPeerStatsDPAsOut, ciscoDiaBaseProtPermanentFailureNotif=ciscoDiaBaseProtPermanentFailureNotif, ciscoDiameterBasePMIBLocalStatsSkippedGroup=ciscoDiameterBasePMIBLocalStatsSkippedGroup, cdbpPeerStatsRAAsIn=cdbpPeerStatsRAAsIn, cdbpPeerStatsStateDuration=cdbpPeerStatsStateDuration, cdbpPeerStatsProtocolErrors=cdbpPeerStatsProtocolErrors, ciscoDiameterBasePMIBNotificationsGroup=ciscoDiameterBasePMIBNotificationsGroup, cdbpRealmMessageRouteACRsOut=cdbpRealmMessageRouteACRsOut, cdbpLocalApplEntry=cdbpLocalApplEntry, cdbpPeerStatsDWAsOut=cdbpPeerStatsDWAsOut, cdbpPeerStatsAccReqstsDropped=cdbpPeerStatsAccReqstsDropped, cdbpRealmKnownPeersTable=cdbpRealmKnownPeersTable, cdbpPeerStatsAccsNotRecorded=cdbpPeerStatsAccsNotRecorded, cdbpLocalVendorRowStatus=cdbpLocalVendorRowStatus, cdbpLocalIpAddress=cdbpLocalIpAddress, cdbpLocalIpAddrEntry=cdbpLocalIpAddrEntry, cdbpRealmMessageRouteRARsIn=cdbpRealmMessageRouteRARsIn, cdbpRealmMessageRouteACAsIn=cdbpRealmMessageRouteACAsIn, cdbpLocalOriginHost=cdbpLocalOriginHost, cdbpRealmMessageRouteRAAsIn=cdbpRealmMessageRouteRAAsIn, cdbpRealmMessageRouteRAAsOut=cdbpRealmMessageRouteRAAsOut, ciscoDiameterBasePMIBPeerCfgSkippedGroup=ciscoDiameterBasePMIBPeerCfgSkippedGroup, cdbpPeerPortConnect=cdbpPeerPortConnect, cdbpPeerStatsWhoInitDisconnect=cdbpPeerStatsWhoInitDisconnect, cdbpPeerStatsCEAsOut=cdbpPeerStatsCEAsOut, cdbpAppAdvFromPeerIndex=cdbpAppAdvFromPeerIndex, cdbpRealmMessageRouteASRsIn=cdbpRealmMessageRouteASRsIn, cdbpPeerStatsLastDiscCause=cdbpPeerStatsLastDiscCause, cdbpPeerStatsASAsIn=cdbpPeerStatsASAsIn, cdbpPeerIpAddressType=cdbpPeerIpAddressType, cdbpPeerStatsRARsOut=cdbpPeerStatsRARsOut, cdbpPeerStatsDWCurrentStatus=cdbpPeerStatsDWCurrentStatus, cdbpRealmMessageRouteSTRsOut=cdbpRealmMessageRouteSTRsOut, cdbpLocalCfgs=cdbpLocalCfgs, cdbpRealmMessageRouteReqstsDrop=cdbpRealmMessageRouteReqstsDrop, 
cdbpLocalStatsTotalPacketsIn=cdbpLocalStatsTotalPacketsIn, cdbpPeerCfgs=cdbpPeerCfgs, cdbpRealmKnownPeers=cdbpRealmKnownPeers, cdbpPeerStatsMalformedReqsts=cdbpPeerStatsMalformedReqsts, cdbpRealmMessageRouteRARsOut=cdbpRealmMessageRouteRARsOut, cdbpRealmMessageRouteSTAsOut=cdbpRealmMessageRouteSTAsOut, cdbpLocalIpAddrTable=cdbpLocalIpAddrTable, cdbpPeerStatsACRsIn=cdbpPeerStatsACRsIn, ciscoDiameterBasePMIBRealmStatsSkippedGroup=ciscoDiameterBasePMIBRealmStatsSkippedGroup, cdbpRealmKnownPeersChosen=cdbpRealmKnownPeersChosen, cdbpLocalApplTable=cdbpLocalApplTable, cdbpRealmMessageRouteType=cdbpRealmMessageRouteType, cdbpPeerStatsASRsIn=cdbpPeerStatsASRsIn, cdbpPeerStatsTransportDown=cdbpPeerStatsTransportDown, cdbpRedundancyInfraState=cdbpRedundancyInfraState, ciscoDiameterBasePMIBPeerCfgGroup=ciscoDiameterBasePMIBPeerCfgGroup, cdbpRealmMessageRouteACAsOut=cdbpRealmMessageRouteACAsOut, cdbpAppAdvFromPeerEntry=cdbpAppAdvFromPeerEntry, ciscoDiaBaseProtEnableTransientFailureNotif=ciscoDiaBaseProtEnableTransientFailureNotif, cdbpLocalConfigReset=cdbpLocalConfigReset, cdbpPeerIpAddress=cdbpPeerIpAddress, cdbpAppAdvToPeerTable=cdbpAppAdvToPeerTable, cdbpPeerStatsTimeoutConnAtmpts=cdbpPeerStatsTimeoutConnAtmpts, cdbpPeerStatsDWRsIn=cdbpPeerStatsDWRsIn, cdbpRealmMessageRouteTable=cdbpRealmMessageRouteTable, cdbpPeerStatsRARsIn=cdbpPeerStatsRARsIn, cdbpPeerStatsACAsOut=cdbpPeerStatsACAsOut, cdbpRealmMessageRouteSTAsIn=cdbpRealmMessageRouteSTAsIn, cdbpPeerStatsASAsOut=cdbpPeerStatsASAsOut, cdbpPeerStatsDPRsOut=cdbpPeerStatsDPRsOut, cdbpPeerVendorTable=cdbpPeerVendorTable, ciscoDiaBaseProtPeerConnectionUpNotif=ciscoDiaBaseProtPeerConnectionUpNotif, cdbpPeerVendorStorageType=cdbpPeerVendorStorageType, cdbpPeerVendorIndex=cdbpPeerVendorIndex, cdbpPeerStatsCERsIn=cdbpPeerStatsCERsIn, cdbpRealmMessageRouteASAsOut=cdbpRealmMessageRouteASAsOut, ciscoDiameterBasePMIBLocalCfgSkippedGroup=ciscoDiameterBasePMIBLocalCfgSkippedGroup, cdbpPeerPortListen=cdbpPeerPortListen, cdbpAppAdvToPeerEntry=cdbpAppAdvToPeerEntry, ciscoDiaBaseProtProtocolErrorNotif=ciscoDiaBaseProtProtocolErrorNotif, ciscoDiameterBasePMIB=ciscoDiameterBasePMIB, cdbpLocalApplIndex=cdbpLocalApplIndex, cdbpAppAdvToPeerStorageType=cdbpAppAdvToPeerStorageType, cdbpLocalVendorStorageType=cdbpLocalVendorStorageType, cdbpPeerIpAddressIndex=cdbpPeerIpAddressIndex, cdbpPeerRowStatus=cdbpPeerRowStatus, cdbpLocalSctpListenPort=cdbpLocalSctpListenPort, cdbpPeerStatsCEAsIn=cdbpPeerStatsCEAsIn)
| 174.617021 | 9,504 | 0.799176 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 40,008 | 0.406239 |
48ff11c606361c503d4ae242b33d2e5d2c9cf908 | 1,337 | py | Python | py_build/funcs.py | Aesonus/py-build | 790a750492b0f6ecd52f6f564d3aa4522e255406 | [
"MIT"
]
| null | null | null | py_build/funcs.py | Aesonus/py-build | 790a750492b0f6ecd52f6f564d3aa4522e255406 | [
"MIT"
]
| null | null | null | py_build/funcs.py | Aesonus/py-build | 790a750492b0f6ecd52f6f564d3aa4522e255406 | [
"MIT"
]
| null | null | null | from __future__ import annotations
from typing import Callable, Sequence, TYPE_CHECKING
import functools
if TYPE_CHECKING:
from .build import BuildStepCallable
def split_step_name(name: str, new=' ', old='_'):
return name.replace(old, new).capitalize()
def print_step_name(formatter=split_step_name, args: Sequence=()):
"""Gets a decorator that formats the name of the build step and prints it"""
fmt_args = args
def format_step_name(func: Callable):
@functools.wraps(func)
def decorated(*args, **kwargs):
print(formatter(func.__name__, *fmt_args))
return func(*args, **kwargs)
return decorated
return format_step_name
def print_step_doc():
def decorate_with(func: Callable):
@functools.wraps(func)
def output_func_doc(*args, **kwargs):
print(func.__doc__)
            return func(*args, **kwargs)
return output_func_doc
return decorate_with
def composed(*decorators: BuildStepCallable) -> BuildStepCallable:
"""
    Compose multiple decorators into a single decorator. Useful for defining
    specific outputs and progress reports for a build step and reusing them.
"""
def decorated(func: BuildStepCallable):
for decorator in reversed(decorators):
func = decorator(func)
return func
return decorated
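
# Illustrative usage sketch (added for clarity; not part of the original
# module). The build step `build_docs` below is hypothetical and only shows
# how the decorators above might be combined with `composed`.
if __name__ == "__main__":
    verbose_step = composed(print_step_name(), print_step_doc())

    @verbose_step
    def build_docs():
        """Render the project documentation."""
        return True

    build_docs()  # prints "Build docs", then the docstring, then runs the step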
| 33.425 | 80 | 0.682872 | 0 | 0 | 0 | 0 | 298 | 0.222887 | 0 | 0 | 217 | 0.162304 |
48ff6f626f5b448c258b452afb93725c786ec289 | 3,713 | py | Python | src/jellyroll/managers.py | jacobian-archive/jellyroll | 02751b3108b6f6ae732a801d42ca3c85cc759978 | [
"BSD-3-Clause"
]
| 3 | 2015-03-02T06:34:45.000Z | 2016-11-24T18:53:59.000Z | src/jellyroll/managers.py | jacobian/jellyroll | 02751b3108b6f6ae732a801d42ca3c85cc759978 | [
"BSD-3-Clause"
]
| null | null | null | src/jellyroll/managers.py | jacobian/jellyroll | 02751b3108b6f6ae732a801d42ca3c85cc759978 | [
"BSD-3-Clause"
]
| null | null | null | import datetime
from django.db import models
from django.db.models import signals
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_unicode
from tagging.fields import TagField
class ItemManager(models.Manager):
def __init__(self):
super(ItemManager, self).__init__()
self.models_by_name = {}
def create_or_update(self, instance, timestamp=None, url=None, tags="", source="INTERACTIVE", source_id="", **kwargs):
"""
        Create or update an Item from some instance.
"""
# If the instance hasn't already been saved, save it first. This
# requires disconnecting the post-save signal that might be sent to
# this function (otherwise we could get an infinite loop).
if instance._get_pk_val() is None:
try:
signals.post_save.disconnect(self.create_or_update, sender=type(instance))
except Exception, err:
reconnect = False
else:
reconnect = True
instance.save()
if reconnect:
signals.post_save.connect(self.create_or_update, sender=type(instance))
# Make sure the item "should" be registered.
if not getattr(instance, "jellyrollable", True):
return
# Check to see if the timestamp is being updated, possibly pulling
# the timestamp from the instance.
if hasattr(instance, "timestamp"):
timestamp = instance.timestamp
if timestamp is None:
update_timestamp = False
timestamp = datetime.datetime.now()
else:
update_timestamp = True
# Ditto for tags.
if not tags:
for f in instance._meta.fields:
if isinstance(f, TagField):
tags = getattr(instance, f.attname)
break
if not url:
if hasattr(instance,'url'):
url = instance.url
# Create the Item object.
ctype = ContentType.objects.get_for_model(instance)
item, created = self.get_or_create(
content_type = ctype,
object_id = force_unicode(instance._get_pk_val()),
defaults = dict(
timestamp = timestamp,
source = source,
source_id = source_id,
tags = tags,
url = url,
)
)
item.tags = tags
item.source = source
item.source_id = source_id
if update_timestamp:
item.timestamp = timestamp
# Save and return the item.
item.save()
return item
def follow_model(self, model):
"""
Follow a particular model class, updating associated Items automatically.
"""
self.models_by_name[model.__name__.lower()] = model
signals.post_save.connect(self.create_or_update, sender=model)
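    # Illustrative usage note (added; `Bookmark` is a hypothetical model):
    # calling ``Item.objects.follow_model(Bookmark)`` during app setup makes
    # every saved Bookmark automatically create or update its Item through
    # the post_save signal wired up above.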
def get_for_model(self, model):
"""
Return a QuerySet of only items of a certain type.
"""
return self.filter(content_type=ContentType.objects.get_for_model(model))
def get_last_update_of_model(self, model, **kwargs):
"""
Return the last time a given model's items were updated. Returns the
epoch if the items were never updated.
"""
qs = self.get_for_model(model)
if kwargs:
qs = qs.filter(**kwargs)
try:
return qs.order_by('-timestamp')[0].timestamp
except IndexError:
return datetime.datetime.fromtimestamp(0)
| 35.361905 | 122 | 0.578777 | 3,486 | 0.938863 | 0 | 0 | 0 | 0 | 0 | 0 | 839 | 0.225963 |
5b011773dfebfb2a161d58f218cd80c611a2ea9c | 578 | py | Python | app_metrics.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
]
| 33 | 2018-01-16T02:04:51.000Z | 2022-03-22T22:57:29.000Z | app_metrics.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
]
| 7 | 2019-06-16T22:02:03.000Z | 2021-10-02T13:45:31.000Z | app_metrics.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
]
| 14 | 2019-06-01T21:39:24.000Z | 2022-03-14T17:56:43.000Z | """
metrics application instance
~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import os
from byceps.config import ConfigurationError
from byceps.metrics.application import create_app
ENV_VAR_NAME_DATABASE_URI = 'DATABASE_URI'
database_uri = os.environ.get(ENV_VAR_NAME_DATABASE_URI)
if not database_uri:
raise ConfigurationError(
f"No database URI was specified via the '{ENV_VAR_NAME_DATABASE_URI}' "
"environment variable.",
)
app = create_app(database_uri)
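
# Illustrative note (added): `app` is a WSGI application object, so this module
# is meant to be handed to a WSGI server. A hypothetical invocation might be:
#   DATABASE_URI=postgresql://byceps:secret@localhost/byceps gunicorn app_metrics:app
# (server choice and URI here are assumptions, not part of the project docs).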
| 22.230769 | 79 | 0.730104 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 272 | 0.470588 |
5b0196e4037e9465e0b4a7171647fde301968ecb | 1,927 | py | Python | mysql_tests/test_schema.py | maestro-1/gino | 3f06b4a9948a7657044686ae738ef3509b4729e1 | [
"BSD-3-Clause"
]
| 1,376 | 2019-12-26T23:41:36.000Z | 2022-03-31T11:08:04.000Z | mysql_tests/test_schema.py | maestro-1/gino | 3f06b4a9948a7657044686ae738ef3509b4729e1 | [
"BSD-3-Clause"
]
| 522 | 2017-07-22T00:49:06.000Z | 2019-12-25T17:02:22.000Z | mysql_tests/test_schema.py | maestro-1/gino | 3f06b4a9948a7657044686ae738ef3509b4729e1 | [
"BSD-3-Clause"
]
| 89 | 2020-01-02T02:12:37.000Z | 2022-03-21T14:14:51.000Z | from enum import Enum
import pytest
import gino
from gino.dialects.aiomysql import AsyncEnum
pytestmark = pytest.mark.asyncio
db = gino.Gino()
class MyEnum(Enum):
ONE = "one"
TWO = "two"
class Blog(db.Model):
__tablename__ = "s_blog"
id = db.Column(db.BigInteger(), primary_key=True)
title = db.Column(db.Unicode(255), index=True, comment="Title Comment")
visits = db.Column(db.BigInteger(), default=0)
comment_id = db.Column(db.ForeignKey("s_comment.id"))
number = db.Column(db.Enum(MyEnum), nullable=False, default=MyEnum.TWO)
number2 = db.Column(AsyncEnum(MyEnum), nullable=False, default=MyEnum.TWO)
class Comment(db.Model):
__tablename__ = "s_comment"
id = db.Column(db.BigInteger(), primary_key=True)
blog_id = db.Column(db.ForeignKey("s_blog.id", name="blog_id_fk"))
blog_seq = db.Sequence("blog_seq", metadata=db, schema="schema_test")
async def test(engine, define=True):
async with engine.acquire() as conn:
assert not await engine.dialect.has_table(conn, "non_exist")
Blog.__table__.comment = "Blog Comment"
db.bind = engine
await db.gino.create_all()
await Blog.number.type.create_async(engine, checkfirst=True)
await Blog.number2.type.create_async(engine, checkfirst=True)
await db.gino.create_all(tables=[Blog.__table__], checkfirst=True)
await blog_seq.gino.create(checkfirst=True)
await Blog.__table__.gino.create(checkfirst=True)
await db.gino.drop_all()
await db.gino.drop_all(tables=[Blog.__table__], checkfirst=True)
await Blog.__table__.gino.drop(checkfirst=True)
await blog_seq.gino.drop(checkfirst=True)
if define:
class Comment2(db.Model):
__tablename__ = "s_comment_2"
id = db.Column(db.BigInteger(), primary_key=True)
blog_id = db.Column(db.ForeignKey("s_blog.id"))
await db.gino.create_all()
await db.gino.drop_all()
| 30.109375 | 78 | 0.701609 | 868 | 0.450441 | 0 | 0 | 0 | 0 | 1,019 | 0.528801 | 153 | 0.079398 |
5b0240511c5c9c995140e0add95f3c10735d13f4 | 903 | py | Python | solutions/29-distinct-powers.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
]
| null | null | null | solutions/29-distinct-powers.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
]
| null | null | null | solutions/29-distinct-powers.py | whitegreyblack/euler | bd8e7ca444eeb51b3c923f1235906054c507ecc8 | [
"MIT"
]
| null | null | null | # problem 29
# Distinct powers
"""
Consider all integer combinations of a**b for 2 ≤ a ≤ 5 and 2 ≤ b ≤ 5:
2**2=4, 2**3=8, 2**4=16, 2**5=32
3**2=9, 3**3=27, 3**4=81, 3**5=243
4**2=16, 4**3=64, 4**4=256, 4**5=1024
5**2=25, 5**3=125, 5**4=625, 5**5=3125
If they are then placed in numerical order, with any repeats removed,
we get the following sequence of 15 distinct terms:
4, 8, 9, 16, 25, 27, 32, 64, 81, 125, 243, 256, 625, 1024, 3125
How many distinct terms are in the sequence generated by a**b for
2 ≤ a ≤ 100 and 2 ≤ b ≤ 100?
"""
# analysis
"""
^ | 2 | 3 | 4 | 5 | N |
---+---+---+---+----+---+
2 | 4 | 8 | 16| 25 |2^N|
---+---+---+---+----+---+
3 | 9 | 27| 81| 243|3^N|
---+---+---+---+----+---+
4 | 16| 64|256|1024|4^N|
---+---+---+---+----+---+
5 | 25|125|625|3125|5^N|
---+---+---+---+----+---+
"""
# solution
s = set(a**b for a in range(2, 101) for b in range(2, 101))
print(len(s))
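
# Added sanity check (illustrative): the worked example in the problem text
# says 2 <= a, b <= 5 yields 15 distinct terms, which this reproduces.
assert len(set(a**b for a in range(2, 6) for b in range(2, 6))) == 15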
| 25.083333 | 70 | 0.499446 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 837 | 0.910773 |
5b0340e8c87e83abc062cbdb7773314cbba482e5 | 2,633 | py | Python | flexget/plugins/input/input_csv.py | metaMMA/Flexget | a38986422461d7935ead1e2b4ed4c88bcd0a90f5 | [
"MIT"
]
| null | null | null | flexget/plugins/input/input_csv.py | metaMMA/Flexget | a38986422461d7935ead1e2b4ed4c88bcd0a90f5 | [
"MIT"
]
| 1 | 2017-10-09T23:06:44.000Z | 2017-10-09T23:06:44.000Z | flexget/plugins/input/input_csv.py | metaMMA/Flexget | a38986422461d7935ead1e2b4ed4c88bcd0a90f5 | [
"MIT"
]
| null | null | null | from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from future.utils import PY3
import logging
import csv
from requests import RequestException
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.cached_input import cached
log = logging.getLogger('csv')
class InputCSV(object):
"""
Adds support for CSV format. Configuration may seem a bit complex,
    but this has the advantage of being a universal solution regardless of CSV
and internal entry fields.
Configuration format:
csv:
url: <url>
values:
<field>: <number>
Example DB-fansubs:
csv:
url: http://www.dattebayo.com/t/dump
values:
title: 3 # title is in 3th field
url: 1 # download url is in 1st field
Fields title and url are mandatory. First field is 1.
List of other common (optional) fields can be found from wiki.
"""
schema = {
'type': 'object',
'properties': {
'url': {'type': 'string', 'format': 'url'},
'values': {
'type': 'object',
'additionalProperties': {'type': 'integer'},
'required': ['title', 'url'],
},
},
'required': ['url', 'values'],
'additionalProperties': False,
}
@cached('csv')
def on_task_input(self, task, config):
entries = []
try:
r = task.requests.get(config['url'])
except RequestException as e:
raise plugin.PluginError('Error fetching `%s`: %s' % (config['url'], e))
# CSV module needs byte strings, we'll convert back to unicode later
if PY3:
page = r.text.splitlines()
else:
page = r.text.encode('utf-8').splitlines()
for row in csv.reader(page):
if not row:
continue
entry = Entry()
for name, index in list(config.get('values', {}).items()):
try:
# Convert the value back to unicode
if PY3:
entry[name] = row[index - 1].strip()
else:
entry[name] = row[index - 1].decode('utf-8').strip()
except IndexError:
raise plugin.PluginError('Field `%s` index is out of range' % name)
entries.append(entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(InputCSV, 'csv', api_ver=2)
| 29.255556 | 87 | 0.565515 | 2,119 | 0.804785 | 0 | 0 | 1,195 | 0.453855 | 0 | 0 | 1,054 | 0.400304 |
5b03dd11f975d3847001932de43a5378848ce948 | 2,043 | py | Python | gdget.py | tienfuc/gdcmdtools | 357ada27cdb6ef0cc155b8fb52b6f6368cd1f277 | [
"BSD-2-Clause"
]
| 29 | 2015-09-10T08:00:30.000Z | 2021-12-24T01:15:53.000Z | gdget.py | tienfuc/gdcmdtools | 357ada27cdb6ef0cc155b8fb52b6f6368cd1f277 | [
"BSD-2-Clause"
]
| 56 | 2015-09-10T02:56:16.000Z | 2020-10-06T13:17:21.000Z | gdget.py | tienfuc/gdcmdtools | 357ada27cdb6ef0cc155b8fb52b6f6368cd1f277 | [
"BSD-2-Clause"
]
| 4 | 2015-09-30T03:35:33.000Z | 2019-07-07T14:19:26.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import re
from gdcmdtools.base import BASE_INFO
from gdcmdtools.base import DEBUG_LEVEL
from gdcmdtools.get import GDGet
from gdcmdtools.get import export_format
import argparse
from argparse import RawTextHelpFormatter
from pprint import pprint
import logging
logger = logging.getLogger()
__THIS_APP = 'gdget'
__THIS_DESCRIPTION = 'Tool to download file from Google Drive'
__THIS_VERSION = BASE_INFO["version"]
def test():
assert True
if __name__ == '__main__':
arg_parser = argparse.ArgumentParser(
description='%s v%s - %s - %s (%s)' %
(__THIS_APP,
__THIS_VERSION,
__THIS_DESCRIPTION,
BASE_INFO["app"],
BASE_INFO["description"]),
formatter_class=RawTextHelpFormatter)
arg_parser.add_argument(
'file_id',
help='The file id or drive link for the file you\'re going to download')
help_export_format = "\n".join(
[
re.search(
".*google-apps\.(.*)",
k).group(1) +
": " +
", ".join(
export_format[k]) for k in export_format.iterkeys()])
arg_parser.add_argument(
'-f',
'--export_format',
metavar='FORMAT',
default='raw',
required=False,
help='specify the export format for downloading,\ngoogle_format: export_format\n%s' %
help_export_format)
arg_parser.add_argument(
'-s',
'--save_as',
metavar='NEW_FILE_NAME',
help='save the downloaded file as ')
arg_parser.add_argument('--debug',
choices=DEBUG_LEVEL,
default=DEBUG_LEVEL[-1],
help='define the debug level')
args = arg_parser.parse_args()
# set debug devel
logger.setLevel(getattr(logging, args.debug.upper()))
logger.debug(args)
get = GDGet(args.file_id, args.export_format, args.save_as)
result = get.run()
sys.exit(0)
| 24.614458 | 93 | 0.603035 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 482 | 0.235928 |
5b042f6383e41d397423d2d9b9c278a9f5788a29 | 325 | py | Python | Lotus/controller/common.py | Jayin/Lotus | 6a4791d81b29158a1a83aa6a5d607ab5d677dba4 | [
"Apache-2.0"
]
| null | null | null | Lotus/controller/common.py | Jayin/Lotus | 6a4791d81b29158a1a83aa6a5d607ab5d677dba4 | [
"Apache-2.0"
]
| null | null | null | Lotus/controller/common.py | Jayin/Lotus | 6a4791d81b29158a1a83aa6a5d607ab5d677dba4 | [
"Apache-2.0"
]
| null | null | null | # -*- coding: utf-8 -*-
from Lotus.app import app
from flask import render_template
@app.route('/')
def index():
return 'welcome'
@app.errorhandler(404)
def page_not_found(error):
return render_template('404.html')
@app.errorhandler(405)
def request_method_error(error):
return render_template('405.html')
| 16.25 | 38 | 0.713846 | 0 | 0 | 0 | 0 | 231 | 0.710769 | 0 | 0 | 55 | 0.169231 |
5b062f73819a8130b2460252ff5ee938a80ac7e2 | 8,261 | py | Python | src/retrocookie/git.py | cjolowicz/retrocookie | bc15dd46070ce50df21eeb016a385529d601f2b0 | [
"MIT"
]
| 15 | 2020-06-21T14:35:42.000Z | 2022-03-30T15:48:55.000Z | src/retrocookie/git.py | cjolowicz/retrocookie | bc15dd46070ce50df21eeb016a385529d601f2b0 | [
"MIT"
]
| 223 | 2020-05-22T14:35:05.000Z | 2022-03-28T00:19:23.000Z | src/retrocookie/git.py | cjolowicz/retrocookie | bc15dd46070ce50df21eeb016a385529d601f2b0 | [
"MIT"
]
| 4 | 2020-11-19T12:55:01.000Z | 2022-03-15T14:24:25.000Z | """Git interface."""
from __future__ import annotations
import contextlib
import functools
import operator
import re
import subprocess # noqa: S404
from dataclasses import dataclass
from dataclasses import field
from pathlib import Path
from typing import Any
from typing import cast
from typing import Iterator
from typing import List
from typing import Optional
import pygit2
from retrocookie.utils import removeprefix
def git(
*args: str, check: bool = True, **kwargs: Any
) -> subprocess.CompletedProcess[str]:
"""Invoke git."""
return subprocess.run( # noqa: S603,S607
["git", *args], check=check, text=True, capture_output=True, **kwargs
)
VERSION_PATTERN = re.compile(
r"""
(?P<major>\d+)\.
(?P<minor>\d+)
(\.(?P<patch>\d+))?
""",
re.VERBOSE,
)
@dataclass(frozen=True, order=True)
class Version:
"""Simplistic representation of git versions."""
major: int
minor: int
patch: int
_text: Optional[str] = field(default=None, compare=False)
@classmethod
def parse(cls, text: str) -> Version:
"""Extract major.minor[.patch] from the start of the text."""
match = VERSION_PATTERN.match(text)
if match is None:
raise ValueError(f"invalid version {text!r}")
parts = match.groupdict(default="0")
return cls(
int(parts["major"]), int(parts["minor"]), int(parts["patch"]), _text=text
)
def __str__(self) -> str:
"""Return the original representation."""
return (
self._text
if self._text is not None
else f"{self.major}.{self.minor}.{self.patch}"
)
def version() -> Version:
"""Return the git version."""
text = git("version").stdout.strip()
text = removeprefix(text, "git version ")
return Version.parse(text)
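
# Illustrative note (added): because Version is a frozen dataclass declared
# with order=True (and _text excluded from comparison), parsed versions compare
# numerically, e.g. Version.parse("2.30.1") > Version.parse("2.9") is True.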
def get_default_branch() -> str:
"""Return the default branch for new repositories."""
get_configs = [
pygit2.Config.get_global_config,
pygit2.Config.get_system_config,
]
for get_config in get_configs:
with contextlib.suppress(IOError, KeyError):
config = get_config()
branch = config["init.defaultBranch"]
assert isinstance(branch, str) # noqa: S101
return branch
return "master"
class Repository:
"""Git repository."""
def __init__(
self, path: Optional[Path] = None, *, repo: Optional[pygit2.Repository] = None
) -> None:
"""Initialize."""
if repo is None:
self.path = path or Path.cwd()
self.repo = pygit2.Repository(self.path)
else:
self.path = Path(repo.workdir or repo.path)
self.repo = repo
def git(self, *args: str, **kwargs: Any) -> subprocess.CompletedProcess[str]:
"""Invoke git."""
return git(*args, cwd=self.path, **kwargs)
@classmethod
def init(cls, path: Path, *, bare: bool = False) -> Repository:
"""Create a repository."""
# https://github.com/libgit2/libgit2/issues/2849
path.parent.mkdir(exist_ok=True, parents=True)
repo = pygit2.init_repository(path, bare=bare)
return cls(path, repo=repo)
@classmethod
def clone(cls, url: str, path: Path, *, mirror: bool = False) -> Repository:
"""Clone a repository."""
options = ["--mirror"] if mirror else []
git("clone", *options, url, str(path))
return cls(path)
def create_branch(self, branch: str, ref: str = "HEAD") -> None:
"""Create a branch."""
commit = self.repo.revparse_single(ref)
self.repo.branches.create(branch, commit)
def get_current_branch(self) -> str:
"""Return the current branch."""
return self.repo.head.shorthand # type: ignore[no-any-return]
def exists_branch(self, branch: str) -> bool:
"""Return True if the branch exists."""
return branch in self.repo.branches
def switch_branch(self, branch: str) -> None:
"""Switch the current branch."""
self.repo.checkout(self.repo.branches[branch])
def update_remote(self) -> None:
"""Update the remotes."""
self.git("remote", "update")
def fetch_commits(self, source: Repository, *commits: str) -> None:
"""Fetch the given commits and their immediate parents."""
path = source.path.resolve()
self.git("fetch", "--no-tags", "--depth=2", str(path), *commits)
def push(self, remote: str, *refs: str, force: bool = False) -> None:
"""Update remote refs."""
options = ["--force-with-lease"] if force else []
self.git("push", *options, remote, *refs)
def parse_revisions(self, *revisions: str) -> List[str]:
"""Parse revisions using the format specified in gitrevisions(7)."""
process = self.git("rev-list", "--no-walk", *revisions)
result = process.stdout.split()
result.reverse()
return result
def lookup_replacement(self, commit: str) -> str:
"""Lookup the replace ref for the given commit."""
refname = f"refs/replace/{commit}"
ref = self.repo.lookup_reference(refname)
return cast(str, ref.target.hex)
def _ensure_relative(self, path: Path) -> Path:
"""Interpret the path relative to the repository root."""
return path.relative_to(self.path) if path.is_absolute() else path
def read_text(self, path: Path, *, ref: str = "HEAD") -> str:
"""Return the contents of the blob at the given path."""
commit = self.repo.revparse_single(ref)
path = self._ensure_relative(path)
blob = functools.reduce(operator.truediv, path.parts, commit.tree)
return cast(str, blob.data.decode())
def exists(self, path: Path, *, ref: str = "HEAD") -> bool:
"""Return True if a blob exists at the given path."""
commit = self.repo.revparse_single(ref)
path = self._ensure_relative(path)
try:
functools.reduce(operator.truediv, path.parts, commit.tree)
return True
except KeyError:
return False
def add(self, *paths: Path) -> None:
"""Add paths to the index."""
        if paths:
            for path in paths:
                path = self._ensure_relative(path)
                self.repo.index.add(path)
        else:
            self.repo.index.add_all()
self.repo.index.write()
def commit(self, message: str) -> None:
"""Create a commit."""
try:
head = self.repo.head
refname = head.name
parents = [head.target]
except pygit2.GitError:
branch = get_default_branch()
refname = f"refs/heads/{branch}"
parents = []
tree = self.repo.index.write_tree()
author = committer = self.repo.default_signature
self.repo.create_commit(refname, author, committer, message, tree, parents)
def cherrypick(self, *refs: str) -> None:
"""Cherry-pick the given commits."""
self.git("cherry-pick", *refs)
@contextlib.contextmanager
def worktree(
self,
branch: str,
path: Path,
*,
base: str = "HEAD",
force: bool = False,
force_remove: bool = False,
) -> Iterator[Repository]:
"""Context manager to add and remove a worktree."""
repository = self.add_worktree(branch, path, base=base, force=force)
try:
yield repository
finally:
self.remove_worktree(path, force=force_remove)
def add_worktree(
self,
branch: str,
path: Path,
*,
base: str = "HEAD",
force: bool = False,
) -> Repository:
"""Add a worktree."""
self.git(
"worktree",
"add",
str(path),
"--no-track",
"-B" if force else "-b",
branch,
base,
)
return Repository(path)
def remove_worktree(self, path: Path, *, force: bool = False) -> None:
"""Remove a worktree."""
if force:
self.git("worktree", "remove", "--force", str(path))
else:
self.git("worktree", "remove", str(path))
| 30.824627 | 86 | 0.587459 | 6,753 | 0.817456 | 458 | 0.055441 | 1,916 | 0.231933 | 0 | 0 | 1,695 | 0.205181 |
5b084682efe35e9ca46aead0d385f2c28ccda23b | 5,630 | py | Python | apps/user/views.py | awsbreathpanda/dailyfresh | c218cdc3ea261b695ff00b6781ba3040f5d06eff | [
"MIT"
]
| null | null | null | apps/user/views.py | awsbreathpanda/dailyfresh | c218cdc3ea261b695ff00b6781ba3040f5d06eff | [
"MIT"
]
| 7 | 2021-03-30T14:18:30.000Z | 2022-01-13T03:13:37.000Z | apps/user/views.py | awsbreathpanda/dailyfresh | c218cdc3ea261b695ff00b6781ba3040f5d06eff | [
"MIT"
]
| null | null | null | from django.shortcuts import redirect
from django.contrib.auth import authenticate, login, logout
from celery_tasks.tasks import celery_send_mail
from apps.user.models import User
import re
from django.shortcuts import render
from django.views import View
from utils.security import get_user_token, get_activation_link, get_user_id
from django.conf import settings
from django.http import HttpResponse
from django.urls import reverse
# Create your views here.
# /user/register
class RegisterView(View):
def get(self, request):
return render(request, 'user_register.html')
def post(self, request):
username = request.POST.get('username')
password = request.POST.get('password')
rpassword = request.POST.get('rpassword')
email = request.POST.get('email')
allow = request.POST.get('allow')
if not all([username, password, rpassword, email, allow]):
context = {'errmsg': '数据不完整'}
return render(request, 'user_register.html', context=context)
if password != rpassword:
context = {'errmsg': '密码不一致'}
return render(request, 'user_register.html', context=context)
if not re.match(r'^[a-z0-9][\w.\-]*@[a-z0-9\-]+(\.[a-z]{2,5}){1,2}$',
email):
context = {'errmsg': '邮箱格式不正确'}
return render(request, 'user_register.html', context=context)
if allow != 'on':
            context = {'errmsg': '请同意天天生鲜用户协议'}
            return render(request, 'user_register.html', context=context)
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
user = None
if user is not None:
context = {'errmsg': '已经创建该用户名'}
return render(request, 'user_register.html', context=context)
user = User.objects.create_user(username, email, password)
user.is_active = 0
user.save()
user_token = get_user_token(user.id)
activation_link = get_activation_link(settings.ACTIVATION_URL_PATH,
user_token)
# send email
subject = '天天生鲜欢迎信息'
message = ''
html_message = (
'<h1>%s,欢迎您成为天天生鲜的注册会员</h1><p>请点击以下链接激活你的账户</p><br><a href="%s">%s</a>'
% (username, activation_link, activation_link))
from_email = 'dailyfresh<[email protected]>'
recipient_list = [
'[email protected]',
]
celery_send_mail.delay(subject,
message,
from_email,
recipient_list,
html_message=html_message)
context = {'errmsg': '添加用户成功'}
return render(request, 'user_register.html', context=context)
# /user/activate/(token)
class ActivateView(View):
def get(self, request, token):
token_bytes = token.encode('utf-8')
user_id = get_user_id(token_bytes)
user = User.objects.get(id=user_id)
user.is_active = 1
user.save()
# TODO
return HttpResponse('<h1>Activate User Successfully</h1>')
# /user/login
class LoginView(View):
def get(self, request):
username = request.COOKIES.get('username')
checked = 'checked'
if username is None:
username = ''
checked = ''
context = {'username': username, 'checked': checked}
return render(request, 'user_login.html', context=context)
def post(self, request):
username = request.POST.get('username')
password = request.POST.get('password')
remember = request.POST.get('remember')
if not all([username, password]):
context = {'errmsg': '参数不完整'}
return render(request, 'user_login.html', context=context)
user = authenticate(request, username=username, password=password)
if user is None:
context = {'errmsg': '用户不存在'}
return render(request, 'user_login.html', context=context)
if not user.is_active:
context = {'errmsg': '用户未激活'}
return render(request, 'user_login.html', context=context)
login(request, user)
next_url = request.GET.get('next', reverse('goods:index'))
response = redirect(next_url)
if remember == 'on':
response.set_cookie('username', username, max_age=7 * 24 * 3600)
else:
response.delete_cookie('username')
return response
# /user/
class UserInfoView(View):
def get(self, request):
if not request.user.is_authenticated:
next_url = reverse(
'user:login') + '?next=' + request.get_full_path()
return redirect(next_url)
else:
return render(request, 'user_center_info.html')
# /user/order/(page)
class UserOrderView(View):
def get(self, request, page):
if not request.user.is_authenticated:
next_url = reverse(
'user:login') + '?next=' + request.get_full_path()
return redirect(next_url)
else:
return render(request, 'user_center_order.html')
# /user/address
class UserAddressView(View):
def get(self, request):
if not request.user.is_authenticated:
next_url = reverse(
'user:login') + '?next=' + request.get_full_path()
return redirect(next_url)
else:
return render(request, 'user_center_site.html')
# /user/logout
class LogoutView(View):
def get(self, request):
logout(request)
return redirect(reverse('goods:index'))
| 31.80791 | 83 | 0.59325 | 5,218 | 0.89718 | 0 | 0 | 0 | 0 | 0 | 0 | 1,220 | 0.209766 |
5b08fda32750d87556f3ccf00e2fba375865e05c | 2,666 | py | Python | heatzy/pilote_v1.py | Devotics/heatzy-home-hassistant | 34ef71604d10b1d45be4cfb17d811bdd33042ce7 | [
"MIT"
]
| 22 | 2019-03-07T22:51:12.000Z | 2021-03-06T12:14:50.000Z | heatzy/pilote_v1.py | Devotics/heatzy-home-hassistant | 34ef71604d10b1d45be4cfb17d811bdd33042ce7 | [
"MIT"
]
| 15 | 2019-03-07T13:04:11.000Z | 2021-03-11T21:34:34.000Z | heatzy/pilote_v1.py | Devotics/heatzy-home-hassistant | 34ef71604d10b1d45be4cfb17d811bdd33042ce7 | [
"MIT"
]
| 7 | 2019-11-17T11:01:50.000Z | 2021-02-24T18:13:28.000Z | from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (HVAC_MODE_AUTO,
PRESET_AWAY,
PRESET_COMFORT, PRESET_ECO,
PRESET_NONE,
SUPPORT_PRESET_MODE)
from homeassistant.const import TEMP_CELSIUS
HEATZY_TO_HA_STATE = {
    '\u8212\u9002': PRESET_COMFORT,  # 舒适 "comfort"
    '\u7ecf\u6d4e': PRESET_ECO,  # 经济 "eco"
    '\u89e3\u51bb': PRESET_AWAY,  # 解冻 "frost protection"
    '\u505c\u6b62': PRESET_NONE,  # 停止 "off"
}
HA_TO_HEATZY_STATE = {
PRESET_COMFORT: [1, 1, 0],
PRESET_ECO: [1, 1, 1],
PRESET_AWAY: [1, 1, 2],
PRESET_NONE: [1, 1, 3],
}
class HeatzyPiloteV1Thermostat(ClimateEntity):
def __init__(self, api, device):
self._api = api
self._device = device
@property
def temperature_unit(self):
"""Return the unit of measurement used by the platform."""
return TEMP_CELSIUS
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_PRESET_MODE
@property
def unique_id(self):
"""Return a unique ID."""
return self._device.get('did')
@property
def name(self):
return self._device.get('dev_alias')
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
Need to be a subset of HVAC_MODES.
"""
return [
HVAC_MODE_AUTO
]
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
return HVAC_MODE_AUTO
@property
def preset_modes(self):
"""Return a list of available preset modes.
Requires SUPPORT_PRESET_MODE.
"""
return [
PRESET_NONE,
PRESET_COMFORT,
PRESET_ECO,
PRESET_AWAY
]
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp.
Requires SUPPORT_PRESET_MODE.
"""
return HEATZY_TO_HA_STATE.get(self._device.get('attr').get('mode'))
async def async_set_preset_mode(self, preset_mode):
"""Set new preset mode."""
await self._api.async_control_device(self.unique_id, {
'raw': HA_TO_HEATZY_STATE.get(preset_mode),
})
await self.async_update()
async def async_update(self):
"""Retrieve latest state."""
self._device = await self._api.async_get_device(self.unique_id)
| 27.484536 | 79 | 0.577269 | 1,908 | 0.715679 | 0 | 0 | 1,323 | 0.496249 | 388 | 0.145536 | 679 | 0.254689 |
5b0af9dfbe74e34130cf9a393f33916249893c28 | 8,315 | py | Python | kubernetes-the-hard-way/system/collections/ansible_collections/community/general/plugins/modules/cloud/misc/proxmox_template.py | jkroepke/homelab | ffdd849e39b52972870f5552e734fd74cb1254a1 | [
"Apache-2.0"
]
| 5 | 2020-12-16T21:42:09.000Z | 2022-03-28T16:04:32.000Z | kubernetes-the-hard-way/system/collections/ansible_collections/community/general/plugins/modules/cloud/misc/proxmox_template.py | jkroepke/kubernetes-the-hard-way | 70fd096a04addec0777744c9731a4e3fbdc40c8f | [
"Apache-2.0"
]
| null | null | null | kubernetes-the-hard-way/system/collections/ansible_collections/community/general/plugins/modules/cloud/misc/proxmox_template.py | jkroepke/kubernetes-the-hard-way | 70fd096a04addec0777744c9731a4e3fbdc40c8f | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/python
#
# Copyright: Ansible Project
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = '''
---
module: proxmox_template
short_description: management of OS templates in Proxmox VE cluster
description:
- allows you to upload/delete templates in Proxmox VE cluster
options:
api_host:
description:
- the host of the Proxmox VE cluster
type: str
required: true
api_user:
description:
- the user to authenticate with
type: str
required: true
api_password:
description:
- the password to authenticate with
- you can use PROXMOX_PASSWORD environment variable
type: str
validate_certs:
description:
- enable / disable https certificate verification
default: 'no'
type: bool
node:
description:
- Proxmox VE node, when you will operate with template
type: str
required: true
src:
description:
- path to uploaded file
- required only for C(state=present)
type: path
template:
description:
- the template name
- required only for states C(absent), C(info)
type: str
content_type:
description:
- content type
- required only for C(state=present)
type: str
default: 'vztmpl'
choices: ['vztmpl', 'iso']
storage:
description:
- target storage
type: str
default: 'local'
timeout:
description:
- timeout for operations
type: int
default: 30
force:
description:
- can be used only with C(state=present), exists template will be overwritten
type: bool
default: 'no'
state:
description:
- Indicate desired state of the template
type: str
choices: ['present', 'absent']
default: present
notes:
- Requires proxmoxer and requests modules on host. This modules can be installed with pip.
requirements: [ "proxmoxer", "requests" ]
author: Sergei Antipov (@UnderGreen)
'''
EXAMPLES = '''
- name: Upload new openvz template with minimal options
community.general.proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
src: ~/ubuntu-14.04-x86_64.tar.gz
- name: >
Upload new openvz template with minimal options use environment
PROXMOX_PASSWORD variable(you should export it before)
community.general.proxmox_template:
node: uk-mc02
api_user: root@pam
api_host: node1
src: ~/ubuntu-14.04-x86_64.tar.gz
- name: Upload new openvz template with all options and force overwrite
community.general.proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
storage: local
content_type: vztmpl
src: ~/ubuntu-14.04-x86_64.tar.gz
force: yes
- name: Delete template with minimal options
community.general.proxmox_template:
node: uk-mc02
api_user: root@pam
api_password: 1q2w3e
api_host: node1
template: ubuntu-14.04-x86_64.tar.gz
state: absent
'''
import os
import time
try:
from proxmoxer import ProxmoxAPI
HAS_PROXMOXER = True
except ImportError:
HAS_PROXMOXER = False
from ansible.module_utils.basic import AnsibleModule
def get_template(proxmox, node, storage, content_type, template):
return [True for tmpl in proxmox.nodes(node).storage(storage).content.get()
if tmpl['volid'] == '%s:%s/%s' % (storage, content_type, template)]
def upload_template(module, proxmox, api_host, node, storage, content_type, realpath, timeout):
taskid = proxmox.nodes(node).storage(storage).upload.post(content=content_type, filename=open(realpath, 'rb'))
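    # poll the upload task roughly once per second until it finishes or the timeout budget runs out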
while timeout:
task_status = proxmox.nodes(api_host.split('.')[0]).tasks(taskid).status.get()
if task_status['status'] == 'stopped' and task_status['exitstatus'] == 'OK':
return True
timeout = timeout - 1
if timeout == 0:
module.fail_json(msg='Reached timeout while waiting for uploading template. Last line in task before timeout: %s'
                                 % proxmox.nodes(node).tasks(taskid).log.get()[:1])
time.sleep(1)
return False
def delete_template(module, proxmox, node, storage, content_type, template, timeout):
volid = '%s:%s/%s' % (storage, content_type, template)
proxmox.nodes(node).storage(storage).content.delete(volid)
while timeout:
if not get_template(proxmox, node, storage, content_type, template):
return True
timeout = timeout - 1
if timeout == 0:
module.fail_json(msg='Reached timeout while waiting for deleting template.')
time.sleep(1)
return False
def main():
module = AnsibleModule(
argument_spec=dict(
api_host=dict(required=True),
api_user=dict(required=True),
api_password=dict(no_log=True),
validate_certs=dict(type='bool', default=False),
node=dict(),
src=dict(type='path'),
template=dict(),
content_type=dict(default='vztmpl', choices=['vztmpl', 'iso']),
storage=dict(default='local'),
timeout=dict(type='int', default=30),
force=dict(type='bool', default=False),
state=dict(default='present', choices=['present', 'absent']),
)
)
if not HAS_PROXMOXER:
module.fail_json(msg='proxmoxer required for this module')
state = module.params['state']
api_user = module.params['api_user']
api_host = module.params['api_host']
api_password = module.params['api_password']
validate_certs = module.params['validate_certs']
node = module.params['node']
storage = module.params['storage']
timeout = module.params['timeout']
# If password not set get it from PROXMOX_PASSWORD env
if not api_password:
try:
api_password = os.environ['PROXMOX_PASSWORD']
except KeyError as e:
module.fail_json(msg='You should set api_password param or use PROXMOX_PASSWORD environment variable')
try:
proxmox = ProxmoxAPI(api_host, user=api_user, password=api_password, verify_ssl=validate_certs)
except Exception as e:
module.fail_json(msg='authorization on proxmox cluster failed with exception: %s' % e)
if state == 'present':
try:
content_type = module.params['content_type']
src = module.params['src']
template = os.path.basename(src)
if get_template(proxmox, node, storage, content_type, template) and not module.params['force']:
module.exit_json(changed=False, msg='template with volid=%s:%s/%s is already exists' % (storage, content_type, template))
elif not src:
module.fail_json(msg='src param to uploading template file is mandatory')
elif not (os.path.exists(src) and os.path.isfile(src)):
module.fail_json(msg='template file on path %s not exists' % src)
if upload_template(module, proxmox, api_host, node, storage, content_type, src, timeout):
module.exit_json(changed=True, msg='template with volid=%s:%s/%s uploaded' % (storage, content_type, template))
except Exception as e:
module.fail_json(msg="uploading of template %s failed with exception: %s" % (template, e))
elif state == 'absent':
try:
content_type = module.params['content_type']
template = module.params['template']
if not template:
module.fail_json(msg='template param is mandatory')
elif not get_template(proxmox, node, storage, content_type, template):
module.exit_json(changed=False, msg='template with volid=%s:%s/%s is already deleted' % (storage, content_type, template))
if delete_template(module, proxmox, node, storage, content_type, template, timeout):
module.exit_json(changed=True, msg='template with volid=%s:%s/%s deleted' % (storage, content_type, template))
except Exception as e:
module.fail_json(msg="deleting of template %s failed with exception: %s" % (template, e))
if __name__ == '__main__':
main()
| 33.26 | 138 | 0.657486 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,073 | 0.489838 |
5b0b336675387a3e79e4c5c116c3b8865c4ef0c0 | 9,024 | py | Python | polling_stations/apps/councils/management/commands/import_councils.py | DemocracyClub/UK-Polling-Stations | d5c428fc7fbccf0c13a84fa0045dfd332b2879e7 | [
"BSD-3-Clause"
]
| 29 | 2015-03-10T08:41:34.000Z | 2022-01-12T08:51:38.000Z | polling_stations/apps/councils/management/commands/import_councils.py | DemocracyClub/UK-Polling-Stations | d5c428fc7fbccf0c13a84fa0045dfd332b2879e7 | [
"BSD-3-Clause"
]
| 4,112 | 2015-04-01T21:27:38.000Z | 2022-03-31T19:22:11.000Z | polling_stations/apps/councils/management/commands/import_councils.py | DemocracyClub/UK-Polling-Stations | d5c428fc7fbccf0c13a84fa0045dfd332b2879e7 | [
"BSD-3-Clause"
]
| 31 | 2015-03-18T14:52:50.000Z | 2022-02-24T10:31:07.000Z | import json
from html import unescape
import requests
from django.apps import apps
from django.contrib.gis.geos import GEOSGeometry, MultiPolygon, Polygon
from django.conf import settings
from django.core.management.base import BaseCommand
from requests.exceptions import HTTPError
from retry import retry
from councils.models import Council, CouncilGeography
from polling_stations.settings.constants.councils import WELSH_COUNCIL_NAMES
def union_areas(a1, a2):
if not a1:
return a2
return MultiPolygon(a1.union(a2))
NIR_IDS = [
"ABC",
"AND",
"ANN",
"BFS",
"CCG",
"DRS",
"FMO",
"LBC",
"MEA",
"MUL",
"NMD",
]
class Command(BaseCommand):
"""
Turn off auto system check for all apps
    We will manually run system checks only for the
'councils' and 'pollingstations' apps
"""
requires_system_checks = []
contact_details = {}
def add_arguments(self, parser):
parser.add_argument(
"-t",
"--teardown",
default=False,
action="store_true",
required=False,
help="<Optional> Clear Councils and CouncilGeography tables before importing",
)
parser.add_argument(
"-u",
"--alt-url",
required=False,
help="<Optional> Alternative url to override settings.BOUNDARIES_URL",
)
parser.add_argument(
"--only-contact-details",
action="store_true",
help="Only update contact information for imported councils, "
"don't update boundaries",
)
def feature_to_multipolygon(self, feature):
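        """Convert a GeoJSON feature's geometry to a MultiPolygon, wrapping a bare Polygon if needed."""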
geometry = GEOSGeometry(json.dumps(feature["geometry"]), srid=4326)
if isinstance(geometry, Polygon):
return MultiPolygon(geometry)
return geometry
@retry(HTTPError, tries=2, delay=30)
def get_ons_boundary_json(self, url):
r = requests.get(url)
r.raise_for_status()
"""
When an ArcGIS server can't generate a response
within X amount of time, it will return a 202 ACCEPTED
response with a body like
{
"processingTime": "27.018 seconds",
"status": "Processing",
"generating": {}
}
and expects the client to poll it.
"""
if r.status_code == 202:
raise HTTPError("202 Accepted", response=r)
return r.json()
def attach_boundaries(self, url=None, id_field="LAD20CD"):
"""
Fetch each council's boundary from ONS and attach it to an existing
council object
:param url: The URL of the geoJSON file containing council boundaries
:param id_field: The name of the feature properties field containing
the council ID
:return:
"""
if not url:
url = settings.BOUNDARIES_URL
self.stdout.write("Downloading ONS boundaries from %s..." % (url))
feature_collection = self.get_ons_boundary_json(url)
for feature in feature_collection["features"]:
gss_code = feature["properties"][id_field]
try:
council = Council.objects.get(identifiers__contains=[gss_code])
self.stdout.write(
"Found boundary for %s: %s" % (gss_code, council.name)
)
except Council.DoesNotExist:
self.stderr.write(
"No council object with GSS {} found".format(gss_code)
)
continue
council_geography, _ = CouncilGeography.objects.get_or_create(
council=council
)
council_geography.gss = gss_code
council_geography.geography = self.feature_to_multipolygon(feature)
council_geography.save()
def load_contact_details(self):
return requests.get(settings.EC_COUNCIL_CONTACT_DETAILS_API_URL).json()
def get_council_name(self, council_data):
"""
At the time of writing, the council name can be NULL in the API
meaning we can't rely on the key being populated in all cases.
This is normally only an issue with councils covered by EONI, so if
we see one of them without a name, we assign a hardcoded name.
"""
name = None
if council_data["official_name"]:
name = council_data["official_name"]
else:
if council_data["code"] in NIR_IDS:
name = "Electoral Office for Northern Ireland"
if not name:
raise ValueError("No official name for {}".format(council_data["code"]))
return unescape(name)
def import_councils_from_ec(self):
self.stdout.write("Importing councils...")
bucks_defaults = {
"name": "Buckinghamshire Council",
"electoral_services_email": "[email protected] (general enquiries), [email protected] (postal vote enquiries), [email protected] (proxy vote enquiries), [email protected] (overseas voter enquiries)",
"electoral_services_website": "https://www.buckinghamshire.gov.uk/your-council/council-and-democracy/election-and-voting/",
"electoral_services_postcode": "HP19 8FF",
"electoral_services_address": "Electoral Services\r\nBuckinghamshire Council\r\nThe Gateway\r\nGatehouse Road\r\nAylesbury",
"electoral_services_phone_numbers": ["01296 798141"],
"identifiers": ["E06000060"],
"registration_address": None,
"registration_email": "",
"registration_phone_numbers": [],
"registration_postcode": None,
"registration_website": "",
"name_translated": {},
}
bucks_council, created = Council.objects.get_or_create(
council_id="BUC", defaults=bucks_defaults
)
if not created:
for key, value in bucks_defaults.items():
setattr(bucks_council, key, value)
bucks_council.save()
self.seen_ids.add("BUC")
for council_data in self.load_contact_details():
self.seen_ids.add(council_data["code"])
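            # Skip the former Buckinghamshire districts (Chiltern, Aylesbury Vale, South Bucks,
            # Wycombe); they were merged into the Buckinghamshire unitary council handled above.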
if council_data["code"] in ("CHN", "AYL", "SBU", "WYO"):
continue
council, _ = Council.objects.get_or_create(council_id=council_data["code"])
council.name = self.get_council_name(council_data)
council.identifiers = council_data["identifiers"]
if council_data["electoral_services"]:
electoral_services = council_data["electoral_services"][0]
council.electoral_services_email = electoral_services["email"]
council.electoral_services_address = unescape(
electoral_services["address"]
)
council.electoral_services_postcode = electoral_services["postcode"]
council.electoral_services_phone_numbers = electoral_services["tel"]
council.electoral_services_website = electoral_services[
"website"
].replace("\\", "")
if council_data["registration"]:
registration = council_data["registration"][0]
council.registration_email = registration["email"]
council.registration_address = unescape(registration["address"])
council.registration_postcode = registration["postcode"]
council.registration_phone_numbers = registration["tel"]
council.registration_website = registration["website"].replace("\\", "")
if council.council_id in WELSH_COUNCIL_NAMES:
council.name_translated["cy"] = WELSH_COUNCIL_NAMES[council.council_id]
elif council.name_translated.get("cy"):
del council.name_translated["cy"]
council.save()
def handle(self, **options):
"""
Manually run system checks for the
'councils' and 'pollingstations' apps
Management commands can ignore checks that only apply to
the apps supporting the website part of the project
"""
self.check(
[apps.get_app_config("councils"), apps.get_app_config("pollingstations")]
)
if options["teardown"]:
self.stdout.write("Clearing councils table..")
Council.objects.all().delete()
self.stdout.write("Clearing councils_geography table..")
CouncilGeography.objects.all().delete()
self.seen_ids = set()
self.import_councils_from_ec()
if not options["only_contact_details"]:
self.attach_boundaries(options.get("alt_url"))
# Clean up old councils that we've not seen in the EC data
Council.objects.exclude(council_id__in=self.seen_ids).delete()
self.stdout.write("..done")
| 37.6 | 275 | 0.614251 | 8,348 | 0.925089 | 0 | 0 | 603 | 0.066822 | 0 | 0 | 3,205 | 0.355164 |
5b0b4a59e216a0cba015910bd19bb58090619801 | 3,693 | py | Python | saleor/webhook/observability/payload_schema.py | DevPoke/saleor | ced3a2249a18031f9f593e71d1d18aa787ec1060 | [
"CC-BY-4.0"
]
| null | null | null | saleor/webhook/observability/payload_schema.py | DevPoke/saleor | ced3a2249a18031f9f593e71d1d18aa787ec1060 | [
"CC-BY-4.0"
]
| null | null | null | saleor/webhook/observability/payload_schema.py | DevPoke/saleor | ced3a2249a18031f9f593e71d1d18aa787ec1060 | [
"CC-BY-4.0"
]
| null | null | null | from datetime import datetime
from enum import Enum
from json.encoder import ESCAPE_ASCII, ESCAPE_DCT # type: ignore
from typing import List, Optional, Tuple, TypedDict
class JsonTruncText:
def __init__(self, text="", truncated=False, added_bytes=0):
self.text = text
self.truncated = truncated
self._added_bytes = max(0, added_bytes)
def __eq__(self, other):
if not isinstance(other, JsonTruncText):
return False
return (self.text, self.truncated) == (other.text, other.truncated)
def __repr__(self):
return f'JsonTruncText(text="{self.text}", truncated={self.truncated})'
@property
def byte_size(self) -> int:
return len(self.text) + self._added_bytes
@staticmethod
def json_char_len(char: str) -> int:
try:
return len(ESCAPE_DCT[char])
except KeyError:
return 6 if ord(char) < 0x10000 else 12
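    # Best-effort truncation that accounts for the extra characters JSON escaping adds;
    # `added_bytes` records how much the escaped form grows beyond the stored text.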
@classmethod
def truncate(cls, s: str, limit: int):
limit = max(limit, 0)
s_init_len = len(s)
s = s[:limit]
added_bytes = 0
for match in ESCAPE_ASCII.finditer(s):
start, end = match.span(0)
markup = cls.json_char_len(match.group(0)) - 1
added_bytes += markup
if end + added_bytes > limit:
return cls(
text=s[:start],
truncated=True,
added_bytes=added_bytes - markup,
)
if end + added_bytes == limit:
s = s[:end]
return cls(
text=s,
truncated=len(s) < s_init_len,
added_bytes=added_bytes,
)
return cls(
text=s,
truncated=len(s) < s_init_len,
added_bytes=added_bytes,
)
class ObservabilityEventTypes(str, Enum):
API_CALL = "api_call"
EVENT_DELIVERY_ATTEMPT = "event_delivery_attempt"
HttpHeaders = List[Tuple[str, str]]
class App(TypedDict):
id: str
name: str
class Webhook(TypedDict):
id: str
name: str
target_url: str
subscription_query: Optional[JsonTruncText]
class ObservabilityEventBase(TypedDict):
event_type: ObservabilityEventTypes
class GraphQLOperation(TypedDict):
name: Optional[JsonTruncText]
operation_type: Optional[str]
query: Optional[JsonTruncText]
result: Optional[JsonTruncText]
result_invalid: bool
class ApiCallRequest(TypedDict):
id: str
method: str
url: str
time: float
headers: HttpHeaders
content_length: int
class ApiCallResponse(TypedDict):
headers: HttpHeaders
status_code: Optional[int]
content_length: int
class ApiCallPayload(ObservabilityEventBase):
request: ApiCallRequest
response: ApiCallResponse
app: Optional[App]
gql_operations: List[GraphQLOperation]
class EventDeliveryPayload(TypedDict):
content_length: int
body: JsonTruncText
class EventDelivery(TypedDict):
id: str
status: str
event_type: str
event_sync: bool
payload: EventDeliveryPayload
class EventDeliveryAttemptRequest(TypedDict):
headers: HttpHeaders
class EventDeliveryAttemptResponse(TypedDict):
headers: HttpHeaders
status_code: Optional[int]
content_length: int
body: JsonTruncText
class EventDeliveryAttemptPayload(ObservabilityEventBase):
id: str
time: datetime
duration: Optional[float]
status: str
next_retry: Optional[datetime]
request: EventDeliveryAttemptRequest
response: EventDeliveryAttemptResponse
event_delivery: EventDelivery
webhook: Webhook
app: App
| 24.296053 | 79 | 0.642296 | 3,443 | 0.932304 | 0 | 0 | 1,200 | 0.324939 | 0 | 0 | 114 | 0.030869 |
5b0e8250fd1078639a824b073c3ab62b92fe28cf | 4,537 | py | Python | NMTK_apps/NMTK_server/wms/djpaste.py | bhargavasana/nmtk | 9bebfcc4b43c28a1f2b2574060ea3195fca2c7dd | [
"Unlicense"
]
| null | null | null | NMTK_apps/NMTK_server/wms/djpaste.py | bhargavasana/nmtk | 9bebfcc4b43c28a1f2b2574060ea3195fca2c7dd | [
"Unlicense"
]
| null | null | null | NMTK_apps/NMTK_server/wms/djpaste.py | bhargavasana/nmtk | 9bebfcc4b43c28a1f2b2574060ea3195fca2c7dd | [
"Unlicense"
]
| null | null | null | # (c) 2013 Chander Ganesan and contributors; written to work with Django and Paste (http://pythonpaste.org)
# Paste CGI "middleware" for Django by Chander Ganesan <[email protected]>
# Open Technology Group, Inc <http://www.otg-nc.com>
# Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php
import os
import sys
import subprocess
import urllib
try:
import select
except ImportError:
select = None
from paste.util import converters
from paste.cgiapp import *
from paste.cgiapp import StdinReader, proc_communicate
from paste.cgiapp import CGIApplication as PasteCGIApplication
import urllib
from django.http import HttpResponse
# Taken from http://plumberjack.blogspot.com/2009/09/how-to-treat-logger-like-output-stream.html
import logging
mod_logger = logging.getLogger(__name__)
class LoggerWriter:
def __init__(self, logger, level):
self.logger = logger
self.level = level
def write(self, message):
if message.strip() and message != '\n':
self.logger.log(self.level, message)
class CGIApplication(PasteCGIApplication):
def __call__(self, request, environ, logger=None):
if not logger:
self.logger=LoggerWriter(logging.getLogger(__name__), logging.ERROR)
else:
self.logger=logger
if 'REQUEST_URI' not in environ:
environ['REQUEST_URI'] = (
urllib.quote(environ.get('SCRIPT_NAME', ''))
+ urllib.quote(environ.get('PATH_INFO', '')))
if self.include_os_environ:
cgi_environ = os.environ.copy()
else:
cgi_environ = {}
for name in environ:
# Should unicode values be encoded?
if (name.upper() == name
and isinstance(environ[name], str)):
cgi_environ[name] = environ[name]
if self.query_string is not None:
old = cgi_environ.get('QUERY_STRING', '')
if old:
old += '&'
cgi_environ['QUERY_STRING'] = old + self.query_string
cgi_environ['SCRIPT_FILENAME'] = self.script
proc = subprocess.Popen(
[self.script],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env=cgi_environ,
cwd=os.path.dirname(self.script),
)
writer = CGIWriter()
if select and sys.platform != 'win32':
proc_communicate(
proc,
stdin=request,
stdout=writer,
stderr=self.logger)
else:
stdout, stderr = proc.communicate(request.read())
if stderr:
self.logger.write(stderr)
writer.write(stdout)
if not writer.headers_finished:
return HttpResponse(status=400)
return writer.response
class CGIWriter(object):
def __init__(self):
self.status = '200 OK'
self.headers = []
self.headers_finished = False
self.writer = None
self.buffer = ''
def write(self, data):
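        # Buffer CGI output until the blank line ending the header block, turning "Status:" and
        # header lines into an HttpResponse; after that, stream data straight into the body.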
if self.headers_finished:
self.response.write(data)
return
self.buffer += data
while '\n' in self.buffer:
if '\r\n' in self.buffer and self.buffer.find('\r\n') < self.buffer.find('\n'):
line1, self.buffer = self.buffer.split('\r\n', 1)
else:
line1, self.buffer = self.buffer.split('\n', 1)
if not line1:
self.headers_finished = True
self.response=HttpResponse(status=int(self.status.split(' ')[0]))
for name, value in self.headers:
self.response[name]=value
self.response.write(self.buffer)
del self.buffer
del self.headers
del self.status
break
elif ':' not in line1:
raise CGIError(
"Bad header line: %r" % line1)
else:
name, value = line1.split(':', 1)
value = value.lstrip()
name = name.strip()
if name.lower() == 'status':
if ' ' not in value:
# WSGI requires this space, sometimes CGI scripts don't set it:
value = '%s General' % value
self.status = value
else:
self.headers.append((name, value))
| 36.007937 | 107 | 0.561384 | 3,704 | 0.816399 | 0 | 0 | 0 | 0 | 0 | 0 | 720 | 0.158695 |
5b0f67ce020d1273d176ad58ddcab8801ec9c7f2 | 181 | py | Python | Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py | Arbupa/DAS_Sistemas | 52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1 | [
"MIT"
]
| 41 | 2017-09-26T09:36:32.000Z | 2022-03-19T18:05:25.000Z | Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py | Arbupa/DAS_Sistemas | 52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1 | [
"MIT"
]
| 67 | 2017-09-11T05:06:12.000Z | 2022-02-14T04:44:04.000Z | Ago-Dic-2019/JOSE ONOFRE/PRACTICAS/Practica1/RestaurantSeat.py | Arbupa/DAS_Sistemas | 52263ab91436b2e5a24ce6f8493aaa2e2fe92fb1 | [
"MIT"
]
| 210 | 2017-09-01T00:10:08.000Z | 2022-03-19T18:05:12.000Z | cantidad= input("Cuantas personas van a cenar?")
cant = int(cantidad)
print(cant)
if cant > 8:
print("Lo siento, tendran que esperar")
else:
print("La mesa esta lista")
| 15.083333 | 48 | 0.674033 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 83 | 0.458564 |
5b0faab2d16278cb33dcd52c6711c4e057f78b52 | 7,424 | py | Python | build/piman.app/pysnmp/carrier/asyncore/dgram/base.py | jackgisel/team-athens | 91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0 | [
"Apache-2.0"
]
| null | null | null | build/piman.app/pysnmp/carrier/asyncore/dgram/base.py | jackgisel/team-athens | 91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0 | [
"Apache-2.0"
]
| null | null | null | build/piman.app/pysnmp/carrier/asyncore/dgram/base.py | jackgisel/team-athens | 91e2aa810c0064f8b6b39ee53c3b05f037e0aeb0 | [
"Apache-2.0"
]
| null | null | null | #
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <[email protected]>
# License: http://snmplabs.com/pysnmp/license.html
#
import socket
import errno
import sys
from pysnmp.carrier.asyncore.base import AbstractSocketTransport
from pysnmp.carrier import sockfix, sockmsg, error
from pysnmp import debug
# Ignore these socket errors
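# (a True value means the transport is also closed when that error is seen while reading)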
sockErrors = {errno.ESHUTDOWN: True,
errno.ENOTCONN: True,
errno.ECONNRESET: False,
errno.ECONNREFUSED: False,
errno.EAGAIN: False,
errno.EWOULDBLOCK: False}
if hasattr(errno, 'EBADFD'):
# bad FD may happen upon FD closure on n-1 select() event
sockErrors[errno.EBADFD] = True
class DgramSocketTransport(AbstractSocketTransport):
sockType = socket.SOCK_DGRAM
retryCount = 3
retryInterval = 1
addressType = lambda x: x
def __init__(self, sock=None, sockMap=None):
self.__outQueue = []
self._sendto = lambda s, b, a: s.sendto(b, a)
def __recvfrom(s, sz):
d, a = s.recvfrom(sz)
return d, self.addressType(a)
self._recvfrom = __recvfrom
AbstractSocketTransport.__init__(self, sock, sockMap)
def openClientMode(self, iface=None):
if iface is not None:
try:
self.socket.bind(iface)
except socket.error:
raise error.CarrierError(
'bind() for %s failed: %s' % (iface is None and "<all local>" or iface, sys.exc_info()[1]))
return self
def openServerMode(self, iface):
try:
self.socket.bind(iface)
except socket.error:
raise error.CarrierError('bind() for %s failed: %s' % (iface, sys.exc_info()[1],))
return self
def enableBroadcast(self, flag=1):
try:
self.socket.setsockopt(
socket.SOL_SOCKET, socket.SO_BROADCAST, flag
)
except socket.error:
raise error.CarrierError('setsockopt() for SO_BROADCAST failed: %s' % (sys.exc_info()[1],))
debug.logger & debug.flagIO and debug.logger('enableBroadcast: %s option SO_BROADCAST on socket %s' % (flag and "enabled" or "disabled", self.socket.fileno()))
return self
def enablePktInfo(self, flag=1):
if (not hasattr(self.socket, 'sendmsg') or
not hasattr(self.socket, 'recvmsg')):
raise error.CarrierError('sendmsg()/recvmsg() interface is not supported by this OS and/or Python version')
try:
if self.socket.family == socket.AF_INET:
self.socket.setsockopt(socket.SOL_IP, socket.IP_PKTINFO, flag)
if self.socket.family == socket.AF_INET6:
self.socket.setsockopt(socket.SOL_IPV6, socket.IPV6_RECVPKTINFO, flag)
except socket.error:
raise error.CarrierError('setsockopt() for %s failed: %s' % (self.socket.family == socket.AF_INET6 and "IPV6_RECVPKTINFO" or "IP_PKTINFO", sys.exc_info()[1]))
self._sendto = sockmsg.getSendTo(self.addressType)
self._recvfrom = sockmsg.getRecvFrom(self.addressType)
debug.logger & debug.flagIO and debug.logger('enablePktInfo: %s option %s on socket %s' % (self.socket.family == socket.AF_INET6 and "IPV6_RECVPKTINFO" or "IP_PKTINFO", flag and "enabled" or "disabled", self.socket.fileno()))
return self
def enableTransparent(self, flag=1):
try:
if self.socket.family == socket.AF_INET:
self.socket.setsockopt(
socket.SOL_IP, socket.IP_TRANSPARENT, flag
)
if self.socket.family == socket.AF_INET6:
self.socket.setsockopt(
socket.SOL_IPV6, socket.IPV6_TRANSPARENT, flag
)
except socket.error:
raise error.CarrierError('setsockopt() for IP_TRANSPARENT failed: %s' % sys.exc_info()[1])
except OSError:
            raise error.CarrierError('IP_TRANSPARENT socket option requires superuser privileges')
debug.logger & debug.flagIO and debug.logger('enableTransparent: %s option IP_TRANSPARENT on socket %s' % (flag and "enabled" or "disabled", self.socket.fileno()))
return self
def sendMessage(self, outgoingMessage, transportAddress):
self.__outQueue.append(
(outgoingMessage, self.normalizeAddress(transportAddress))
)
debug.logger & debug.flagIO and debug.logger('sendMessage: outgoingMessage queued (%d octets) %s' % (len(outgoingMessage), debug.hexdump(outgoingMessage)))
def normalizeAddress(self, transportAddress):
if not isinstance(transportAddress, self.addressType):
transportAddress = self.addressType(transportAddress)
if not transportAddress.getLocalAddress():
transportAddress.setLocalAddress(self.getLocalAddress())
return transportAddress
def getLocalAddress(self):
# one evil OS does not seem to support getsockname() for DGRAM sockets
try:
return self.socket.getsockname()
except Exception:
return '0.0.0.0', 0
# asyncore API
def handle_connect(self):
pass
def writable(self):
return self.__outQueue
def handle_write(self):
outgoingMessage, transportAddress = self.__outQueue.pop(0)
debug.logger & debug.flagIO and debug.logger('handle_write: transportAddress %r -> %r outgoingMessage (%d octets) %s' % (transportAddress.getLocalAddress(), transportAddress, len(outgoingMessage), debug.hexdump(outgoingMessage)))
if not transportAddress:
debug.logger & debug.flagIO and debug.logger('handle_write: missing dst address, loosing outgoing msg')
return
try:
self._sendto(
self.socket, outgoingMessage, transportAddress
)
except socket.error:
if sys.exc_info()[1].args[0] in sockErrors:
debug.logger & debug.flagIO and debug.logger('handle_write: ignoring socket error %s' % (sys.exc_info()[1],))
else:
raise error.CarrierError('sendto() failed for %s: %s' % (transportAddress, sys.exc_info()[1]))
def readable(self):
return 1
def handle_read(self):
try:
incomingMessage, transportAddress = self._recvfrom(self.socket, 65535)
transportAddress = self.normalizeAddress(transportAddress)
debug.logger & debug.flagIO and debug.logger(
'handle_read: transportAddress %r -> %r incomingMessage (%d octets) %s' % (transportAddress, transportAddress.getLocalAddress(), len(incomingMessage), debug.hexdump(incomingMessage)))
if not incomingMessage:
self.handle_close()
return
else:
self._cbFun(self, transportAddress, incomingMessage)
return
except socket.error:
if sys.exc_info()[1].args[0] in sockErrors:
debug.logger & debug.flagIO and debug.logger('handle_read: known socket error %s' % (sys.exc_info()[1],))
sockErrors[sys.exc_info()[1].args[0]] and self.handle_close()
return
else:
raise error.CarrierError('recvfrom() failed: %s' % (sys.exc_info()[1],))
def handle_close(self):
pass # no datagram connection
| 40.791209 | 237 | 0.630119 | 6,699 | 0.902344 | 0 | 0 | 0 | 0 | 0 | 0 | 1,353 | 0.182247 |
5b10e569de8510acb457502268786c36584d12b7 | 5,539 | py | Python | src/coreclr/scripts/superpmi-replay.py | JimmyCushnie/runtime | b7eb82871f1d742efb444873e11dd6241cea73d2 | [
"MIT"
]
| 2 | 2021-05-04T11:27:27.000Z | 2021-06-18T14:04:08.000Z | src/coreclr/scripts/superpmi-replay.py | JimmyCushnie/runtime | b7eb82871f1d742efb444873e11dd6241cea73d2 | [
"MIT"
]
| 18 | 2019-12-03T00:21:59.000Z | 2022-01-30T04:45:58.000Z | src/coreclr/scripts/superpmi-replay.py | JimmyCushnie/runtime | b7eb82871f1d742efb444873e11dd6241cea73d2 | [
"MIT"
]
| 2 | 2022-01-23T12:24:04.000Z | 2022-02-07T15:44:03.000Z | #!/usr/bin/env python3
#
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the MIT license.
#
##
# Title               : superpmi-replay.py
#
# Notes:
#
# Script to run "superpmi replay" for various collections under various COMPlus_JitStressRegs value.
################################################################################
################################################################################
import argparse
from os import path
import os
from os import listdir
from coreclr_arguments import *
from superpmi_setup import run_command
parser = argparse.ArgumentParser(description="description")
parser.add_argument("-arch", help="Architecture")
parser.add_argument("-platform", help="OS platform")
parser.add_argument("-jit_directory", help="path to the directory containing clrjit binaries")
parser.add_argument("-log_directory", help="path to the directory containing superpmi log files")
jit_flags = [
"JitStressRegs=0",
"JitStressRegs=1",
"JitStressRegs=2",
"JitStressRegs=3",
"JitStressRegs=4",
"JitStressRegs=8",
"JitStressRegs=0x10",
"JitStressRegs=0x80",
"JitStressRegs=0x1000",
]
def setup_args(args):
""" Setup the args for SuperPMI to use.
Args:
args (ArgParse): args parsed by arg parser
Returns:
args (CoreclrArguments)
"""
coreclr_args = CoreclrArguments(args, require_built_core_root=False, require_built_product_dir=False,
require_built_test_dir=False, default_build_type="Checked")
coreclr_args.verify(args,
"arch",
lambda unused: True,
"Unable to set arch")
coreclr_args.verify(args,
"platform",
lambda unused: True,
"Unable to set platform")
coreclr_args.verify(args,
"jit_directory",
lambda jit_directory: os.path.isdir(jit_directory),
"jit_directory doesn't exist")
coreclr_args.verify(args,
"log_directory",
lambda log_directory: True,
"log_directory doesn't exist")
return coreclr_args
def main(main_args):
"""Main entrypoint
Args:
main_args ([type]): Arguments to the script
"""
python_path = sys.executable
cwd = os.path.dirname(os.path.realpath(__file__))
coreclr_args = setup_args(main_args)
spmi_location = path.join(cwd, "artifacts", "spmi")
log_directory = coreclr_args.log_directory
platform_name = coreclr_args.platform
os_name = "win" if platform_name.lower() == "windows" else "unix"
arch_name = coreclr_args.arch
host_arch_name = "x64" if arch_name.endswith("64") else "x86"
jit_path = path.join(coreclr_args.jit_directory, 'clrjit_{}_{}_{}.dll'.format(os_name, arch_name, host_arch_name))
print("Running superpmi.py download")
run_command([python_path, path.join(cwd, "superpmi.py"), "download", "--no_progress", "-target_os", platform_name,
"-target_arch", arch_name, "-core_root", cwd, "-spmi_location", spmi_location], _exit_on_fail=True)
failed_runs = []
for jit_flag in jit_flags:
log_file = path.join(log_directory, 'superpmi_{}.log'.format(jit_flag.replace("=", "_")))
print("Running superpmi.py replay for {}".format(jit_flag))
_, _, return_code = run_command([
python_path, path.join(cwd, "superpmi.py"), "replay", "-core_root", cwd,
"-jitoption", jit_flag, "-jitoption", "TieredCompilation=0",
"-target_os", platform_name, "-target_arch", arch_name,
"-arch", host_arch_name,
"-jit_path", jit_path, "-spmi_location", spmi_location,
"-log_level", "debug", "-log_file", log_file])
if return_code != 0:
failed_runs.append("Failure in {}".format(log_file))
# Consolidate all superpmi_*.logs in superpmi_platform_architecture.log
final_log_name = path.join(log_directory, "superpmi_{}_{}.log".format(platform_name, arch_name))
print("Consolidating final {}".format(final_log_name))
with open(final_log_name, "a") as final_superpmi_log:
for superpmi_log in listdir(log_directory):
if not superpmi_log.startswith("superpmi_Jit") or not superpmi_log.endswith(".log"):
continue
print("Appending {}".format(superpmi_log))
final_superpmi_log.write("======================================================={}".format(os.linesep))
final_superpmi_log.write("Contents from {}{}".format(superpmi_log, os.linesep))
final_superpmi_log.write("======================================================={}".format(os.linesep))
with open(path.join(log_directory, superpmi_log), "r") as current_superpmi_log:
contents = current_superpmi_log.read()
final_superpmi_log.write(contents)
# Log failures summary
if len(failed_runs) > 0:
final_superpmi_log.write(os.linesep)
final_superpmi_log.write(os.linesep)
final_superpmi_log.write("========Failed runs summary========".format(os.linesep))
final_superpmi_log.write(os.linesep.join(failed_runs))
return 0 if len(failed_runs) == 0 else 1
if __name__ == "__main__":
args = parser.parse_args()
sys.exit(main(args))
| 37.938356 | 118 | 0.609677 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,026 | 0.36577 |
5b10fde1a0b02a1e7f85ed42e2bfe8b97109fa80 | 514 | py | Python | parse_cookie.py | olnikiforov/hillel | 911bb94169aa277932e346e564e5efd69073d634 | [
"MIT"
]
| null | null | null | parse_cookie.py | olnikiforov/hillel | 911bb94169aa277932e346e564e5efd69073d634 | [
"MIT"
]
| 1 | 2021-04-01T18:56:38.000Z | 2021-04-01T18:56:38.000Z | parse_cookie.py | olnikiforov/hillel | 911bb94169aa277932e346e564e5efd69073d634 | [
"MIT"
]
| null | null | null | def parse_cookie(query: str) -> dict:
    """Parse a cookie header string such as "name=Dima;age=28;" into a dict.
    Only the first "=" in each pair separates key from value, so values may themselves contain "=".
    """
    res = {}
if query:
data = query.split(';')
for i in data:
if '=' in i:
res[i.split('=')[0]] = '='.join(i.split('=')[1:])
return res
if __name__ == '__main__':
assert parse_cookie('name=Dima;') == {'name': 'Dima'}
assert parse_cookie('') == {}
assert parse_cookie('name=Dima;age=28;') == {'name': 'Dima', 'age': '28'}
assert parse_cookie('name=Dima=User;age=28;') == {'name': 'Dima=User', 'age': '28'}
| 30.235294 | 87 | 0.509728 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 141 | 0.274319 |
5b110f22e3b74f1f108abb0d9e76465e1a151a75 | 2,234 | py | Python | neuralgym/callbacks/model_saver.py | pancookie/SNPGAN_TECcompletion | 2245179db9d9c64da20a6dd7098795a1cf724ad3 | [
"MIT"
]
| 1 | 2022-02-06T07:38:43.000Z | 2022-02-06T07:38:43.000Z | neuralgym/callbacks/model_saver.py | pancookie/SNPGAN_TECcompletion | 2245179db9d9c64da20a6dd7098795a1cf724ad3 | [
"MIT"
]
| null | null | null | neuralgym/callbacks/model_saver.py | pancookie/SNPGAN_TECcompletion | 2245179db9d9c64da20a6dd7098795a1cf724ad3 | [
"MIT"
]
| null | null | null | """model_saver"""
import os
from . import PeriodicCallback, CallbackLoc
from ..utils.logger import callback_log
class ModelSaver(PeriodicCallback):
"""Save model to file at every pstep step_start.
Args:
        pstep (int): Save the model every pstep steps.
        saver: TensorFlow saver.
        dump_prefix (str): Prefix for saving model files.
        train_spe (int): Training steps per epoch, used to convert the global step into epochs.
        save_every_epochs (int): Write a checkpoint every this many epochs.
"""
def __init__(self, pstep, saver, dump_prefix, train_spe=None, save_every_epochs=50, op_lr=False, optim=None):
super().__init__(CallbackLoc.step_start, pstep)
self._saver = saver
        self._dump_prefix = dump_prefix
        self.train_spe = train_spe
        self.see = save_every_epochs
# self.optim = optim ; self.op_lr = op_lr
# self.best_losses = {}
# self.best_losses['d_loss'] = 999.; self.best_losses['g_loss'] = 999.; self.best_losses['avg_loss'] = 999.
dump_dir = os.path.dirname(self._dump_prefix)
if not os.path.exists(dump_dir):
os.makedirs(dump_dir)
callback_log('Initialize ModelSaver: mkdirs %s.' % dump_dir)
'''
# make two folders to save best D, G, and avg loss
self.dump_dir_d = os.path.join(os.path.join(dump_dir, 'best_D'))
if not os.path.exists(self.dump_dir_d):
os.makedirs(self.dump_dir_d)
self.dump_dir_g = os.path.join(os.path.join(dump_dir, 'best_G'))
if not os.path.exists(self.dump_dir_g):
os.makedirs(self.dump_dir_g)
self.dump_dir_avg = os.path.join(os.path.join(dump_dir, 'best_avg'))
if not os.path.exists(self.dump_dir_avg):
os.makedirs(self.dump_dir_avg)
'''
def run(self, sess, step):
'''
if self.op_lr:
g_lr = sess.run(self.optim['g']._lr)
d_lr = sess.run(self.optim['d']._lr)
callback_log('At step {}, lr: g: {}, d: {}.'.format(
step, g_lr, d_lr))
'''
# save the best loss
# save model
if step != 0 and int(step/self.train_spe)%self.see == 0:
callback_log('Trigger ModelSaver: Save model to {}-{}.'.format(
self._dump_prefix, step))
self._saver.save(sess, self._dump_prefix, global_step=step)
| 37.864407 | 115 | 0.606088 | 2,118 | 0.948075 | 0 | 0 | 0 | 0 | 0 | 0 | 1,320 | 0.590868 |
5b1186da0e35b3ea68ef672cbd4ad76ad6086353 | 1,352 | py | Python | rower_monitor/boat_metrics.py | sergiomo/diy-rower-monitor | 32730025874f32015b8a582175db36cdd351ce1e | [
"Unlicense"
]
| null | null | null | rower_monitor/boat_metrics.py | sergiomo/diy-rower-monitor | 32730025874f32015b8a582175db36cdd351ce1e | [
"Unlicense"
]
| null | null | null | rower_monitor/boat_metrics.py | sergiomo/diy-rower-monitor | 32730025874f32015b8a582175db36cdd351ce1e | [
"Unlicense"
]
| null | null | null | from .time_series import TimeSeries
class BoatModel:
def __init__(self, workout):
self.workout = workout
self.position = TimeSeries()
self.speed = TimeSeries()
def update(self):
"""This function gets called on every flywheel encoder tick."""
pass
class RotatingWheel(BoatModel):
"""A simple model to calculate boat speed and distance traveled. We assume the "boat" is just a wheel moving on
the ground, with the same rotational speed as the rower's flywheel."""
WHEEL_CIRCUMFERENCE_METERS = 1.0
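    # With a 1.0 m circumference, one flywheel revolution moves the "boat" one metre, so the
    # revolution count accumulated in update() doubles as distance travelled in metres.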
def update(self):
if len(self.position) == 0:
current_position = 0
else:
current_position = self.position.values[-1] + 1.0 / self.workout.machine.num_encoder_pulses_per_revolution
self.position.append(
value=current_position,
timestamp=self.workout.machine.encoder_pulse_timestamps[-1]
)
if len(self.workout.machine.flywheel_speed) > 0:
# Linear speed of a rolling wheel [m/s] = rotational speed [rev/s] * cirumference [m]
boat_speed = self.workout.machine.flywheel_speed.values[-1] * self.WHEEL_CIRCUMFERENCE_METERS
self.speed.append(
value=boat_speed,
timestamp=self.workout.machine.flywheel_speed.timestamps[-1]
)
| 35.578947 | 118 | 0.647929 | 1,310 | 0.968935 | 0 | 0 | 0 | 0 | 0 | 0 | 334 | 0.247041 |
5b11b42643e2e5c40307befa37ef00c0f90f66bd | 121 | py | Python | trackMe-backend/src/config.py | matth3wliuu/trackMe | 0fb22bb8adf147fb4d4ed09c5c7253d0e54bf992 | [
"MIT"
]
| 1 | 2022-01-28T06:20:03.000Z | 2022-01-28T06:20:03.000Z | trackMe-backend/src/config.py | matth3wliuu/trackMe | 0fb22bb8adf147fb4d4ed09c5c7253d0e54bf992 | [
"MIT"
]
| null | null | null | trackMe-backend/src/config.py | matth3wliuu/trackMe | 0fb22bb8adf147fb4d4ed09c5c7253d0e54bf992 | [
"MIT"
]
| null | null | null | dbConfig = {
"user": "root",
"password": "123567l098",
"host": "localhost",
"database": "trackMe_dev"
} | 20.166667 | 29 | 0.545455 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 74 | 0.61157 |
5b1363485151128caf183c9f6b705444acca65c5 | 136 | py | Python | src/localsrv/urls.py | vladiibine/localsrv | 7bb8fd2e08f43a1b5adef9ad17ab534a317e0a57 | [
"MIT"
]
| null | null | null | src/localsrv/urls.py | vladiibine/localsrv | 7bb8fd2e08f43a1b5adef9ad17ab534a317e0a57 | [
"MIT"
]
| 4 | 2015-04-28T08:20:26.000Z | 2015-06-13T06:32:31.000Z | src/localsrv/urls.py | vladiibine/localsrv | 7bb8fd2e08f43a1b5adef9ad17ab534a317e0a57 | [
"MIT"
]
| 1 | 2018-03-04T20:29:27.000Z | 2018-03-04T20:29:27.000Z | from django.conf.urls import url
from .views import serve_all
urlpatterns = (
url(r'^.*$', serve_all, name="localsrv:serve_all"),
) | 22.666667 | 55 | 0.705882 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 27 | 0.198529 |
5b14c0f520aa2dfc088e43cb4960682061f61a03 | 409 | py | Python | netrd/__init__.py | sdmccabe/netrd | f703c19b02f42c9f54bcab57014381da11dd58da | [
"MIT"
]
| 116 | 2019-01-17T18:31:43.000Z | 2022-03-31T13:37:21.000Z | netrd/__init__.py | sdmccabe/netrd | f703c19b02f42c9f54bcab57014381da11dd58da | [
"MIT"
]
| 175 | 2019-01-15T01:19:13.000Z | 2021-05-25T16:51:26.000Z | netrd/__init__.py | sdmccabe/netrd | f703c19b02f42c9f54bcab57014381da11dd58da | [
"MIT"
]
| 36 | 2019-01-14T20:38:32.000Z | 2022-01-21T20:58:38.000Z | """
netrd
-----
netrd stands for Network Reconstruction and Distances. It is a repository
of different algorithms for constructing a network from time series data,
as well as for comparing two networks. It is the product of the Network
Science Institute 2019 Collabathon.
"""
from . import distance # noqa
from . import reconstruction # noqa
from . import dynamics # noqa
from . import utilities # noqa
| 25.5625 | 73 | 0.760391 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 300 | 0.733496 |
5b14c2ff1b60260805608d9bdfcac0cbbde63652 | 5,613 | py | Python | pytorch/GPT.py | lyq628/NLP-Tutorials | 7c9d117a3542695e79419c835ba9e98ef80800b8 | [
"MIT"
]
| 643 | 2018-11-30T09:14:29.000Z | 2022-03-28T14:04:15.000Z | pytorch/GPT.py | lyq628/NLP-Tutorials | 7c9d117a3542695e79419c835ba9e98ef80800b8 | [
"MIT"
]
| 22 | 2019-01-03T17:58:12.000Z | 2022-02-10T01:56:00.000Z | pytorch/GPT.py | lyq628/NLP-Tutorials | 7c9d117a3542695e79419c835ba9e98ef80800b8 | [
"MIT"
]
| 258 | 2018-12-03T17:15:04.000Z | 2022-03-30T07:45:49.000Z | from transformer import Encoder
from torch import nn,optim
from torch.nn.functional import cross_entropy,softmax, relu
from torch.utils.data import DataLoader
from torch.utils.data.dataloader import default_collate
import torch
import utils
import os
import pickle
class GPT(nn.Module):
def __init__(self, model_dim, max_len, num_layer, num_head, n_vocab, lr, max_seg=3, drop_rate=0.2,padding_idx=0):
super().__init__()
self.padding_idx = padding_idx
self.n_vocab = n_vocab
self.max_len = max_len
self.word_emb = nn.Embedding(n_vocab,model_dim)
self.word_emb.weight.data.normal_(0,0.1)
self.segment_emb = nn.Embedding(num_embeddings= max_seg, embedding_dim=model_dim)
self.segment_emb.weight.data.normal_(0,0.1)
self.position_emb = torch.empty(1,max_len,model_dim)
nn.init.kaiming_normal_(self.position_emb,mode='fan_out', nonlinearity='relu')
self.position_emb = nn.Parameter(self.position_emb)
self.encoder = Encoder(n_head=num_head, emb_dim=model_dim, drop_rate=drop_rate, n_layer=num_layer)
self.task_mlm = nn.Linear(in_features=model_dim, out_features=n_vocab)
self.task_nsp = nn.Linear(in_features=model_dim*self.max_len, out_features=2)
self.opt = optim.Adam(self.parameters(),lr)
def forward(self,seqs, segs, training=False):
embed = self.input_emb(seqs, segs)
z = self.encoder(embed, training, mask = self.mask(seqs)) # [n, step, model_dim]
mlm_logits = self.task_mlm(z) # [n, step, n_vocab]
nsp_logits = self.task_nsp(z.reshape(z.shape[0],-1)) # [n, n_cls]
return mlm_logits, nsp_logits
def step(self, seqs, segs, seqs_, nsp_labels):
self.opt.zero_grad()
mlm_logits, nsp_logits = self(seqs, segs, training=True)
pred_loss = cross_entropy(mlm_logits.reshape(-1,self.n_vocab),seqs_.reshape(-1))
nsp_loss = cross_entropy(nsp_logits,nsp_labels.reshape(-1))
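        # total loss: next-token prediction plus a down-weighted next-sentence-prediction auxiliary term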
loss = pred_loss + 0.2 * nsp_loss
loss.backward()
self.opt.step()
return loss.cpu().data.numpy(), mlm_logits
def input_emb(self,seqs, segs):
# device = next(self.parameters()).device
# self.position_emb = self.position_emb.to(device)
return self.word_emb(seqs) + self.segment_emb(segs) + self.position_emb
def mask(self, seqs):
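        # combine a causal (upper-triangular) look-ahead mask with a padding mask so attention can
        # neither peek at future tokens nor attend to padded positions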
device = next(self.parameters()).device
batch_size, seq_len = seqs.shape
mask = torch.triu(torch.ones((seq_len,seq_len), dtype=torch.long), diagonal=1).to(device) # [seq_len ,seq_len]
pad = torch.eq(seqs,self.padding_idx) # [n, seq_len]
mask = torch.where(pad[:,None,None,:],1,mask[None,None,:,:]).to(device) # [n, 1, seq_len, seq_len]
return mask>0 # [n, 1, seq_len, seq_len]
@property
def attentions(self):
attentions = {
"encoder": [l.mh.attention.cpu().data.numpy() for l in self.encoder.encoder_layers]
}
return attentions
def train():
MODEL_DIM = 256
N_LAYER = 4
LEARNING_RATE = 1e-4
dataset = utils.MRPCData("./MRPC",2000)
print("num word: ",dataset.num_word)
model = GPT(
model_dim=MODEL_DIM, max_len=dataset.max_len-1, num_layer=N_LAYER, num_head=4, n_vocab=dataset.num_word,
lr=LEARNING_RATE, max_seg=dataset.num_seg, drop_rate=0.2, padding_idx=dataset.pad_id
)
if torch.cuda.is_available():
print("GPU train avaliable")
device =torch.device("cuda")
model = model.cuda()
else:
device = torch.device("cpu")
model = model.cpu()
loader = DataLoader(dataset,batch_size=32,shuffle=True)
for epoch in range(100):
for batch_idx, batch in enumerate(loader):
seqs, segs,xlen,nsp_labels = batch
seqs, segs,nsp_labels = seqs.type(torch.LongTensor).to(device), segs.type(torch.LongTensor).to(device),nsp_labels.to(device)
# pred: [n, step, n_vocab]
loss,pred = model.step(seqs=seqs[:,:-1], segs= segs[:,:-1], seqs_=seqs[:,1:], nsp_labels=nsp_labels)
if batch_idx %100 == 0:
pred = pred[0].cpu().data.numpy().argmax(axis = 1) # [step]
print(
"Epoch: ",epoch,
"|batch: ", batch_idx,
"| loss: %.3f" % loss,
"\n| tgt: ", " ".join([dataset.i2v[i] for i in seqs[0, 1:].cpu().data.numpy()[:xlen[0].sum()+1]]),
"\n| prd: ", " ".join([dataset.i2v[i] for i in pred[:xlen[0].sum()+1]]),
)
os.makedirs("./visual/models/gpt",exist_ok=True)
torch.save(model.state_dict(),"./visual/models/gpt/model.pth")
export_attention(model,device,dataset)
def export_attention(model,device,data,name="gpt"):
model.load_state_dict(torch.load("./visual/models/gpt/model.pth",map_location=device))
seqs, segs,xlen,nsp_labels = data[:32]
seqs, segs,xlen,nsp_labels = torch.from_numpy(seqs),torch.from_numpy(segs),torch.from_numpy(xlen),torch.from_numpy(nsp_labels)
seqs, segs,nsp_labels = seqs.type(torch.LongTensor).to(device), segs.type(torch.LongTensor).to(device),nsp_labels.to(device)
model(seqs[:,:-1],segs[:,:-1],False)
seqs = seqs.cpu().data.numpy()
data = {"src": [[data.i2v[i] for i in seqs[j]] for j in range(len(seqs))], "attentions": model.attentions}
path = "./visual/tmp/%s_attention_matrix.pkl" % name
os.makedirs(os.path.dirname(path), exist_ok=True)
with open(path, "wb") as f:
pickle.dump(data, f)
if __name__ == "__main__":
train()
| 43.176923 | 136 | 0.637983 | 2,766 | 0.492785 | 0 | 0 | 190 | 0.03385 | 0 | 0 | 559 | 0.09959 |
5b14e976757ac56925070b1b4efc08dd156d8a00 | 22,691 | py | Python | skyportal/plot.py | dannygoldstein/skyportal | 3f3518136530fcf5bd1787a4c890782164627fce | [
"BSD-3-Clause"
]
| null | null | null | skyportal/plot.py | dannygoldstein/skyportal | 3f3518136530fcf5bd1787a4c890782164627fce | [
"BSD-3-Clause"
]
| null | null | null | skyportal/plot.py | dannygoldstein/skyportal | 3f3518136530fcf5bd1787a4c890782164627fce | [
"BSD-3-Clause"
]
| null | null | null | import numpy as np
import pandas as pd
from bokeh.core.json_encoder import serialize_json
from bokeh.core.properties import List, String
from bokeh.document import Document
from bokeh.layouts import row, column
from bokeh.models import CustomJS, HoverTool, Range1d, Slider, Button
from bokeh.models.widgets import CheckboxGroup, TextInput, Panel, Tabs
from bokeh.palettes import viridis
from bokeh.plotting import figure, ColumnDataSource
from bokeh.util.compiler import bundle_all_models
from bokeh.util.serialization import make_id
from matplotlib import cm
from matplotlib.colors import rgb2hex
import os
from skyportal.models import (
DBSession,
Obj,
Photometry,
Group,
Instrument,
Telescope,
PHOT_ZP,
)
import sncosmo
DETECT_THRESH = 5 # sigma
SPEC_LINES = {
'H': ([3970, 4102, 4341, 4861, 6563], '#ff0000'),
'He': ([3886, 4472, 5876, 6678, 7065], '#002157'),
'He II': ([3203, 4686], '#003b99'),
'C II': ([3919, 4267, 6580, 7234, 9234], '#570199'),
'C III': ([4650, 5696], '#a30198'),
'C IV': ([5801], '#ff0073'),
'O': ([7772, 7774, 7775, 8447, 9266], '#007236'),
'O II': ([3727], '#00a64d'),
'O III': ([4959, 5007], '#00bf59'),
'Na': ([5890, 5896, 8183, 8195], '#aba000'),
'Mg': ([2780, 2852, 3829, 3832, 3838, 4571, 5167, 5173, 5184], '#8c6239'),
'Mg II': ([2791, 2796, 2803, 4481], '#bf874e'),
'Si II': ([3856, 5041, 5056, 5670, 6347, 6371], '#5674b9'),
'S II': ([5433, 5454, 5606, 5640, 5647, 6715], '#a38409'),
'Ca II': ([3934, 3969, 7292, 7324, 8498, 8542, 8662], '#005050'),
'Fe II': ([5018, 5169], '#f26c4f'),
'Fe III': ([4397, 4421, 4432, 5129, 5158], '#f9917b'),
}
# TODO add groups
# Galaxy lines
#
# 'H': '4341, 4861, 6563;
# 'N II': '6548, 6583;
# 'O I': '6300;'
# 'O II': '3727;
# 'O III': '4959, 5007;
# 'Mg II': '2798;
# 'S II': '6717, 6731'
# 'H': '3970, 4102, 4341, 4861, 6563'
# 'Na': '5890, 5896, 8183, 8195'
# 'He': '3886, 4472, 5876, 6678, 7065'
# 'Mg': '2780, 2852, 3829, 3832, 3838, 4571, 5167, 5173, 5184'
# 'He II': '3203, 4686'
# 'Mg II': '2791, 2796, 2803, 4481'
# 'O': '7772, 7774, 7775, 8447, 9266'
# 'Si II': '3856, 5041, 5056, 5670 6347, 6371'
# 'O II': '3727'
# 'Ca II': '3934, 3969, 7292, 7324, 8498, 8542, 8662'
# 'O III': '4959, 5007'
# 'Fe II': '5018, 5169'
# 'S II': '5433, 5454, 5606, 5640, 5647, 6715'
# 'Fe III': '4397, 4421, 4432, 5129, 5158'
#
# Other
#
# 'Tel: 6867-6884, 7594-7621'
# 'Tel': '#b7b7b7',
# 'H: 4341, 4861, 6563;
# 'N II': 6548, 6583;
# 'O I': 6300;
# 'O II': 3727;
# 'O III': 4959, 5007;
# 'Mg II': 2798;
# 'S II': 6717, 6731'
class CheckboxWithLegendGroup(CheckboxGroup):
colors = List(String, help="List of legend colors")
__implementation__ = """
import {empty, input, label, div} from "core/dom"
import * as p from "core/properties"
import {CheckboxGroup, CheckboxGroupView} from "models/widgets/checkbox_group"
export class CheckboxWithLegendGroupView extends CheckboxGroupView
render: () ->
super()
empty(@el)
active = @model.active
colors = @model.colors
for text, i in @model.labels
inputEl = input({type: "checkbox", value: "#{i}"})
inputEl.addEventListener("change", () => @change_input())
if @model.disabled then inputEl.disabled = true
if i in active then inputEl.checked = true
attrs = {
style: "border-left: 12px solid #{colors[i]}; padding-left: 0.3em;"
}
labelEl = label(attrs, inputEl, text)
if @model.inline
labelEl.classList.add("bk-bs-checkbox-inline")
@el.appendChild(labelEl)
else
divEl = div({class: "bk-bs-checkbox"}, labelEl)
@el.appendChild(divEl)
return @
export class CheckboxWithLegendGroup extends CheckboxGroup
type: "CheckboxWithLegendGroup"
default_view: CheckboxWithLegendGroupView
@define {
colors: [ p.Array, [] ]
}
"""
# TODO replace with (script, div) method
def _plot_to_json(plot):
"""Convert plot to JSON objects necessary for rendering with `bokehJS`.
Parameters
----------
plot : bokeh.plotting.figure.Figure
Bokeh plot object to be rendered.
Returns
-------
(str, str)
Returns (docs_json, render_items) json for the desired plot.
"""
render_items = [{'docid': plot._id, 'elementid': make_id()}]
doc = Document()
doc.add_root(plot)
docs_json_inner = doc.to_json()
docs_json = {render_items[0]['docid']: docs_json_inner}
docs_json = serialize_json(docs_json)
render_items = serialize_json(render_items)
custom_model_js = bundle_all_models()
return docs_json, render_items, custom_model_js
tooltip_format = [
('mjd', '@mjd{0.000000}'),
('flux', '@flux'),
('filter', '@filter'),
('fluxerr', '@fluxerr'),
('mag', '@mag'),
('magerr', '@magerr'),
('lim_mag', '@lim_mag'),
('instrument', '@instrument'),
('stacked', '@stacked'),
]
cmap = cm.get_cmap('jet_r')
def get_color(bandpass_name, cmap_limits=(3000.0, 10000.0)):
if bandpass_name.startswith('ztf'):
return {'ztfg': 'green', 'ztfi': 'orange', 'ztfr': 'red'}[bandpass_name]
else:
bandpass = sncosmo.get_bandpass(bandpass_name)
wave = bandpass.wave_eff
rgb = cmap((cmap_limits[1] - wave) / (cmap_limits[1] - cmap_limits[0]))[:3]
bandcolor = rgb2hex(rgb)
return bandcolor
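# Illustrative examples (not part of the original source): get_color('ztfr') returns
# 'red' from the hard-coded ZTF mapping, while any other bandpass known to sncosmo
# (e.g. 'sdssg') is mapped through the jet_r colormap by its effective wavelength and
# returned as a '#rrggbb' hex string.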
# TODO make async so that thread isn't blocked
def photometry_plot(obj_id, user, width=600, height=300):
"""Create scatter plot of photometry for object.
Parameters
----------
obj_id : str
ID of Obj to be plotted.
Returns
-------
(str, str)
Returns (docs_json, render_items) json for the desired plot.
"""
data = pd.read_sql(
DBSession()
.query(
Photometry,
Telescope.nickname.label("telescope"),
Instrument.name.label("instrument"),
)
.join(Instrument, Instrument.id == Photometry.instrument_id)
.join(Telescope, Telescope.id == Instrument.telescope_id)
.filter(Photometry.obj_id == obj_id)
.filter(
Photometry.groups.any(Group.id.in_([g.id for g in user.accessible_groups]))
)
.statement,
DBSession().bind,
)
if data.empty:
return None, None, None
data['color'] = [get_color(f) for f in data['filter']]
data['label'] = [
f'{i} {f}-band' for i, f in zip(data['instrument'], data['filter'])
]
data['zp'] = PHOT_ZP
data['magsys'] = 'ab'
data['alpha'] = 1.0
data['lim_mag'] = -2.5 * np.log10(data['fluxerr'] * DETECT_THRESH) + data['zp']
# Passing a dictionary to a bokeh datasource causes the frontend to die,
# deleting the dictionary column fixes that
del data['original_user_data']
# keep track of things that are only upper limits
data['hasflux'] = ~data['flux'].isna()
# calculate the magnitudes - a photometry point is considered "significant"
# or "detected" (and thus can be represented by a magnitude) if its snr
# is above DETECT_THRESH
obsind = data['hasflux'] & (
data['flux'].fillna(0.0) / data['fluxerr'] >= DETECT_THRESH
)
data.loc[~obsind, 'mag'] = None
data.loc[obsind, 'mag'] = -2.5 * np.log10(data[obsind]['flux']) + PHOT_ZP
# calculate the magnitude errors using standard error propagation formulae
# https://en.wikipedia.org/wiki/Propagation_of_uncertainty#Example_formulae
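    # For m = -2.5*log10(f) + ZP, dm/df = -2.5 / (f * ln(10)), so to first order
    # sigma_m ~= (2.5 / ln(10)) * sigma_f / f, which is the expression used below.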
data.loc[~obsind, 'magerr'] = None
coeff = 2.5 / np.log(10)
magerrs = np.abs(coeff * data[obsind]['fluxerr'] / data[obsind]['flux'])
data.loc[obsind, 'magerr'] = magerrs
data['obs'] = obsind
data['stacked'] = False
split = data.groupby('label', sort=False)
finite = np.isfinite(data['flux'])
fdata = data[finite]
lower = np.min(fdata['flux']) * 0.95
upper = np.max(fdata['flux']) * 1.05
plot = figure(
plot_width=width,
plot_height=height,
active_drag='box_zoom',
tools='box_zoom,wheel_zoom,pan,reset,save',
y_range=(lower, upper),
)
imhover = HoverTool(tooltips=tooltip_format)
plot.add_tools(imhover)
model_dict = {}
for i, (label, sdf) in enumerate(split):
# for the flux plot, we only show things that have a flux value
df = sdf[sdf['hasflux']]
key = f'obs{i}'
model_dict[key] = plot.scatter(
x='mjd',
y='flux',
color='color',
marker='circle',
fill_color='color',
alpha='alpha',
source=ColumnDataSource(df),
)
imhover.renderers.append(model_dict[key])
key = f'bin{i}'
model_dict[key] = plot.scatter(
x='mjd',
y='flux',
color='color',
marker='circle',
fill_color='color',
source=ColumnDataSource(
data=dict(
mjd=[],
flux=[],
fluxerr=[],
filter=[],
color=[],
lim_mag=[],
mag=[],
magerr=[],
stacked=[],
instrument=[],
)
),
)
imhover.renderers.append(model_dict[key])
key = 'obserr' + str(i)
y_err_x = []
y_err_y = []
for d, ro in df.iterrows():
px = ro['mjd']
py = ro['flux']
err = ro['fluxerr']
y_err_x.append((px, px))
y_err_y.append((py - err, py + err))
model_dict[key] = plot.multi_line(
xs='xs',
ys='ys',
color='color',
alpha='alpha',
source=ColumnDataSource(
data=dict(
xs=y_err_x, ys=y_err_y, color=df['color'], alpha=[1.0] * len(df)
)
),
)
key = f'binerr{i}'
model_dict[key] = plot.multi_line(
xs='xs',
ys='ys',
color='color',
source=ColumnDataSource(data=dict(xs=[], ys=[], color=[])),
)
plot.xaxis.axis_label = 'MJD'
plot.yaxis.axis_label = 'Flux (μJy)'
plot.toolbar.logo = None
toggle = CheckboxWithLegendGroup(
labels=list(data.label.unique()),
active=list(range(len(data.label.unique()))),
colors=list(data.color.unique()),
)
# TODO replace `eval` with Namespaces
# https://github.com/bokeh/bokeh/pull/6340
toggle.callback = CustomJS(
args={'toggle': toggle, **model_dict},
code=open(
os.path.join(os.path.dirname(__file__), '../static/js/plotjs', 'togglef.js')
).read(),
)
slider = Slider(start=0.0, end=15.0, value=0.0, step=1.0, title='Binsize (days)')
callback = CustomJS(
args={'slider': slider, 'toggle': toggle, **model_dict},
code=open(
os.path.join(os.path.dirname(__file__), '../static/js/plotjs', 'stackf.js')
)
.read()
.replace('default_zp', str(PHOT_ZP))
.replace('detect_thresh', str(DETECT_THRESH)),
)
slider.js_on_change('value', callback)
# Mark the first and last detections
detection_dates = data[data['hasflux']]['mjd']
if len(detection_dates) > 0:
first = round(detection_dates.min(), 6)
last = round(detection_dates.max(), 6)
first_color = "#34b4eb"
last_color = "#8992f5"
midpoint = (upper + lower) / 2
line_top = 5 * upper - 4 * midpoint
line_bottom = 5 * lower - 4 * midpoint
first_x = np.full(5000, first)
last_x = np.full(5000, last)
y = np.linspace(line_bottom, line_top, num=5000)
first_r = plot.line(
x=first_x, y=y, line_alpha=0.5, line_color=first_color, line_width=2,
)
plot.add_tools(
HoverTool(tooltips=[("First detection", f'{first}')], renderers=[first_r],)
)
last_r = plot.line(
x=last_x, y=y, line_alpha=0.5, line_color=last_color, line_width=2
)
plot.add_tools(
HoverTool(tooltips=[("Last detection", f'{last}')], renderers=[last_r],)
)
layout = row(plot, toggle)
layout = column(slider, layout)
p1 = Panel(child=layout, title='Flux')
# now make the mag light curve
ymax = np.nanmax(data['mag']) + 0.1
ymin = np.nanmin(data['mag']) - 0.1
plot = figure(
plot_width=width,
plot_height=height,
active_drag='box_zoom',
tools='box_zoom,wheel_zoom,pan,reset,save',
y_range=(ymax, ymin),
toolbar_location='above',
)
# Mark the first and last detections again
if len(detection_dates) > 0:
midpoint = (ymax + ymin) / 2
line_top = 5 * ymax - 4 * midpoint
line_bottom = 5 * ymin - 4 * midpoint
y = np.linspace(line_bottom, line_top, num=5000)
first_r = plot.line(
x=first_x, y=y, line_alpha=0.5, line_color=first_color, line_width=2,
)
plot.add_tools(
HoverTool(tooltips=[("First detection", f'{first}')], renderers=[first_r],)
)
last_r = plot.line(
x=last_x, y=y, line_alpha=0.5, line_color=last_color, line_width=2
)
plot.add_tools(
HoverTool(
tooltips=[("Last detection", f'{last}')],
renderers=[last_r],
point_policy='follow_mouse',
)
)
imhover = HoverTool(tooltips=tooltip_format)
plot.add_tools(imhover)
model_dict = {}
for i, (label, df) in enumerate(split):
key = f'obs{i}'
model_dict[key] = plot.scatter(
x='mjd',
y='mag',
color='color',
marker='circle',
fill_color='color',
alpha='alpha',
source=ColumnDataSource(df[df['obs']]),
)
imhover.renderers.append(model_dict[key])
unobs_source = df[~df['obs']].copy()
unobs_source.loc[:, 'alpha'] = 0.8
key = f'unobs{i}'
model_dict[key] = plot.scatter(
x='mjd',
y='lim_mag',
color='color',
marker='inverted_triangle',
fill_color='white',
line_color='color',
alpha='alpha',
source=ColumnDataSource(unobs_source),
)
imhover.renderers.append(model_dict[key])
key = f'bin{i}'
model_dict[key] = plot.scatter(
x='mjd',
y='mag',
color='color',
marker='circle',
fill_color='color',
source=ColumnDataSource(
data=dict(
mjd=[],
flux=[],
fluxerr=[],
filter=[],
color=[],
lim_mag=[],
mag=[],
magerr=[],
instrument=[],
stacked=[],
)
),
)
imhover.renderers.append(model_dict[key])
key = 'obserr' + str(i)
y_err_x = []
y_err_y = []
for d, ro in df[df['obs']].iterrows():
px = ro['mjd']
py = ro['mag']
err = ro['magerr']
y_err_x.append((px, px))
y_err_y.append((py - err, py + err))
model_dict[key] = plot.multi_line(
xs='xs',
ys='ys',
color='color',
alpha='alpha',
source=ColumnDataSource(
data=dict(
xs=y_err_x,
ys=y_err_y,
color=df[df['obs']]['color'],
alpha=[1.0] * len(df[df['obs']]),
)
),
)
key = f'binerr{i}'
model_dict[key] = plot.multi_line(
xs='xs',
ys='ys',
color='color',
source=ColumnDataSource(data=dict(xs=[], ys=[], color=[])),
)
key = f'unobsbin{i}'
model_dict[key] = plot.scatter(
x='mjd',
y='lim_mag',
color='color',
marker='inverted_triangle',
fill_color='white',
line_color='color',
alpha=0.8,
source=ColumnDataSource(
data=dict(
mjd=[],
flux=[],
fluxerr=[],
filter=[],
color=[],
lim_mag=[],
mag=[],
magerr=[],
instrument=[],
stacked=[],
)
),
)
imhover.renderers.append(model_dict[key])
key = f'all{i}'
model_dict[key] = ColumnDataSource(df)
key = f'bold{i}'
model_dict[key] = ColumnDataSource(
df[
[
'mjd',
'flux',
'fluxerr',
'mag',
'magerr',
'filter',
'zp',
'magsys',
'lim_mag',
'stacked',
]
]
)
plot.xaxis.axis_label = 'MJD'
plot.yaxis.axis_label = 'AB mag'
plot.toolbar.logo = None
toggle = CheckboxWithLegendGroup(
labels=list(data.label.unique()),
active=list(range(len(data.label.unique()))),
colors=list(data.color.unique()),
)
# TODO replace `eval` with Namespaces
# https://github.com/bokeh/bokeh/pull/6340
toggle.callback = CustomJS(
args={'toggle': toggle, **model_dict},
code=open(
os.path.join(os.path.dirname(__file__), '../static/js/plotjs', 'togglem.js')
).read(),
)
slider = Slider(start=0.0, end=15.0, value=0.0, step=1.0, title='Binsize (days)')
button = Button(label="Export Bold Light Curve to CSV")
button.callback = CustomJS(
args={'slider': slider, 'toggle': toggle, **model_dict},
code=open(
os.path.join(
os.path.dirname(__file__), '../static/js/plotjs', "download.js"
)
)
.read()
.replace('objname', obj_id)
.replace('default_zp', str(PHOT_ZP)),
)
toplay = row(slider, button)
callback = CustomJS(
args={'slider': slider, 'toggle': toggle, **model_dict},
code=open(
os.path.join(os.path.dirname(__file__), '../static/js/plotjs', 'stackm.js')
)
.read()
.replace('default_zp', str(PHOT_ZP))
.replace('detect_thresh', str(DETECT_THRESH)),
)
slider.js_on_change('value', callback)
layout = row(plot, toggle)
layout = column(toplay, layout)
p2 = Panel(child=layout, title='Mag')
tabs = Tabs(tabs=[p2, p1])
return _plot_to_json(tabs)
# TODO make async so that thread isn't blocked
def spectroscopy_plot(obj_id, spec_id=None):
"""TODO normalization? should this be handled at data ingestion or plot-time?"""
obj = Obj.query.get(obj_id)
spectra = Obj.query.get(obj_id).spectra
if spec_id is not None:
spectra = [spec for spec in spectra if spec.id == int(spec_id)]
if len(spectra) == 0:
return None, None, None
color_map = dict(zip([s.id for s in spectra], viridis(len(spectra))))
data = pd.concat(
[
pd.DataFrame(
{
'wavelength': s.wavelengths,
'flux': s.fluxes,
'id': s.id,
'instrument': s.instrument.telescope.nickname,
}
)
for i, s in enumerate(spectra)
]
)
split = data.groupby('id')
hover = HoverTool(
tooltips=[('wavelength', '$x'), ('flux', '$y'), ('instrument', '@instrument')]
)
plot = figure(
plot_width=600,
plot_height=300,
sizing_mode='scale_both',
tools='box_zoom,wheel_zoom,pan,reset',
active_drag='box_zoom',
)
plot.add_tools(hover)
model_dict = {}
for i, (key, df) in enumerate(split):
model_dict['s' + str(i)] = plot.line(
x='wavelength', y='flux', color=color_map[key], source=ColumnDataSource(df)
)
plot.xaxis.axis_label = 'Wavelength (Å)'
plot.yaxis.axis_label = 'Flux'
plot.toolbar.logo = None
# TODO how to choose a good default?
plot.y_range = Range1d(0, 1.03 * data.flux.max())
toggle = CheckboxWithLegendGroup(
labels=[s.instrument.telescope.nickname for s in spectra],
active=list(range(len(spectra))),
width=100,
colors=[color_map[k] for k, df in split],
)
toggle.callback = CustomJS(
args={'toggle': toggle, **model_dict},
code="""
for (let i = 0; i < toggle.labels.length; i++) {
eval("s" + i).visible = (toggle.active.includes(i))
}
""",
)
elements = CheckboxWithLegendGroup(
labels=list(SPEC_LINES.keys()),
active=[],
width=80,
colors=[c for w, c in SPEC_LINES.values()],
)
z = TextInput(value=str(obj.redshift), title="z:")
v_exp = TextInput(value='0', title="v_exp:")
for i, (wavelengths, color) in enumerate(SPEC_LINES.values()):
el_data = pd.DataFrame({'wavelength': wavelengths})
el_data['x'] = el_data['wavelength'] * (1 + obj.redshift)
model_dict[f'el{i}'] = plot.segment(
x0='x',
x1='x',
# TODO change limits
y0=0,
y1=1e-13,
color=color,
source=ColumnDataSource(el_data),
)
model_dict[f'el{i}'].visible = False
# TODO callback policy: don't require submit for text changes?
elements.callback = CustomJS(
args={'elements': elements, 'z': z, 'v_exp': v_exp, **model_dict},
code="""
let c = 299792.458; // speed of light in km / s
for (let i = 0; i < elements.labels.length; i++) {
let el = eval("el" + i);
el.visible = (elements.active.includes(i))
el.data_source.data.x = el.data_source.data.wavelength.map(
x_i => (x_i * (1 + parseFloat(z.value)) /
(1 + parseFloat(v_exp.value) / c))
);
el.data_source.change.emit();
}
""",
)
z.callback = elements.callback
v_exp.callback = elements.callback
layout = row(plot, toggle, elements, column(z, v_exp))
return _plot_to_json(layout)
| 30.335561 | 88 | 0.534441 | 1,273 | 0.056097 | 0 | 0 | 0 | 0 | 0 | 0 | 6,719 | 0.296082 |
5b15f03a9e21ad9e630b8c38b2ac80ff1cf06549 | 4,625 | py | Python | lib/session.py | Hiteshsuhas/err-stackstorm | 7579350ac50d9324b64a73b86d57e094270cb275 | [
"Apache-2.0"
]
| 15 | 2016-09-19T12:06:12.000Z | 2021-11-30T12:04:44.000Z | lib/session.py | Hiteshsuhas/err-stackstorm | 7579350ac50d9324b64a73b86d57e094270cb275 | [
"Apache-2.0"
]
| 22 | 2017-06-19T18:13:54.000Z | 2021-05-28T09:25:01.000Z | lib/session.py | Hiteshsuhas/err-stackstorm | 7579350ac50d9324b64a73b86d57e094270cb275 | [
"Apache-2.0"
]
| 7 | 2017-06-19T17:03:59.000Z | 2021-09-27T11:06:31.000Z | # coding:utf-8
import uuid
import string
import hashlib
import logging
from lib.errors import SessionExpiredError, SessionConsumedError
from datetime import datetime as dt
from random import SystemRandom
LOG = logging.getLogger("errbot.plugin.st2.session")
def generate_password(length=8):
rnd = SystemRandom()
if length > 255:
length = 255
return "".join([rnd.choice(string.hexdigits) for _ in range(length)])
class Session(object):
def __init__(self, user_id, user_secret, session_ttl=3600):
self.bot_secret = None
self.user_id = user_id
self._is_sealed = True
self.session_id = uuid.uuid4()
self.create_date = int(dt.now().timestamp())
self.modified_date = self.create_date
self.ttl_in_seconds = session_ttl
self._hashed_secret = self.hash_secret(user_secret)
del user_secret
def is_expired(self):
"""
        Raises SessionExpiredError if the modified timestamp plus the ttl is in the past;
        otherwise returns False.
"""
now = int(dt.now().timestamp())
modified_expiry = self.modified_date + self.ttl_in_seconds
if modified_expiry < now:
raise SessionExpiredError
return False
def attributes(self):
return {
"UserID": self.user_id,
"IsSealed": self._is_sealed,
"SessionID": self.session_id,
"CreationDate": str(dt.fromtimestamp(self.create_date)),
"ModifiedDate": str(dt.fromtimestamp(self.modified_date)),
"ExpiryDate": str(dt.fromtimestamp(self.modified_date + self.ttl_in_seconds)),
}
def __repr__(self):
return " ".join(
[
"UserID: {},".format(str(self.user_id)),
"Is Sealed: {},".format(str(self._is_sealed)),
"SessionID: {},".format(str(self.session_id)),
"Creation Date: {},".format(str(dt.fromtimestamp(self.create_date))),
"Modified Date: {},".format(str(dt.fromtimestamp(self.modified_date))),
"Expiry Date: {}".format(
str(dt.fromtimestamp(self.modified_date + self.ttl_in_seconds))
),
]
)
def unseal(self):
"""
Mark the session as being consumed. Returns true if the session was available to be
consumed or raises SessionConsumedError if the session has already been marked as consumed.
"""
self.is_expired()
if self._is_sealed:
self._is_sealed = False
else:
raise SessionConsumedError
return True
def is_sealed(self):
"""
Query the state of the one time use flag.
Returns True if the session has not been consumed or False if the session has already been
consumed.
"""
self.is_expired()
return self._is_sealed
def id(self):
"""
Return the UUID for the session.
"""
return str(self.session_id)
def ttl(self, ttl=None):
"""
Get/Set the time to live for the session.
param: ttl[int] The number of seconds the session should remain valid since creation or
modification.
        Returns the current ttl in seconds if no argument is provided; otherwise sets the ttl
        to the number of seconds provided in the ttl argument.
"""
self.is_expired()
if ttl is None:
return self.ttl_in_seconds
if isinstance(ttl, int):
self.ttl_in_seconds = ttl
self.modified_date = int(dt.now().timestamp())
else:
LOG.warning("session ttl must be an integer type, got '{}'".format(ttl))
def hash_secret(self, user_secret):
"""
        Generate a unique token by hashing a random bot secret with the user secret.
param: user_secret[string] - The users secret provided in the chat backend.
"""
self.is_expired()
if self.bot_secret is None:
self.bot_secret = generate_password(8)
h = hashlib.sha256()
h.update(bytes(user_secret, "utf-8"))
del user_secret
h.update(bytes(self.bot_secret, "utf-8"))
return h.hexdigest()
def match_secret(self, user_secret):
"""
Compare a secret with the session's hashed secret.
param: user_secret[string] the secret to compare.
        Return True if the user_secret hash matches the session hash, or False if it does not.
"""
self.is_expired()
return self._hashed_secret == self.hash_secret(user_secret)
| 34.774436 | 99 | 0.611676 | 4,188 | 0.905514 | 0 | 0 | 0 | 0 | 0 | 0 | 1,598 | 0.345514 |
5b16bf8ef2577dbc0fa8123ec5c7829b61cd4d77 | 700 | py | Python | junopy/entities/bill.py | robertons/junopy | 1acc64ab99d8ea49bb0dac979cd34da43541f243 | [
"MIT"
]
| 3 | 2021-07-12T15:05:13.000Z | 2022-01-31T03:35:43.000Z | junopy/entities/bill.py | robertons/junopy | 1acc64ab99d8ea49bb0dac979cd34da43541f243 | [
"MIT"
]
| 2 | 2022-01-29T20:14:51.000Z | 2022-02-07T16:16:24.000Z | junopy/entities/bill.py | robertons/junopy | 1acc64ab99d8ea49bb0dac979cd34da43541f243 | [
"MIT"
]
| 1 | 2022-02-01T18:36:10.000Z | 2022-02-01T18:36:10.000Z | # -*- coding: utf-8 -*-
from .lib import *
class Bill(JunoEntity):
def __init__(cls, **kw):
cls.__route__ = '/bill-payments'
cls.__metadata__ = {}
# FIELDS
cls.id = String(max=80)
cls.digitalAccountId = String(max=100)
cls.billType = ObjList(context=cls, key='status', name='str')
cls.numericalBarCode = String(max=100)
cls.paymentDescription = String(max=100)
cls.beneficiaryDocument = String(max=100)
cls.dueDate = DateTime(format="%Y-%m-%d")
cls.paymentDate = DateTime(format="%Y-%m-%d")
cls.billAmount = Float()
		cls.paidAmount = Float()
cls.createdOn = DateTime(format="iso")
cls.status = ObjList(context=cls, key='status', name='str')
super().__init__(**kw)
| 26.923077 | 63 | 0.674286 | 655 | 0.935714 | 0 | 0 | 0 | 0 | 0 | 0 | 98 | 0.14 |
5b18bfb17e1557ac4b871c78c2b1715de071b1e0 | 881 | py | Python | accounts/signals.py | julesc00/challenge | 0f991d07c3fa959e254d1b97d4d393fde13844a9 | [
"MIT"
]
| null | null | null | accounts/signals.py | julesc00/challenge | 0f991d07c3fa959e254d1b97d4d393fde13844a9 | [
"MIT"
]
| null | null | null | accounts/signals.py | julesc00/challenge | 0f991d07c3fa959e254d1b97d4d393fde13844a9 | [
"MIT"
]
| null | null | null | from django.db.models.signals import post_save
from django.contrib.auth.signals import user_logged_in, user_logged_out, user_login_failed
from django.contrib.auth.models import User
from django.contrib.auth.models import Group
from django.dispatch import receiver
from .models import Usuario, LoginLog
def user_profile(sender, instance, created, **kwargs):
if created:
group = Group.objects.get(name="usuarios")
instance.groups.add(group)
Usuario.objects.create(
user=instance,
name=instance.username
)
print("Profile created")
post_save.connect(user_profile, sender=User)
@receiver(user_logged_in)
def log_user_login(sender, request, user, **kwargs):
print(f"User {user.username} logged in on {user.last_login}")
log = user.last_login
LoginLog.objects.create(
login_log=log
)
| 24.472222 | 90 | 0.715096 | 0 | 0 | 0 | 0 | 229 | 0.259932 | 0 | 0 | 81 | 0.091941 |
5b190f68d89adb80d4fc9ec36ff5f159161ba327 | 2,166 | py | Python | Python Scripting/Python - POC-3/DvdApp.py | vaibhavkrishna-bhosle/Trendnxt-Projects | 6c8a31be2f05ec79cfc5086ee09adff161b836ad | [
"MIT"
]
| null | null | null | Python Scripting/Python - POC-3/DvdApp.py | vaibhavkrishna-bhosle/Trendnxt-Projects | 6c8a31be2f05ec79cfc5086ee09adff161b836ad | [
"MIT"
]
| null | null | null | Python Scripting/Python - POC-3/DvdApp.py | vaibhavkrishna-bhosle/Trendnxt-Projects | 6c8a31be2f05ec79cfc5086ee09adff161b836ad | [
"MIT"
]
| null | null | null | import mysql.connector
from mysql.connector.errors import ProgrammingError
from mysql.connector import Error
from DvdOperations import DvdStore
database = "db4"
def CreateDatabase(database):
mydb = mysql.connector.connect(
host="localhost",
user="Vaibhav",
passwd="Vaibhav@007",
)
mycursor = mydb.cursor()
mycursor.execute("CREATE DATABASE "+database)
mydb.close()
print("Database is created ")
Function1()
def Function1():
try:
mydb1 = mysql.connector.connect(
host="localhost",
user="Vaibhav",
passwd="Vaibhav@007",
database=database
)
except mysql.connector.errors.ProgrammingError as error1:
print("error occurred because : {}".format(error1))
CreateDatabase(database=database)
except mysql.connector.Error as error2:
print("error occured because : {}".format(error2))
exit
else:
mycursor = mydb1.cursor()
s1 = "CREATE TABLE IF NOT EXISTS DVDSTORE (id INT AUTO_INCREMENT PRIMARY KEY, title VARCHAR(255), star_name VARCHAR(255), year_of_release INT, genre VARCHAR(255))"
mycursor.execute(s1)
mydb1.commit()
def Function2():
Function1()
print("\nWELCOME TO DVD STORE ")
print("1. Add a DVD\n2. Search\n3. Modify a DVD\n4. Delete a DVD\n5. Exit")
ch = int(input("Enter your choice : "))
if ch == 1 :
DvdStore.AddDvd()
Function2()
elif ch ==2 :
DvdStore.SearchDvd()
Function2()
elif ch == 3:
DvdStore.ModifyDvd()
Function2()
elif ch == 4:
DvdStore.DeleteDvd()
Function2()
elif ch == 5:
print("\nThank You !!! Visit Again")
else:
print("\nInvalid Choice !!! Enter Choice Again\n")
Function2()
def PrintTable():
mydb1 = mysql.connector.connect(
host="localhost",
user="Vaibhav",
passwd="Vaibhav@007",
database=database
)
mycursor = mydb1.cursor()
mycursor.execute("SELECT * FROM DVDSTORE")
myresult = mycursor.fetchall()
for i in myresult:
print(i)
Function2() | 24.066667 | 171 | 0.60711 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 570 | 0.263158 |
5b1919573f3036459523134660e1cde252b7f5d5 | 8,689 | py | Python | cloudshell/rest/api.py | QualiSystems/cloudshell-rest-api | 70d09262c81b8dae55053aae162a7265cf67865f | [
"Apache-2.0"
]
| 1 | 2021-11-26T22:52:42.000Z | 2021-11-26T22:52:42.000Z | cloudshell/rest/api.py | katzy687/cloudshell-rest-api | 70d09262c81b8dae55053aae162a7265cf67865f | [
"Apache-2.0"
]
| 11 | 2019-01-08T06:37:34.000Z | 2021-06-09T17:39:50.000Z | cloudshell/rest/api.py | katzy687/cloudshell-rest-api | 70d09262c81b8dae55053aae162a7265cf67865f | [
"Apache-2.0"
]
| 7 | 2016-09-27T13:14:00.000Z | 2021-11-23T14:02:06.000Z | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import json
try:
import urllib2
except:
import urllib.request as urllib2
from requests import delete, get, post, put
from cloudshell.rest.exceptions import ShellNotFoundException, FeatureUnavailable
class PackagingRestApiClient(object):
def __init__(self, ip, port, username, password, domain):
"""
Logs into CloudShell using REST API
:param ip: CloudShell server IP or host name
:param port: port, usually 9000
:param username: CloudShell username
:param password: CloudShell password
:param domain: CloudShell domain, usually Global
"""
self.ip = ip
self.port = port
opener = urllib2.build_opener(urllib2.HTTPHandler)
url = "http://{0}:{1}/API/Auth/Login".format(ip, port)
data = "username={0}&password={1}&domain={2}" \
.format(username, PackagingRestApiClient._urlencode(password), domain).encode()
request = urllib2.Request(url=url, data=data)
request.add_header("Content-Type", "application/x-www-form-urlencoded")
backup = request.get_method
request.get_method = lambda: "PUT"
url = opener.open(request)
self.token = url.read()
if isinstance(self.token, bytes):
self.token = self.token.decode()
self.token = self.token.strip("\"")
request.get_method = backup
def add_shell(self, shell_path):
"""
Adds a new Shell Entity to CloudShell
        If the shell already exists, an exception will be thrown
:param shell_path:
:return:
"""
url = "http://{0}:{1}/API/Shells".format(self.ip, self.port)
response = post(url,
files={os.path.basename(shell_path): open(shell_path, "rb")},
headers={"Authorization": "Basic " + self.token})
if response.status_code != 201:
raise Exception(response.text)
def update_shell(self, shell_path, shell_name=None):
"""
Updates an existing Shell Entity in CloudShell
:param shell_path: The path to the shell file
:param shell_name: The shell name. if not supplied the shell name is derived from the shell path
:return:
"""
filename = os.path.basename(shell_path)
shell_name = shell_name or self._get_shell_name_from_filename(filename)
url = "http://{0}:{1}/API/Shells/{2}".format(self.ip, self.port, shell_name)
response = put(url,
files={filename: open(shell_path, "rb")},
headers={"Authorization": "Basic " + self.token})
if response.status_code == 404: # Not Found
raise ShellNotFoundException()
if response.status_code != 200: # Ok
raise Exception(response.text)
def get_installed_standards(self):
"""
Gets all standards installed on CloudShell
:return:
"""
url = "http://{0}:{1}/API/Standards".format(self.ip, self.port)
response = get(url,
headers={"Authorization": "Basic " + self.token})
if response.status_code == 404: # Feature unavailable (probably due to cloudshell version below 8.1)
raise FeatureUnavailable()
if response.status_code != 200: # Ok
raise Exception(response.text)
return response.json()
def get_shell(self, shell_name):
url = "http://{0}:{1}/API/Shells/{2}".format(self.ip, self.port, shell_name)
response = get(url,
headers={"Authorization": "Basic " + self.token})
if response.status_code == 404 or response.status_code == 405: # Feature unavailable (probably due to cloudshell version below 8.2)
raise FeatureUnavailable()
if response.status_code == 400: # means shell not found
raise ShellNotFoundException()
if response.status_code != 200:
raise Exception(response.text)
return response.json()
def delete_shell(self, shell_name):
url = "http://{0}:{1}/API/Shells/{2}".format(self.ip, self.port, shell_name)
response = delete(url,
headers={"Authorization": "Basic " + self.token})
if response.status_code == 404 or response.status_code == 405: # Feature unavailable (probably due to cloudshell version below 9.2)
raise FeatureUnavailable()
if response.status_code == 400: # means shell not found
raise ShellNotFoundException()
if response.status_code != 200:
raise Exception(response.text)
def export_package(self, topologies):
"""Export a package with the topologies from the CloudShell
:type topologies: list[str]
:rtype: str
:return: package content
"""
url = "http://{0.ip}:{0.port}/API/Package/ExportPackage".format(self)
response = post(
url,
headers={"Authorization": "Basic " + self.token,
"Content-type": "application/json"},
json={"TopologyNames": topologies},
)
if response.status_code in (404, 405):
raise FeatureUnavailable()
if not response.ok:
raise Exception(response.text)
return response.content
def import_package(self, package_path):
"""Import the package to the CloudShell
:type package_path: str
"""
url = "http://{0.ip}:{0.port}/API/Package/ImportPackage".format(self)
with open(package_path, "rb") as fo:
response = post(
url,
headers={"Authorization": "Basic " + self.token},
files={"file": fo},
)
if response.status_code in (404, 405):
raise FeatureUnavailable()
if not response.ok:
raise Exception(response.text)
if not response.json().get("Success"):
error_msg = response.json().get("ErrorMessage", "Problem with importing the package")
raise Exception(error_msg)
@staticmethod
def _urlencode(s):
return s.replace("+", "%2B").replace("/", "%2F").replace("=", "%3D")
@staticmethod
def _get_shell_name_from_filename(filename):
return os.path.splitext(filename)[0]
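    # Illustrative example (not part of the original source):
    #   PackagingRestApiClient._get_shell_name_from_filename("MyShell.zip") -> "MyShell"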
def upload_environment_zip_file(self, zipfilename):
with open(zipfilename, "rb") as g:
zipdata = g.read()
self.upload_environment_zip_data(zipdata)
def upload_environment_zip_data(self, zipdata):
boundary = b'''------------------------652c70c071862fc2'''
fd = b'''--''' + boundary + \
b'''\r\nContent-Disposition: form-data; name="file"; filename="my_zip.zip"''' + \
b'''\r\nContent-Type: application/octet-stream\r\n\r\n''' + zipdata + \
b'''\r\n--''' + boundary + b'''--\r\n'''
class FakeReader(object):
def __init__(self, k):
self.k = k
self.offset = 0
def read(self, blocksize):
if self.offset >= len(self.k):
return None
if self.offset + blocksize >= len(self.k):
rv = self.k[self.offset:]
self.offset = len(self.k)
else:
rv = self.k[self.offset:self.offset+blocksize]
self.offset += blocksize
return rv
fdreader = FakeReader(fd)
request = urllib2.Request("http://{}:{}/API/Package/ImportPackage".format(self.ip, str(self.port)),
data=fdreader)
backup = request.get_method
request.get_method = lambda: "POST"
request.add_header("Authorization", "Basic " + self.token)
request.add_header("Content-Type", "multipart/form-data; boundary=" + boundary)
request.add_header("Accept", "*/*")
request.add_header("Content-Length", str(len(fd)))
request.get_method = backup
opener = urllib2.build_opener(urllib2.HTTPHandler)
url = opener.open(request)
try:
s = url.read()
if isinstance(s, bytes):
s = s.decode()
o = json.loads(s)
if "Success" not in o:
raise Exception("'Success' value not found in Quali API response: " + str(o))
except Exception as ue:
raise Exception("Error extracting Quali API zip import result: " + str(ue))
if not o["Success"]:
raise Exception("Error uploading Quali API zip package: "+o["ErrorMessage"])
| 36.662447 | 140 | 0.579008 | 8,425 | 0.969617 | 0 | 0 | 220 | 0.025319 | 0 | 0 | 2,563 | 0.294971 |
5b19d3c83fe2ac0f121d05692ca3db02ba4ea908 | 1,848 | py | Python | data/scripts/classes/team_row.py | matt-waite/lol-reference | 1042fc0a63f7911ed9434b5bb6ba8f866fc0a9c2 | [
"MIT"
]
| 1 | 2020-08-26T17:29:58.000Z | 2020-08-26T17:29:58.000Z | data/scripts/classes/team_row.py | matt-waite/lol-reference | 1042fc0a63f7911ed9434b5bb6ba8f866fc0a9c2 | [
"MIT"
]
| null | null | null | data/scripts/classes/team_row.py | matt-waite/lol-reference | 1042fc0a63f7911ed9434b5bb6ba8f866fc0a9c2 | [
"MIT"
]
| null | null | null | from classes import oracles_headers
class TeamRow:
COLUMNS = oracles_headers.oracles_columns
def __init__(self, row):
self.ROW = row
def GetCell(self, name):
return self.ROW[self.COLUMNS[name]]
def GetDatabaseObject(self):
game = {
"gameId": self.GameId(),
"isComplete": self.IsComplete(),
"league": self.League(),
"year": self.Year(),
"split": self.Split(),
"date": self.Date(),
"patch": self.Patch(),
"side": self.Side(),
"team": self.Team(),
"bans": self.Bans(),
"gameLength": self.GameLength(),
"result": self.Result(),
"kills": self.Kills(),
"deaths": self.Deaths(),
"assists": self.Assists()
}
return game
def GameId(self):
return self.GetCell('GameId')
def IsComplete(self):
return self.GetCell('IsComplete')
def League(self):
return self.GetCell('League')
def Year(self):
return int(self.GetCell('Year'))
def Split(self):
return self.GetCell('Split')
def Date(self):
return self.GetCell('Date')
def Patch(self):
return self.GetCell('Patch')
def Side(self):
return self.GetCell('Side')
def Team(self):
return self.GetCell('Team')
def Bans(self):
return [self.GetCell(f"Ban{i}") for i in range(1, 6)]
def GameLength(self):
return self.GetCell('GameLength')
def Result(self):
return False if self.GetCell('Result') == "0" else True
def Kills(self):
return int(self.GetCell('Kills'))
def Deaths(self):
return int(self.GetCell('Deaths'))
def Assists(self):
return int(self.GetCell('Assists'))
| 24 | 63 | 0.540584 | 1,806 | 0.977273 | 0 | 0 | 0 | 0 | 0 | 0 | 238 | 0.128788 |
5b1a34dd97d2ac3c30c9847cc931832f35fa692e | 7,854 | py | Python | startup/97-standard-plans.py | MikeHart85/SIX_profile_collection | f4b34add0c464006a1310375b084c63597b6baf0 | [
"BSD-3-Clause"
]
| null | null | null | startup/97-standard-plans.py | MikeHart85/SIX_profile_collection | f4b34add0c464006a1310375b084c63597b6baf0 | [
"BSD-3-Clause"
]
| null | null | null | startup/97-standard-plans.py | MikeHart85/SIX_profile_collection | f4b34add0c464006a1310375b084c63597b6baf0 | [
"BSD-3-Clause"
]
| null | null | null | def pol_V(offset=None):
yield from mv(m1_simple_fbk,0)
cur_mono_e = pgm.en.user_readback.value
yield from mv(epu1.table,6) # 4 = 3rd harmonic; 6 = "testing V" 1st harmonic
if offset is not None:
yield from mv(epu1.offset,offset)
yield from mv(epu1.phase,28.5)
yield from mv(pgm.en,cur_mono_e+1) #TODO this is dirty trick. figure out how to process epu.table.input
yield from mv(pgm.en,cur_mono_e)
yield from mv(m1_simple_fbk,1)
print('\nFinished moving the polarization to vertical.\n\tNote that the offset for epu calibration is {}eV.\n\n'.format(offset))
def pol_H(offset=None):
yield from mv(m1_simple_fbk,0)
cur_mono_e = pgm.en.user_readback.value
yield from mv(epu1.table,5) # 2 = 3rd harmonic; 5 = "testing H" 1st harmonic
if offset is not None:
yield from mv(epu1.offset,offset)
yield from mv(epu1.phase,0)
yield from mv(pgm.en,cur_mono_e+1) #TODO this is dirty trick. figure out how to process epu.table.input
yield from mv(pgm.en,cur_mono_e)
yield from mv(m1_simple_fbk,1)
print('\nFinished moving the polarization to horizontal.\n\tNote that the offset for epu calibration is {}eV.\n\n'.format(offset))
def m3_check():
yield from mv(m3_simple_fbk,0)
sclr_enable()
if pzshutter.value == 0:
print('Piezo Shutter is disabled')
flag = 0
if pzshutter.value == 2:
print('Piezo Shutter is enabled: going to be disabled')
yield from pzshutter_disable()
flag = 1
temp_extslt_vg=extslt.vg.user_readback.value
temp_extslt_hg=extslt.hg.user_readback.value
temp_gcdiag = gcdiag.y.user_readback.value
#yield from mv(qem07.averaging_time, 1)
yield from mv(sclr.preset_time, 1)
yield from mv(extslt.hg,10)
yield from mv(extslt.vg,30)
#yield from gcdiag.grid # RE-COMMENT THIS LINE 5/7/2019
#yield from rel_scan([qem07],m3.pit,-0.0005,0.0005,31, md = {'reason':'checking m3 before cff'})
yield from rel_scan([sclr],m3.pit,-0.0005,0.0005,31, md = {'reason':'checking m3'})
#yield from mv(m3.pit,peaks['cen']['gc_diag_grid'])
yield from mv(m3.pit,peaks['cen']['sclr_channels_chan8'])
#yield from mv(m3.pit,peaks['cen']['sclr_channels_chan2'])
yield from mv(extslt.hg,temp_extslt_hg)
yield from mv(extslt.vg,temp_extslt_vg)
yield from mv(gcdiag.y,temp_gcdiag)
yield from sleep(20)
#yield from mv(m1_fbk_sp,extslt_cam.stats1.centroid.x.value)
yield from mv(m3_simple_fbk_target,extslt_cam.stats1.centroid.x.value)#m3_simple_fbk_cen.value)
yield from mv(m3_simple_fbk,1)
if flag == 0:
print('Piezo Shutter remains disabled')
if flag == 1:
print('Piezo Shutter is going to renabled')
yield from pzshutter_enable()
def m1_align_fine2():
m1x_init=m1.x.user_readback.value
m1pit_init=m1.pit.user_readback.value
m1pit_step=50
m1pit_start=m1pit_init-1*m1pit_step
for i in range(0,5):
yield from mv(m1.pit,m1pit_start+i*m1pit_step)
yield from scan([qem05],m1.x,-3,3.8,35)
yield from mv(m1.pit,m1pit_init)
yield from mv(m1.x,m1x_init)
def alignM3x():
# get the exit slit positions to return to at the end
vg_init = extslt.vg.user_setpoint.value
hg_init = extslt.hg.user_setpoint.value
hc_init = extslt.hc.user_setpoint.value
print('Saving exit slit positions for later')
# get things out of the way
yield from m3diag.out
# read gas cell diode
yield from gcdiag.grid
# set detector e.g. gas cell diagnostics qem
detList=[qem07] #[sclr]
# set V exit slit value to get enough signal
yield from mv(extslt.vg, 30)
# open H slit full open
yield from mv(extslt.hg, 9000)
#move extslt.hs appropriately and scan m3.x
yield from mv(extslt.hc,-9)
yield from relative_scan(detList,m3.x,-6,6,61)
yield from mv(extslt.hc,-3)
yield from relative_scan(detList,m3.x,-6,6,61)
yield from mv(extslt.hc,3)
yield from relative_scan(detList,m3.x,-6,6,61)
print('Returning exit slit positions to the inital values')
yield from mv(extslt.hc,hc_init)
yield from mv(extslt.vg, vg_init, extslt.hg, hg_init)
def beamline_align():
yield from mv(m1_fbk,0)
yield from align.m1pit
yield from sleep(5)
yield from m3_check()
#yield from mv(m1_fbk_cam_time,0.002)
#yield from mv(m1_fbk_th,1500)
yield from sleep(5)
yield from mv(m1_fbk_sp,extslt_cam.stats1.centroid.x.value)
yield from mv(m1_fbk,1)
def beamline_align_v2():
yield from mv(m1_simple_fbk,0)
yield from mv(m3_simple_fbk,0)
yield from mv(m1_fbk,0)
yield from align.m1pit
yield from sleep(5)
yield from mv(m1_simple_fbk_target_ratio,m1_simple_fbk_ratio.value)
yield from mv(m1_simple_fbk,1)
yield from sleep(5)
yield from m3_check()
def xas(dets,motor,start_en,stop_en,num_points,sec_per_point):
sclr_enable()
sclr_set_time=sclr.preset_time.value
if pzshutter.value == 0:
print('Piezo Shutter is disabled')
flag = 0
if pzshutter.value == 2:
print('Piezo Shutter is enabled: going to be disabled')
yield from pzshutter_disable()
flag = 1
yield from mv(sclr.preset_time,sec_per_point)
yield from scan(dets,pgm.en,start_en,stop_en,num_points)
E_max = peaks['max']['sclr_channels_chan2'][0]
E_com = peaks['com']['sclr_channels_chan2']
if flag == 0:
print('Piezo Shutter remains disabled')
if flag == 1:
print('Piezo Shutter is going to renabled')
yield from pzshutter_enable()
yield from mv(sclr.preset_time,sclr_set_time)
return E_com, E_max
#TODO put this inside of rixscam
def rixscam_get_threshold(Ei = None):
'''Calculate the minimum and maximum threshold for RIXSCAM single photon counting (LS mode)
Ei\t:\t float - incident energy (default is beamline current energy)
'''
if Ei is None:
Ei = pgm.en.user_readback.value
t_min = 0.7987 * Ei - 97.964
t_max = 1.4907 * Ei + 38.249
print('\n\n\tMinimum value for RIXSCAM threshold (LS mode):\t{}'.format(t_min))
print('\tMaximum value for RIXSCAM threshold (LS mode):\t{}'.format(t_max))
print('\tFor Beamline Energy:\t\t\t\t{}'.format(Ei))
return t_min, t_max
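# Illustrative check (not part of the original source): for Ei = 930 eV the formulas
# above give t_min = 0.7987*930 - 97.964 ~= 644.8 and t_max = 1.4907*930 + 38.249 ~= 1424.6.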
#TODO put this insdie of rixscam
def rixscam_set_threshold(Ei=None):
'''Setup the RIXSCAM.XIP plugin values for a specific energy for single photon counting and
centroiding in LS mode.
Ei\t:\t float - incident energy (default is beamline current energy)
'''
if Ei is None:
Ei = pgm.en.user_readback.value
thold_min, thold_max = rixscam_get_threshold(Ei)
yield from mv(rixscam.xip.beamline_energy, Ei,
rixscam.xip.sum_3x3_threshold_min, thold_min,
rixscam.xip.sum_3x3_threshold_max, thold_max)
#TODO make official so that there is a m1_fbk device like m1fbk.setpoint
m1_fbk = EpicsSignal('XF:02IDA-OP{FBck}Sts:FB-Sel', name = 'm1_fbk')
m1_fbk_sp = EpicsSignal('XF:02IDA-OP{FBck}PID-SP', name = 'm1_fbk_sp')
m1_fbk_th = extslt_cam.stats1.centroid_threshold
#m1_fbk_pix_x = extslt_cam.stats1.centroid.x.value
m1_fbk_cam_time = extslt_cam.cam.acquire_time
#(mv(m1_fbk_th,1500)
m1_simple_fbk = EpicsSignal('XF:02IDA-OP{M1_simp_feed}FB-Ena', name = 'm1_simple_fbk')
m1_simple_fbk_target_ratio = EpicsSignal('XF:02IDA-OP{M1_simp_feed}FB-TarRat', name = 'm1_simple_fbk_target_ratio')
m1_simple_fbk_ratio = EpicsSignal('XF:02IDA-OP{M1_simp_feed}FB-Ratio', name = 'm1_simple_fbk_ratio')
m3_simple_fbk = EpicsSignal('XF:02IDA-OP{M3_simp_feed}FB-Ena', name = 'm3_simple_fbk')
m3_simple_fbk_target = EpicsSignal('XF:02IDA-OP{M3_simp_feed}FB-Targ', name = 'm3_simple_fbk_target')
m3_simple_fbk_cen = EpicsSignal('XF:02IDA-OP{M3_simp_feed}FB_inpbuf', name = 'm3_simple_fbk_cen')
| 37.222749 | 134 | 0.697734 | 0 | 0 | 6,192 | 0.788388 | 0 | 0 | 0 | 0 | 2,768 | 0.352432 |
5b1a7c8341406690f20aa12accdb9fc9001deadc | 238 | py | Python | speechpro/cloud/speech/synthesis/rest/cloud_client/api/__init__.py | speechpro/cloud-python | dfcfc19a1f008b55c5290599c594fe8de777018b | [
"MIT"
]
| 15 | 2020-05-27T09:35:32.000Z | 2022-03-29T18:35:36.000Z | speechpro/cloud/speech/synthesis/rest/cloud_client/api/__init__.py | speechpro/cloud-python | dfcfc19a1f008b55c5290599c594fe8de777018b | [
"MIT"
]
| null | null | null | speechpro/cloud/speech/synthesis/rest/cloud_client/api/__init__.py | speechpro/cloud-python | dfcfc19a1f008b55c5290599c594fe8de777018b | [
"MIT"
]
| 1 | 2021-04-06T21:39:29.000Z | 2021-04-06T21:39:29.000Z | from __future__ import absolute_import
# flake8: noqa
# import apis into api package
import speechpro.cloud.speech.synthesis.rest.cloud_client.api.session_api
import speechpro.cloud.speech.synthesis.rest.cloud_client.api.synthesize_api
| 29.75 | 76 | 0.848739 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 44 | 0.184874 |
5b1aad312b8c27483bc4147a2754724cb8c715fb | 1,039 | py | Python | learn_pyqt5/checkable_bar.py | liusong-cn/python | f67933f0879021a595258e09c4cde5ca1f9f6aed | [
"Apache-2.0"
]
| 1 | 2019-11-12T13:38:54.000Z | 2019-11-12T13:38:54.000Z | learn_pyqt5/checkable_bar.py | liusong-cn/python | f67933f0879021a595258e09c4cde5ca1f9f6aed | [
"Apache-2.0"
]
| null | null | null | learn_pyqt5/checkable_bar.py | liusong-cn/python | f67933f0879021a595258e09c4cde5ca1f9f6aed | [
"Apache-2.0"
]
| null | null | null | # _*_ coding:utf-8 _*_
# author:ls
# time:2020/3/19 0019
import sys
from PyQt5.QtWidgets import QApplication,QAction,QMainWindow
from PyQt5.QtGui import QIcon
class Example(QMainWindow):
def __init__(self):
super().__init__()
self.setui()
def setui(self):
self.statusbar = self.statusBar()
self.statusbar.showMessage('default show')
act = QAction('check',self,checkable=True)
act.setCheckable(True)
act.setStatusTip('view changed')
        # Not entirely clear how the triggered signal makes the toggle function run
act.triggered.connect(self.toggle)
menubar = self.menuBar()
menu = menubar.addMenu('checkable')
menu.addAction(act)
self.setGeometry(300,300,400,150)
self.setWindowTitle('this is a checkable menu')
self.show()
def toggle(self,state):
if state:
self.statusbar.show()
else:
self.statusbar.hide()
if __name__ == '__main__':
app = QApplication(sys.argv)
ex = Example()
sys.exit(app.exec_()) | 25.341463 | 60 | 0.627526 | 797 | 0.749765 | 0 | 0 | 0 | 0 | 0 | 0 | 188 | 0.176858 |
5b1aca9be8fbadae0d16bcaf4d8c545808d7368a | 3,451 | py | Python | service/test.py | ksiomelo/cubix | cd9e6dda6696b302a7c0d383259a9d60b15b0d55 | [
"Apache-2.0"
]
| 3 | 2015-09-07T00:16:16.000Z | 2019-01-11T20:27:56.000Z | service/test.py | ksiomelo/cubix | cd9e6dda6696b302a7c0d383259a9d60b15b0d55 | [
"Apache-2.0"
]
| null | null | null | service/test.py | ksiomelo/cubix | cd9e6dda6696b302a7c0d383259a9d60b15b0d55 | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env python
import pika
import time
import json
import StringIO
#from fca.concept import Concept
from casa import Casa
#from fca.readwrite import cxt
def read_cxt_string(data):
input_file = StringIO.StringIO(data)
assert input_file.readline().strip() == "B",\
"File is not valid cxt"
input_file.readline() # Empty line
number_of_objects = int(input_file.readline().strip())
number_of_attributes = int(input_file.readline().strip())
input_file.readline() # Empty line
objects = [input_file.readline().strip() for i in xrange(number_of_objects)]
attributes = [input_file.readline().strip() for i in xrange(number_of_attributes)]
table = []
for i in xrange(number_of_objects):
line = map(lambda c: c=="X", input_file.readline().strip())
table.append(line)
input_file.close()
return Casa("sample", objects, attributes, table)
def get_a_context():
title = "sample context"
objects = [1, 2, 3, 4]
attributes = ['a', 'b', 'c', 'd']
rels = [[True, False, False, True],\
[True, False, True, False],\
[False, True, True, False],\
[False, True, True, True]]
return Casa(title,objects,attributes,rels)
def on_queue_declared(queue):
channel.queue_bind(queue='test',
exchange='',
routing_key='order.test.customer')
connection = pika.BlockingConnection(pika.ConnectionParameters(
host='localhost'))
channel = connection.channel()
channel.queue_declare(queue='task_queue', durable=True, exclusive=False)
channel.queue_declare(queue='msg_queue', durable=True, exclusive=False)
#channel.exchange_declare(exchange='',
# type="topic",
# durable=True,
# auto_delete=False)
#channel.queue_declare(queue="task_queue",
# durable=True,
# exclusive=False,
# auto_delete=False,
# callback=on_queue_declared)
print ' [*] Waiting for messages. To exit press CTRL+C'
def msg_callback(ch, method, props, body):
print " [x] Received %r" % (body,)
response = body + " MODIFIED"
#response = get_a_concept()
print " [x] Done"
ch.basic_publish(exchange='',
routing_key=props.reply_to,
properties=pika.BasicProperties(correlation_id = \
props.correlation_id),
body= str(response))
ch.basic_ack(delivery_tag = method.delivery_tag)
def callback(ch, method, props, body):
print " [x] Received %r" % (body,)
response = body + " MODIFIED"
context = read_cxt_string(body)
print context.to_dict(False)
#response = get_a_concept()
print " [x] Done"
ch.basic_publish(exchange='',
routing_key=props.reply_to,
properties=pika.BasicProperties(correlation_id = \
props.correlation_id),
body= json.dumps(context.to_dict(False)))#str(response))
ch.basic_ack(delivery_tag = method.delivery_tag)
channel.basic_qos(prefetch_count=1)
channel.basic_consume(callback,
queue='task_queue')
channel.basic_consume(msg_callback,
queue='msg_queue')
channel.start_consuming() | 30.8125 | 86 | 0.597508 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 835 | 0.241959 |
5b1cda3e00260587ee1daafde0d87ed8f1313a59 | 310 | py | Python | src/nia/selections/rank.py | salar-shdk/nia | bb0f1b941240b627291dd8212b8840cbe77b0398 | [
"MIT"
]
| 8 | 2021-09-06T07:20:23.000Z | 2022-02-23T23:18:22.000Z | src/nia/selections/rank.py | salar-shdk/nia | bb0f1b941240b627291dd8212b8840cbe77b0398 | [
"MIT"
]
| null | null | null | src/nia/selections/rank.py | salar-shdk/nia | bb0f1b941240b627291dd8212b8840cbe77b0398 | [
"MIT"
]
| null | null | null | from .selection import Selection
import numpy as np
class Rank(Selection):
@Selection.initializer
def __init__(self, size=20):
pass
def select(self, population, fitness):
indexes = fitness.argsort()
return (population[indexes])[:self.size], (fitness[indexes])[:self.size]
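    # Illustrative example (not part of the original source): with size=2,
    # population=np.array(['a', 'b', 'c']) and fitness=np.array([0.9, 0.1, 0.5]),
    # select() returns (['b', 'c'], [0.1, 0.5]) -- the two lowest-fitness individuals.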
| 25.833333 | 80 | 0.677419 | 256 | 0.825806 | 0 | 0 | 68 | 0.219355 | 0 | 0 | 0 | 0 |
5b1ed26356ab2b3641b50b827cab69738be819bd | 15,878 | py | Python | datasets/imppres/imppres.py | ddhruvkr/datasets-1 | 66f2a7eece98d2778bd22bb5034cb7c2376032d4 | [
"Apache-2.0"
]
| 7 | 2021-01-04T22:18:26.000Z | 2021-07-10T09:13:29.000Z | datasets/imppres/imppres.py | ddhruvkr/datasets-1 | 66f2a7eece98d2778bd22bb5034cb7c2376032d4 | [
"Apache-2.0"
]
| null | null | null | datasets/imppres/imppres.py | ddhruvkr/datasets-1 | 66f2a7eece98d2778bd22bb5034cb7c2376032d4 | [
"Apache-2.0"
]
| 3 | 2021-01-03T22:08:20.000Z | 2021-08-12T20:09:39.000Z | # coding=utf-8
# Copyright 2020 The HuggingFace Datasets Authors and the current dataset script contributor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Over 25k semiautomatically generated sentence pairs illustrating well-studied pragmatic inference types. IMPPRES is an NLI dataset following the format of SNLI (Bowman et al., 2015), MultiNLI (Williams et al., 2018) and XNLI (Conneau et al., 2018), which was created to evaluate how well trained NLI models recognize several classes of presuppositions and scalar implicatures."""
from __future__ import absolute_import, division, print_function
import json
import os
import datasets
# Find for instance the citation on arxiv or on the dataset repo/website
_CITATION = """\
@inproceedings{jeretic-etal-2020-natural,
title = "Are Natural Language Inference Models {IMPPRESsive}? {L}earning {IMPlicature} and {PRESupposition}",
author = "Jereti\v{c}, Paloma and
Warstadt, Alex and
Bhooshan, Suvrat and
Williams, Adina",
booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://www.aclweb.org/anthology/2020.acl-main.768",
doi = "10.18653/v1/2020.acl-main.768",
pages = "8690--8705",
abstract = "Natural language inference (NLI) is an increasingly important task for natural language understanding, which requires one to infer whether a sentence entails another. However, the ability of NLI models to make pragmatic inferences remains understudied. We create an IMPlicature and PRESupposition diagnostic dataset (IMPPRES), consisting of 32K semi-automatically generated sentence pairs illustrating well-studied pragmatic inference types. We use IMPPRES to evaluate whether BERT, InferSent, and BOW NLI models trained on MultiNLI (Williams et al., 2018) learn to make pragmatic inferences. Although MultiNLI appears to contain very few pairs illustrating these inference types, we find that BERT learns to draw pragmatic inferences. It reliably treats scalar implicatures triggered by {``}some{''} as entailments. For some presupposition triggers like {``}only{''}, BERT reliably recognizes the presupposition as an entailment, even when the trigger is embedded under an entailment canceling operator like negation. BOW and InferSent show weaker evidence of pragmatic reasoning. We conclude that NLI training encourages models to learn some, but not all, pragmatic inferences.",
}
"""
# You can copy an official description
_DESCRIPTION = """Over >25k semiautomatically generated sentence pairs illustrating well-studied pragmatic inference types. IMPPRES is an NLI dataset following the format of SNLI (Bowman et al., 2015), MultiNLI (Williams et al., 2018) and XNLI (Conneau et al., 2018), which was created to evaluate how well trained NLI models recognize several classes of presuppositions and scalar implicatures."""
_HOMEPAGE = "https://github.com/facebookresearch/Imppres"
_LICENSE = "Creative Commons Attribution-NonCommercial 4.0 International Public License"
# The HuggingFace dataset library don't host the datasets but only point to the original files
# This can be an arbitrary nested dict/list of URLs (see below in `_split_generators` method)
_URLs = {"default": "https://github.com/facebookresearch/Imppres/blob/master/dataset/IMPPRES.zip?raw=true"}
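# Illustrative example (not part of the original source): a configuration name such as
# "presupposition_only_presupposition" is split in _split_generators below into the archive
# sub-folder "presupposition" and the data file "only_presupposition.jsonl".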
class Imppres(datasets.GeneratorBasedBuilder):
"""Each sentence type in IMPPRES is generated according to a template that specifies the linear order of the constituents in the sentence. The constituents are sampled from a vocabulary of over 3000 lexical items annotated with grammatical features needed to ensure wellformedness. We semiautomatically generate IMPPRES using a codebase developed by Warstadt et al. (2019a) and significantly expanded for the BLiMP dataset (Warstadt et al., 2019b)."""
VERSION = datasets.Version("1.1.0")
# This is an example of a dataset with multiple configurations.
# If you don't want/need to define several sub-sets in your dataset,
# just remove the BUILDER_CONFIG_CLASS and the BUILDER_CONFIGS attributes.
# If you need to make complex sub-parts in the datasets with configurable options
# You can create your own builder configuration class to store attribute, inheriting from datasets.BuilderConfig
# BUILDER_CONFIG_CLASS = MyBuilderConfig
# You will be able to load one or the other configurations in the following list with
# data = datasets.load_dataset('my_dataset', 'first_domain')
# data = datasets.load_dataset('my_dataset', 'second_domain')
BUILDER_CONFIGS = [
datasets.BuilderConfig(
name="presupposition_all_n_presupposition",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="presupposition_both_presupposition",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="presupposition_change_of_state",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="presupposition_cleft_existence",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="presupposition_cleft_uniqueness",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="presupposition_only_presupposition",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="presupposition_possessed_definites_existence",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="presupposition_possessed_definites_uniqueness",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="presupposition_question_presupposition",
version=VERSION,
description="Presuppositions are facts that the speaker takes for granted when uttering a sentence.",
),
datasets.BuilderConfig(
name="implicature_connectives",
version=VERSION,
description="Scalar implicatures are inferences which can be drawn when one member of a memorized lexical scale is uttered.",
),
datasets.BuilderConfig(
name="implicature_gradable_adjective",
version=VERSION,
description="Scalar implicatures are inferences which can be drawn when one member of a memorized lexical scale is uttered.",
),
datasets.BuilderConfig(
name="implicature_gradable_verb",
version=VERSION,
description="Scalar implicatures are inferences which can be drawn when one member of a memorized lexical scale is uttered.",
),
datasets.BuilderConfig(
name="implicature_modals",
version=VERSION,
description="Scalar implicatures are inferences which can be drawn when one member of a memorized lexical scale is uttered.",
),
datasets.BuilderConfig(
name="implicature_numerals_10_100",
version=VERSION,
description="Scalar implicatures are inferences which can be drawn when one member of a memorized lexical scale is uttered.",
),
datasets.BuilderConfig(
name="implicature_numerals_2_3",
version=VERSION,
description="Scalar implicatures are inferences which can be drawn when one member of a memorized lexical scale is uttered.",
),
datasets.BuilderConfig(
name="implicature_quantifiers",
version=VERSION,
description="Scalar implicatures are inferences which can be drawn when one member of a memorized lexical scale is uttered.",
),
]
def _info(self):
if (
"presupposition" in self.config.name
): # This is the name of the configuration selected in BUILDER_CONFIGS above
features = datasets.Features(
{
"premise": datasets.Value("string"),
"hypothesis": datasets.Value("string"),
"trigger": datasets.Value("string"),
"trigger1": datasets.Value("string"),
"trigger2": datasets.Value("string"),
"presupposition": datasets.Value("string"),
"gold_label": datasets.ClassLabel(names=["entailment", "neutral", "contradiction"]),
"UID": datasets.Value("string"),
"pairID": datasets.Value("string"),
"paradigmID": datasets.Value("int16")
# These are the features of your dataset like images, labels ...
}
)
else: # This is an example to show how to have different features for "first_domain" and "second_domain"
features = datasets.Features(
{
"premise": datasets.Value("string"),
"hypothesis": datasets.Value("string"),
"gold_label_log": datasets.ClassLabel(names=["entailment", "neutral", "contradiction"]),
"gold_label_prag": datasets.ClassLabel(names=["entailment", "neutral", "contradiction"]),
"spec_relation": datasets.Value("string"),
"item_type": datasets.Value("string"),
"trigger": datasets.Value("string"),
"lexemes": datasets.Value("string"),
# These are the features of your dataset like images, labels ...
}
)
return datasets.DatasetInfo(
# This is the description that will appear on the datasets page.
description=_DESCRIPTION,
# This defines the different columns of the dataset and their types
features=features, # Here we define them above because they are different between the two configurations
# If there's a common (input, target) tuple from the features,
# specify them here. They'll be used if as_supervised=True in
# builder.as_dataset.
supervised_keys=None,
# Homepage of the dataset for documentation
homepage=_HOMEPAGE,
# License for the dataset if available
license=_LICENSE,
# Citation for the dataset
citation=_CITATION,
)
def _split_generators(self, dl_manager):
"""Returns SplitGenerators."""
# TODO: This method is tasked with downloading/extracting the data and defining the splits depending on the configuration
# If several configurations are possible (listed in BUILDER_CONFIGS), the configuration selected by the user is in self.config.name
# dl_manager is a datasets.download.DownloadManager that can be used to download and extract URLs
# It can accept any type or nested list/dict and will give back the same structure with the url replaced with path to local files.
# By default the archives will be extracted and a path to a cached folder where they are extracted is returned instead of the archive
my_urls = _URLs["default"]
base_config = self.config.name.split("_")[0]
secondary_config = self.config.name.split(base_config + "_")[1]
data_dir = os.path.join(dl_manager.download_and_extract(my_urls), "IMPPRES", base_config)
return [
datasets.SplitGenerator(
name=secondary_config,
# These kwargs will be passed to _generate_examples
gen_kwargs={
"filepath": os.path.join(data_dir, secondary_config + ".jsonl"),
"split": "test",
},
)
]
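    # Worked example: for the config "implicature_connectives", base_config is
    # "implicature", secondary_config is "connectives", and the single generated
    # split (named "connectives") reads <extracted>/IMPPRES/implicature/connectives.jsonl.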
def _generate_examples(self, filepath, split):
""" Yields examples. """
# TODO: This method will receive as arguments the `gen_kwargs` defined in the previous `_split_generators` method.
# It is in charge of opening the given file and yielding (key, example) tuples from the dataset
# The key is not important, it's more here for legacy reason (legacy from tfds)
with open(filepath, encoding="utf-8") as f:
for id_, row in enumerate(f):
data = json.loads(row)
if "presupposition" in self.config.name:
# for k, v in data.items():
# print('{}({}): {}'.format(k, type(v), v))
# print('-'*55)
if "trigger1" not in list(data.keys()):
yield id_, {
"premise": data["sentence1"],
"hypothesis": data["sentence2"],
"trigger": data["trigger"],
"trigger1": "Not_In_Example",
"trigger2": "Not_In_Example",
"presupposition": data["presupposition"],
"gold_label": data["gold_label"],
"UID": data["UID"],
"pairID": data["pairID"],
"paradigmID": data["paradigmID"],
}
else:
yield id_, {
"premise": data["sentence1"],
"hypothesis": data["sentence2"],
"trigger": "Not_In_Example",
"trigger1": data["trigger1"],
"trigger2": data["trigger2"],
"presupposition": "Not_In_Example",
"gold_label": data["gold_label"],
"UID": data["UID"],
"pairID": data["pairID"],
"paradigmID": data["paradigmID"],
}
else:
yield id_, {
"premise": data["sentence1"],
"hypothesis": data["sentence2"],
"gold_label_log": data["gold_label_log"],
"gold_label_prag": data["gold_label_prag"],
"spec_relation": data["spec_relation"],
"item_type": data["item_type"],
"trigger": data["trigger"],
"lexemes": data["lexemes"],
}
| 56.910394 | 1,197 | 0.634463 | 11,917 | 0.750535 | 2,666 | 0.167905 | 0 | 0 | 0 | 0 | 9,985 | 0.628858 |
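# Usage sketch for the builder above (illustrative; assumes the script is available
# locally as "imppres.py"; the config and split names come from BUILDER_CONFIGS and
# _split_generators):
#
#     from datasets import load_dataset
#     ds = load_dataset("./imppres.py", "implicature_connectives")
#     print(ds["connectives"][0])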
5b201dedf7625f49673a17f90219f4d165f06f5d | 1,322 | py | Python | app.py | juergenpointinger/status-dashboard | 439c7e9b6966ff10ada4062c6b97d5088083f442 | [
"MIT"
]
| null | null | null | app.py | juergenpointinger/status-dashboard | 439c7e9b6966ff10ada4062c6b97d5088083f442 | [
"MIT"
]
| null | null | null | app.py | juergenpointinger/status-dashboard | 439c7e9b6966ff10ada4062c6b97d5088083f442 | [
"MIT"
]
| null | null | null | # Standard library imports
import logging
import os
# Third party imports
import dash
import dash_bootstrap_components as dbc
from flask_caching import Cache
import plotly.io as pio
# Local application imports
from modules.gitlab import GitLab
import settings
# Initialize logging mechanism
logging.basicConfig(level=settings.LOGLEVEL, format=settings.LOGFORMAT)
logger = logging.getLogger(__name__)
gl = GitLab()
logger.info("Current GitLab version: {}".format(GitLab.version))
# App instance
app = dash.Dash(__name__,
suppress_callback_exceptions=True,
external_stylesheets=[dbc.themes.BOOTSTRAP])
app.title = settings.APP_NAME
# App caching
# CACHE_CONFIG = {
# # Note that filesystem cache doesn't work on systems with ephemeral
# # filesystems like Heroku.
# 'CACHE_TYPE': 'filesystem',
# 'CACHE_DIR': 'cache-directory',
# # should be equal to maximum number of users on the app at a single time
# # higher numbers will store more data in the filesystem / redis cache
# 'CACHE_THRESHOLD': 200
# }
CACHE_CONFIG = {
# try 'filesystem' if you don't want to setup redis
'CACHE_TYPE': 'redis',
'CACHE_REDIS_URL': settings.REDIS_URL
}
cache = Cache()
cache.init_app(app.server, config=CACHE_CONFIG)
pio.templates.default = "plotly_dark"
| 28.12766 | 77 | 0.729955 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 638 | 0.482602 |
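# Caching usage sketch (illustrative only; the helper name and timeout below are
# assumptions, not part of this app): any slow lookup can be wrapped with the
# Flask-Caching instance configured above, e.g.
#
#     @cache.memoize(timeout=300)
#     def expensive_lookup(key):
#         ...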
5b20baf76a7bc453b189c49cad4f4c0139f19706 | 5,154 | py | Python | tests/scanner/test_data/fake_retention_scanner_data.py | ogreface/forseti-security | a7a3573183fa1416c605dad683587717795fe13b | [
"Apache-2.0"
]
| null | null | null | tests/scanner/test_data/fake_retention_scanner_data.py | ogreface/forseti-security | a7a3573183fa1416c605dad683587717795fe13b | [
"Apache-2.0"
]
| null | null | null | tests/scanner/test_data/fake_retention_scanner_data.py | ogreface/forseti-security | a7a3573183fa1416c605dad683587717795fe13b | [
"Apache-2.0"
]
| null | null | null | # Copyright 2018 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fake Retention scanner data."""
import json
from datetime import datetime, timedelta
import collections
from google.cloud.forseti.common.gcp_type import organization
from google.cloud.forseti.common.gcp_type import project
from google.cloud.forseti.common.gcp_type import bucket
from google.cloud.forseti.scanner.audit import retention_rules_engine as rre
ORGANIZATION = organization.Organization(
'123456',
display_name='Default Organization',
full_name='organization/123456/',
data='fake_org_data_123456',
)
PROJECT1 = project.Project(
'def-project-1',
project_number=11223344,
display_name='default project 1',
parent=ORGANIZATION,
full_name='organization/123456/project/def-project-1/',
data='fake_project_data_11223344',
)
PROJECT2 = project.Project(
'def-project-2',
project_number=55667788,
display_name='default project 2',
parent=ORGANIZATION,
full_name='organization/123456/project/def-project-2/',
data='fake_project_data_55667788',
)
PROJECT3 = project.Project(
'def-project-3',
project_number=12121212,
display_name='default project 3',
parent=ORGANIZATION,
full_name='organization/123456/project/def-project-3/',
data='fake_project_data_12121212',
)
PROJECT4 = project.Project(
'def-project-4',
project_number=34343434,
display_name='default project 4',
parent=ORGANIZATION,
full_name='organization/123456/project/def-project-4/',
data='fake_project_data_34343434',
)
def build_bucket_violations(bucket, rule_name):
data_lifecycle = bucket.get_lifecycle_rule()
data_lifecycle_str = json.dumps(data_lifecycle, sort_keys=True)
return [rre.RuleViolation(
resource_name='buckets/'+bucket.id,
resource_id=bucket.id,
resource_type=bucket.type,
full_name=bucket.full_name,
rule_index=0,
rule_name=rule_name,
violation_type='RETENTION_VIOLATION',
violation_data=data_lifecycle_str,
resource_data=bucket.data,
)]
class FakeBucketDataCreater():
def __init__(self, id, project):
self._id = id
self._parent = project
self._data_lifecycle = None
def SetLifecycleDict(self):
self._data_lifecycle = {"rule": []}
def AddLifecycleDict(
self,
action=None,
age=None,
created_before=None,
matches_storage_class=None,
num_newer_versions=None,
is_live=None):
if not self._data_lifecycle:
self.SetLifecycleDict()
        result = {'action': {}, 'condition': {}}
        result['action']['type'] = action
        if age is not None:
            result['condition']['age'] = age
        if created_before is not None:
            result['condition']['createdBefore'] = created_before
        if matches_storage_class is not None:
            result['condition']['matchesStorageClass'] = matches_storage_class
        if num_newer_versions is not None:
            result['condition']['numNewerVersions'] = num_newer_versions
        if is_live is not None:
            result['condition']['isLive'] = is_live
self._data_lifecycle['rule'].append(result)
return result
def get_resource(self):
data_dict = {'id':self._id, 'location':'earth'}
if self._data_lifecycle is not None:
data_dict['lifecycle'] = self._data_lifecycle
data = json.dumps(data_dict)
return bucket.Bucket(bucket_id=self._id,
parent=self._parent,
full_name=self._parent.full_name+'bucket/'+self._id+'/',
data=data)
FakeBucketDataInput = collections.namedtuple(
'FakeBucketDataInput', ['id', 'project', 'lifecycles'])
LifecycleInput = collections.namedtuple(
'LifecycleInput', ['action', 'conditions'])
def get_fake_bucket_resource(fake_bucket_data_input):
data_creater = FakeBucketDataCreater(
fake_bucket_data_input.id, fake_bucket_data_input.project)
for lifecycle in fake_bucket_data_input.lifecycles:
data_creater.AddLifecycleDict(
action=lifecycle.action,
age=lifecycle.conditions.get('age'),
created_before=lifecycle.conditions.get('created_before'),
matches_storage_class=lifecycle.conditions.get('matches_storage_class'),
num_newer_versions=lifecycle.conditions.get('num_newer_versions'),
is_live=lifecycle.conditions.get('is_live'))
return data_creater.get_resource()
| 34.13245 | 85 | 0.684517 | 1,634 | 0.317035 | 0 | 0 | 0 | 0 | 0 | 0 | 1,529 | 0.296663 |
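# Illustrative usage of the helpers above (the bucket id and rule name are made up):
#
#     fake_input = FakeBucketDataInput(
#         id='fake-bucket-1',
#         project=PROJECT1,
#         lifecycles=[LifecycleInput(action='Delete', conditions={'age': 400})])
#     fake_bucket = get_fake_bucket_resource(fake_input)
#     violations = build_bucket_violations(fake_bucket, 'fake-rule-name')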
5b22463c2df2d021f347bc17bcb98562b99edb54 | 4,298 | py | Python | libsonyapi/camera.py | BugsForDays/libsonyapi | c6482b4ad90f199b7fb4e344f8e61d4ed0f9466f | [
"MIT"
]
| 13 | 2019-04-19T16:44:58.000Z | 2021-09-20T05:33:10.000Z | libsonyapi/camera.py | BugsForDays/libsonyapi | c6482b4ad90f199b7fb4e344f8e61d4ed0f9466f | [
"MIT"
]
| 3 | 2021-04-23T17:21:50.000Z | 2022-01-06T17:21:28.000Z | libsonyapi/camera.py | BugsForDays/libsonyapi | c6482b4ad90f199b7fb4e344f8e61d4ed0f9466f | [
"MIT"
]
| 5 | 2019-04-11T20:24:47.000Z | 2021-10-17T22:02:56.000Z | import socket
import requests
import json
import xml.etree.ElementTree as ET
class Camera(object):
def __init__(self):
"""
create camera object
"""
self.xml_url = self.discover()
self.name, self.api_version, self.services = self.connect(self.xml_url)
self.camera_endpoint_url = self.services["camera"] + "/camera"
self.available_apis = self.do("getAvailableApiList")["result"]
# prepare camera for rec mode
if "startRecMode" in self.available_apis[0]:
self.do("startRecMode")
self.available_apis = self.do("getAvailableApiList")["result"]
self.connected = False
def discover(self):
"""
discover camera using upnp ssdp method, return url for device xml
"""
msg = (
"M-SEARCH * HTTP/1.1\r\n"
"HOST: 239.255.255.250:1900\r\n"
'MAN: "ssdp:discover" \r\n'
"MX: 2\r\n"
"ST: urn:schemas-sony-com:service:ScalarWebAPI:1\r\n"
"\r\n"
).encode()
# Set up UDP socket
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
s.settimeout(2)
s.sendto(msg, ("239.255.255.250", 1900))
try:
while True:
data, addr = s.recvfrom(65507)
decoded_data = data.decode()
# get xml url from ssdp response
for item in decoded_data.split("\n"):
if "LOCATION" in item:
return item.strip().split(" ")[
1
] # get location url from ssdp response
self.connected = True
except socket.timeout:
raise ConnectionError("you are not connected to the camera's wifi")
def connect(self, xml_url):
"""
returns name, api_version, api_service_urls on success
"""
device_xml_request = requests.get(xml_url)
xml_file = str(device_xml_request.content.decode())
xml = ET.fromstring(xml_file)
name = xml.find(
"{urn:schemas-upnp-org:device-1-0}device/{urn:schemas-upnp-org:device-1-0}friendlyName"
).text
api_version = xml.find(
"{urn:schemas-upnp-org:device-1-0}device/{urn:schemas-sony-com:av}X_ScalarWebAPI_DeviceInfo/{urn:schemas-sony-com:av}X_ScalarWebAPI_Version"
).text
service_list = xml.find(
"{urn:schemas-upnp-org:device-1-0}device/{urn:schemas-sony-com:av}X_ScalarWebAPI_DeviceInfo/{urn:schemas-sony-com:av}X_ScalarWebAPI_ServiceList"
)
api_service_urls = {}
for service in service_list:
service_type = service.find(
"{urn:schemas-sony-com:av}X_ScalarWebAPI_ServiceType"
).text
action_url = service.find(
"{urn:schemas-sony-com:av}X_ScalarWebAPI_ActionList_URL"
).text
api_service_urls[service_type] = action_url
return name, api_version, api_service_urls
def info(self):
"""
returns camera info(name, api version, supported services, available apis) in a dictionary
"""
return {
"name": self.name,
"api version": self.api_version,
"supported services": list(self.services.keys()),
"available apis": self.available_apis,
}
def post_request(self, url, method, param=[]):
"""
sends post request to url with method and param as json
"""
if type(param) is not list:
param = [param]
json_request = {"method": method, "params": param, "id": 1, "version": "1.0"}
request = requests.post(url, json.dumps(json_request))
response = json.loads(request.content)
if "error" in list(response.keys()):
print("Error: ")
print(response)
else:
return response
def do(self, method, param=[]):
"""
        call the camera service api with the given method and optional param args
"""
# TODO: response handler, return result of do, etc
response = self.post_request(self.camera_endpoint_url, method, param)
return response
class ConnectionError(Exception):
pass
| 37.051724 | 156 | 0.578176 | 4,215 | 0.980689 | 0 | 0 | 0 | 0 | 0 | 0 | 1,578 | 0.367148 |
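# Usage sketch (requires joining the camera's Wi-Fi network first; illustrative only):
#
#     camera = Camera()                  # SSDP discovery, XML handshake, startRecMode
#     print(camera.info())               # name, API version, services, available APIs
#     camera.do("getAvailableApiList")   # same call the constructor issues internally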
5b231e5f06d51cf2896d5d0d0db4095473d26007 | 11,961 | py | Python | utility_functions_flu.py | neherlab/treetime_validation | c9760194712396ea5f5c33a9215eddbd3d13bfc1 | [
"MIT"
]
| 4 | 2019-01-28T06:47:48.000Z | 2021-04-22T16:31:37.000Z | utility_functions_flu.py | neherlab/treetime_validation | c9760194712396ea5f5c33a9215eddbd3d13bfc1 | [
"MIT"
]
| 1 | 2020-04-03T14:42:11.000Z | 2020-04-03T14:42:11.000Z | utility_functions_flu.py | neherlab/treetime_validation | c9760194712396ea5f5c33a9215eddbd3d13bfc1 | [
"MIT"
]
| 1 | 2020-03-25T06:58:45.000Z | 2020-03-25T06:58:45.000Z | #!/usr/bin/env python
"""
This module defines functions to facilitate operations with data specific
to Flu trees and alignments.
"""
import numpy as np
from Bio import AlignIO, Phylo
from Bio.Align import MultipleSeqAlignment
import random
import subprocess
import datetime
import os, copy
import matplotlib.pyplot as plt
from scipy.stats import linregress
from collections import Counter
import StringIO
import treetime
from utility_functions_general import remove_polytomies
from utility_functions_beast import run_beast, create_beast_xml, read_beast_log
import xml.etree.ElementTree as XML
from external_binaries import BEAST_BIN
def date_from_seq_name(name):
"""
Parse flu sequence name to the date in numeric format (YYYY.F)
Args:
- name(str): name of the flu sequence.
Returns:
- sequence sampling date if succeeded to parse. None otherwise.
"""
def str2date_time(instr):
"""
Convert input string to datetime object.
Args:
- instr (str): input string. Accepts one of the formats:
{MM.DD.YYYY, MM.YYYY, MM/DD/YYYY, MM/YYYY, YYYY}.
Returns:
- date (datetime.datetime): parsed date object. If the parsing failed,
None is returned
"""
instr = instr.replace('/', '.')
# import ipdb; ipdb.set_trace()
try:
date = datetime.datetime.strptime(instr, "%m.%d.%Y")
except ValueError:
date = None
if date is not None:
return date
try:
date = datetime.datetime.strptime(instr, "%m.%Y")
except ValueError:
date = None
if date is not None:
return date
try:
date = datetime.datetime.strptime(instr, "%Y")
except ValueError:
date = None
return date
try:
date = str2date_time(name.split('|')[3].strip())
return date.year + (date - datetime.datetime(date.year, 1, 1)).days / 365.25
except:
return None
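# Example (hypothetical sequence name, for illustration only):
#   date_from_seq_name("A/SomePlace/1234/2012|H3N2|EPI123456|03/15/2012")
# parses the fourth '|'-separated field ("03/15/2012") and returns roughly 2012.2;
# None is returned when that field cannot be parsed as a date.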
def dates_from_flu_tree(tree):
"""
Iterate over the Flu tree, parse each leaf name and return dates for the
leaves as dictionary.
Args:
- tree(str or Biopython tree): Flu tree
Returns:
- dates(dict): dictionary of dates in format {seq_name: numdate}. Only the
entries which were parsed successfully are included.
"""
if isinstance(tree, str):
tree = Phylo.read(tree, 'newick')
dates = {k.name:date_from_seq_name(k.name) for k in tree.get_terminals()
if date_from_seq_name(k.name) is not None}
return dates
def subtree_with_same_root(tree, Nleaves, outfile, optimize=True):
"""
Sample subtree of the given tree so that the root of the subtree is that of
the original tree.
Args:
- tree(str or Biopython tree): initial tree
- Nleaves(int): number of leaves in the target subtree
- outfile(str): path to save the resulting subtree
    - optimize(bool): perform branch length optimization for the subtree?
Returns:
- tree(Biopython tree): the subtree
"""
if isinstance(tree, str):
treecopy = Phylo.read(tree, 'newick')
else:
treecopy = copy.deepcopy(tree)
remove_polytomies(treecopy)
assert(len(treecopy.root.clades) == 2)
tot_terminals = treecopy.count_terminals()
# sample to the left of the root
left = treecopy.root.clades[0]
n_left = left.count_terminals()
right = treecopy.root.clades[1]
n_right = right.count_terminals()
n_left_sampled = np.min((n_left, Nleaves * n_left / (n_left + n_right)))
    n_left_sampled = np.max((n_left_sampled, 5)) # make sure we sample at least 5 leaves
left_terminals = left.get_terminals()
left_sample_idx = np.random.choice(np.arange(len(left_terminals)), size=n_left_sampled, replace=False)
left_sample = [left_terminals[i] for i in left_sample_idx]
# sample to the right of the root
n_right_sampled = np.min((n_right, Nleaves * n_right / (n_left + n_right)))
    n_right_sampled = np.max((n_right_sampled, 5)) # make sure we sample at least 5 leaves
right_terminals = right.get_terminals()
right_sample_idx = np.random.choice(np.arange(len(right_terminals)), size=n_right_sampled, replace=False)
right_sample = [right_terminals[i] for i in right_sample_idx]
for leaf in treecopy.get_terminals():
if leaf not in right_sample and leaf not in left_sample:
treecopy.prune(leaf)
else:
pass
#print ("leaving leaf {} in the tree".format(leaf.name))
if optimize:
import treetime
dates = dates_from_flu_tree(treecopy)
aln = './resources/flu_H3N2/H3N2_HA_2011_2013.fasta'
tt = treetime.TreeAnc(tree=treecopy, aln=aln,gtr='Jukes-Cantor')
tt.optimize_seq_and_branch_len(prune_short=False)
Phylo.write(tt.tree, outfile, 'newick')
return tt.tree
else:
Phylo.write(treecopy, outfile, 'newick')
return treecopy
def subtree_year_vol(tree, N_per_year, outfile):
"""
Sample subtree of the given tree with equal number of samples per year.
Note:
- if there are not enough leaves sampled at a given year, all leaves for this
year will be included in the subtree.
Args:
- tree(str or Biopython object): Initial tree
- N_per_year(int): number of samples per year.
- outfile (str): path to save the subtree
Returns:
- tree(Biopython tree): the subtree
"""
if isinstance(tree, str):
treecopy = Phylo.read(tree, 'newick')
else:
treecopy = copy.deepcopy(tree)
remove_polytomies(treecopy)
dates = dates_from_flu_tree(treecopy)
sample = []
cntr = Counter(map (int, dates.values()))
years = cntr.keys()
min_year = np.min(years)
for year in years:
all_names = [k for k in dates if int(dates[k]) == year]
if len(all_names) <= N_per_year or year == min_year:
sample += all_names
else:
sample += list(np.random.choice(all_names, size=N_per_year, replace=False))
for leaf in treecopy.get_terminals():
if leaf.name not in sample:
treecopy.prune(leaf)
else:
pass
#print ("leaving leaf {} in the tree".format(leaf.name))
Phylo.write(treecopy, outfile, 'newick')
return treecopy
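# Illustrative call (file names are hypothetical):
#   subtree_year_vol('H3N2_HA.nwk', 30, 'H3N2_HA_30_per_year.nwk')
# keeps at most 30 randomly chosen leaves per sampling year (all leaves are kept for
# the earliest year and for years with fewer than 30 samples).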
def create_LSD_dates_file_from_flu_tree(tree, outfile):
"""
Parse dates from the flu tree and write to the file in the LSD format.
Args:
- tree(str or Biopython object): Initial tree
- outfile(str): path to save the LSD dates file.
Returns:
- dates(dict): dates parsed from the tree as dictionary.
"""
dates = dates_from_flu_tree(tree)
with open(outfile, 'w') as df:
df.write(str(len(dates)) + "\n")
df.write("\n".join([str(k) + "\t" + str(dates[k]) for k in dates]))
return dates
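# The LSD dates file written above contains the number of records on the first line,
# followed by one "leaf_name<TAB>numeric_date" record per line, e.g. (illustrative):
#   2
#   <leaf_name_1><TAB>2011.45
#   <leaf_name_2><TAB>2012.74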
def make_known_dates_dict(alnfile, dates_known_fraction=1.0):
"""
Read all the dates of the given flu sequences, and make the dates dictionary
for only a fraction of them. The sequences in the resulting dict are chosen
randomly.
"""
aln = AlignIO.read(alnfile, 'fasta')
dates = {k.name: date_from_seq_name(k.name) for k in aln}
    # randomly choose the dates so that only a dates_known_fraction share of the dates is known
    if dates_known_fraction != 1.0:
        assert(dates_known_fraction > 0 and dates_known_fraction < 1.0)
        known_keys = np.random.choice(dates.keys(), size=int(len(dates) * dates_known_fraction), replace=False)
        dates = {k: dates[k] for k in known_keys}
return dates
def create_treetime_with_missing_dates(alnfile, treefile, dates_known_fraction=1.0):
"""dates = {k.name: date_from_seq_name(k.name) for k in aln}
Create TreeTime object with fraction of leaves having no sampling dates.
The leaves to earse sampling dates are chosen randomly.
Args:
- alnfile(str): path to the flu alignment
    - treefile(str): path to the Flu newick tree
- dates_known_fraction(float): fraction of leaves, which should have
sampling date information.
"""
aln = AlignIO.read(alnfile, 'fasta')
tt = Phylo.read(treefile, 'newick')
dates = make_known_dates_dict(alnfile, dates_known_fraction)
myTree = treetime.TreeTime(gtr='Jukes-Cantor', tree = treefile,
aln = alnfile, verbose = 4, dates = dates, debug=False)
myTree.optimize_seq_and_branch_len(reuse_branch_len=True, prune_short=True, max_iter=5, infer_gtr=False)
return myTree
def create_subtree(tree, n_seqs, out_file, st_type='equal_sampling'):
"""
Args:
- tree(filename or Biopython tree): original tree
- n_seqs: number of leaves in the resulting subtree
    - out_file: output location to store the resulting subtree
- st_type: type of the subtree generation algorithm. Available types:
- random: just choose n_leaves randomly
- equal_sampling: choose equal leaves each year (if possible)
- preserve_root: sample from right and left subtrees of the tree root.
The root of the resulting subtree is therefore the same as of the original tree
"""
if isinstance(tree, str):
tree = Phylo.read(tree, 'newick')
pass
def correct_beast_xml_for_missing_dates(config_xml):
def create_leafHeight(strain):
xml_leafHeightParam = XML.Element('parameter')
xml_leafHeightParam.attrib={'id': strain+".height"}
xml_leafHeight = XML.Element('leafHeight')
xml_leafHeight.attrib={"taxon": strain}
xml_leafHeight.append(xml_leafHeightParam)
return xml_leafHeight
def create_leafHeight_operator(strain, weight):
#<parameter idref="A/Yurimaguas/FLU4785/2006.height"/>
xml_param = XML.Element('parameter')
xml_param.attrib = {'idref': strain+'.height'}
#<uniformOperator weight="0.024154589371980676">
xml_operator = XML.Element('uniformOperator')
xml_operator.attrib = {'weight': str(weight)}
xml_operator.append(xml_param)
return xml_operator
def create_taxon_date():
xml_date = XML.Element('date')
xml_date.attrib={'value': '2011', 'direction':"forwards", 'units':"years", 'precision':'4.0'}
return xml_date
xml_treeModel = config_xml.find('treeModel')
xml_operators = config_xml.find('operators')
xml_taxa = config_xml.find('taxa').findall('taxon')
xml_filelog = config_xml.findall('mcmc')[0].findall('log')[np.argmax([k.attrib['id']=='filelog' for k in config_xml .findall('mcmc')[0].findall('log')])]
operator_weight = 1. / np.sum([k.find('date') is None for k in xml_taxa])
#import ipdb; ipdb.set_trace()
for t in xml_taxa:
if t.find('date') is None:
strain = t.attrib['id']
t.append(create_taxon_date())
xml_treeModel.append(create_leafHeight(strain))
xml_operators.append(create_leafHeight_operator(strain, operator_weight))
parameter = XML.Element("parameter")
parameter.attrib = {"idref" : strain+".height"}
xml_filelog.append(parameter)
return config_xml
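# NOTE: the run_beast defined below shadows the run_beast imported from
# utility_functions_beast at the top of this module; it additionally patches the
# BEAST config XML for leaves with missing sampling dates before launching BEAST.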
def run_beast(tree_name, aln_name, dates, beast_prefix, log_post_process=None, template_file="./resources/beast/template_bedford_et_al_2015.xml"):
config_filename = beast_prefix + ".config.xml"
config_xml = create_beast_xml(tree_name, aln_name, dates, beast_prefix, template_file)
config_xml = correct_beast_xml_for_missing_dates(config_xml)
config_xml.write(config_filename)
#print (config_filename)
#return config_xml
call = ["java", "-jar", BEAST_BIN, "-beagle_off", "-overwrite", config_filename]
subprocess.call(call)
if log_post_process is not None:
log_file = beast_prefix + ".log.txt"
log_post_process(log_file)
if __name__ == '__main__':
pass
| 32.239892 | 157 | 0.664995 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,463 | 0.373129 |
5b2402300dbab63aa021dccf15d38bf7417d131e | 90 | py | Python | python/632.smallest-range-covering-elements-from-k-lists.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
]
| null | null | null | python/632.smallest-range-covering-elements-from-k-lists.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
]
| null | null | null | python/632.smallest-range-covering-elements-from-k-lists.py | stavanmehta/leetcode | 1224e43ce29430c840e65daae3b343182e24709c | [
"Apache-2.0"
]
| null | null | null | class Solution:
def smallestRange(self, nums: List[List[int]]) -> List[int]:
| 22.5 | 64 | 0.611111 | 80 | 0.888889 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
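# A possible heap-based implementation for the empty stub above (a sketch added for
# illustration; it is not part of the original file):
import heapq
from typing import List


class Solution:
    def smallestRange(self, nums: List[List[int]]) -> List[int]:
        # Min-heap over the current "front" element of every list: (value, list, index).
        heap = [(lst[0], i, 0) for i, lst in enumerate(nums)]
        heapq.heapify(heap)
        cur_max = max(lst[0] for lst in nums)
        best = [heap[0][0], cur_max]
        while True:
            val, i, j = heapq.heappop(heap)
            if cur_max - val < best[1] - best[0]:
                best = [val, cur_max]
            if j + 1 == len(nums[i]):
                # One list is exhausted, so no smaller covering range exists.
                return best
            nxt = nums[i][j + 1]
            cur_max = max(cur_max, nxt)
            heapq.heappush(heap, (nxt, i, j + 1))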