blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
eca0527dc6e1af8eb14ad2e48ab7b65a7961662a | c1a4742ecd23941140b57cfd61759aa3901e0711 | /src/apps/boards/views.py | 68afa2b13811821ff0bc49eb05d9c7d3ea14862e | []
| no_license | aodarc/django-trello | 31a5b6813f5136b427c483c0d329ec8c231888d0 | ee00fc5a71e2a7003118542b6b8caffaa73bc9b8 | refs/heads/master | 2023-06-29T14:42:05.027572 | 2021-07-26T15:28:28 | 2021-07-26T15:28:28 | 389,680,626 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,806 | py | from django.contrib.auth.decorators import login_required
from django.db.models import Count, Prefetch
from django.http import HttpResponseRedirect
# Create your views here.
from django.urls import reverse_lazy
from django.utils.decorators import method_decorator
from django.views import generic
from rest_framework import generics as rest_generic
from apps.boards.models import Board, Comment, Task
from apps.boards.serializers.comment import CommentSerializer
from apps.boards.serializers.tasks import TaskSerializer
from common.permissions import IsOwnerOrReadOnly
class CreateCommentView(generic.CreateView):
    """Create a :class:`Comment` from a simple one-field HTML form.

    NOTE(review): the new comment is attached to the requesting user's most
    recently created task (``tasks.last()``), not to a task selected by the
    client -- confirm this is intentional and not a placeholder.
    """
    model = Comment
    fields = ["message"]
    template_name = 'boards/create_comment_form.html'
    success_url = reverse_lazy('home:home-page')
    def form_valid(self, form):
        # Stamp ownership and target task server-side before saving.
        obj = form.save(commit=False)
        obj.created_by = self.request.user
        obj.task = self.request.user.tasks.last()
        obj.save()
        return HttpResponseRedirect(self.get_success_url())
class DeleteComment(generic.DeleteView):
    """Delete a :class:`Comment`; only the comment's author may delete it."""
    model = Comment
    success_url = reverse_lazy('home:home-page')
    template_name = 'boards/delete_comments.html'
    def get_queryset(self):
        # Restrict the deletable set to comments created by the requester,
        # so other users' comments 404 instead of being removable.
        return super(DeleteComment, self).get_queryset().filter(created_by=self.request.user)
class BoardDetailView(generic.DetailView):
    """Detail page for a board the requesting user is a member of.

    The queryset eagerly loads the board owner, members, columns and each
    column's non-archived tasks (with their comments and a
    ``comments_count`` annotation) to avoid N+1 queries while rendering.
    """
    model = Board
    context_object_name = 'board'
    template_name = 'boards/board-page.html'
    @method_decorator(login_required)
    def dispatch(self, *args, **kwargs):
        # Require an authenticated user for every HTTP method of this view.
        return super(BoardDetailView, self).dispatch(*args, **kwargs)
    def get_queryset(self):
        # Prefetch per-column tasks together with their comments; archived
        # tasks are excluded from the board display.
        prefetch_tasks = Prefetch(
            'cols__tasks',
            queryset=Task.objects.select_related('col') \
                .prefetch_related('comments')
                .annotate(comments_count=Count('comments')) \
                .exclude(status=Task.STATUS_ARCHIVED)
        )
        # Only boards that list the requester among their users are visible.
        return super(BoardDetailView, self).get_queryset() \
            .select_related('owner') \
            .prefetch_related('users', 'cols', prefetch_tasks) \
            .filter(users=self.request.user)
class CommentListCreateAPIView(rest_generic.ListCreateAPIView):
    """REST endpoint: list all comments / create a new comment.

    Write access is limited by ``IsOwnerOrReadOnly``.
    """
    queryset = Comment.objects.all()
    serializer_class = CommentSerializer
    permission_classes = [IsOwnerOrReadOnly]
    # def get_queryset(self):
    #     return self.queryset.filter(create_by=self.request.user)
    # def get_serializer_class(self):
    #     if self.request.version == 'v1':
    #         return "CommentSerializerV1"
    #     return CommentSerializer
class TaskListCreateAPIView(rest_generic.ListCreateAPIView):
    """REST endpoint: list all tasks / create a new task.

    ``select_related``/``prefetch_related`` avoid N+1 queries when the
    serializer renders each task's creator and comments.
    """
    queryset = Task.objects.select_related('created_by').prefetch_related('comments').all()
    serializer_class = TaskSerializer
    # permission_classes = [IsOwnerOrReadOnly]
| [
"[email protected]"
]
| |
96c18d0ab5d9ca7292ba91d87de1eb104dda90bd | 69145e4b94bd6225138a57305fc09a1c714ebca7 | /home/migrations/0003_resume.py | d1d56477c33b114530c483f060458b5a44616366 | [
"MIT"
]
| permissive | SimonOkello/portfolio | 09504163b34559af6119a89c7d3368e45025bbaa | 8b2436399ba1d686769a88c87567ed5e86b797a4 | refs/heads/main | 2021-12-02T18:58:22.120534 | 2021-10-10T10:55:05 | 2021-10-10T10:55:05 | 412,837,378 | 0 | 0 | MIT | 2021-10-09T09:20:20 | 2021-10-02T15:35:14 | Python | UTF-8 | Python | false | false | 832 | py | # Generated by Django 3.2.7 on 2021-10-03 16:15
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Adds the Resume model: one CV file and one cover-letter file per user.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('home', '0002_service'),
    ]
    operations = [
        migrations.CreateModel(
            name='Resume',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('c_vitae', models.FileField(upload_to='media')),
                ('cover_letter', models.FileField(upload_to='media')),
                # Deleting the user cascades to their resume.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"[email protected]"
]
| |
b6ef4a8d17124102cba4c340c0adaa7d224bd5c3 | b72b41f8191e44ad4b70355ed2c26ea7feb0e1d0 | /main.py | 904a1e4e5ca193586ed8b1d462e14c5b8b9e4054 | [
"BSD-3-Clause"
]
| permissive | dendisuhubdy/neuron-swarms | 6b25bace21f6116790904cc999e0a9540985251b | ceb8854a580abb825155c362dc2e8f801f950ed0 | refs/heads/master | 2021-08-16T10:39:56.229663 | 2017-11-19T17:05:46 | 2017-11-19T17:05:46 | 110,992,001 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | import numpy as np
import time
from visdom import Visdom
from scipy.integrate import odeint
from swarmalators import Swarmalarator
# Module-level Visdom client (remote plotting server) -- connects at import time.
viz = Visdom(server='http://suhubdy.com', port=51401)
def main():
    """Run one swarmalator simulation and scatter-plot the final state."""
    # Load simulation parameters
    a, dt, T, n, L = 1, 0.5, 500, 100, 1 # surprisingly, dt = 0.5 seems to work OK (preliminary)
    swarms = Swarmalarator(a,dt,T,n,L)
    x, y, theta = swarms.solve()
    #Plot only the final timestep (index -1).
    swarms.scatter_t(x,y,theta,-1)
# Script entry point.
if __name__=="__main__":
    main()
| [
"[email protected]"
]
| |
704b36b47012d436f8ccc0f8667299c1f37979dd | 5d9932a1abeae21b8201368e5cf465680f106761 | /data_ccxt/btcbox.py | 8ade01dad3dba51cdbd5eef1bfcdd3849b2bd56f | []
| no_license | qqzhangjian789/text | 5dc6086e55d8a9494b889fa40cc9730da6bf5940 | 938be0df0a965aacf13cfb942548b8d2a1c7cec0 | refs/heads/master | 2023-05-04T11:38:47.178345 | 2021-05-21T17:44:13 | 2021-05-21T17:44:13 | 286,178,737 | 1 | 6 | null | null | null | null | UTF-8 | Python | false | false | 15,257 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from data_ccxt.base.exchange import Exchange
# -----------------------------------------------------------------------------
try:
basestring # Python 3
except NameError:
basestring = str # Python 2
import json
from data_ccxt.base.errors import ExchangeError
from data_ccxt.base.errors import AuthenticationError
from data_ccxt.base.errors import PermissionDenied
from data_ccxt.base.errors import InsufficientFunds
from data_ccxt.base.errors import InvalidOrder
from data_ccxt.base.errors import OrderNotFound
from data_ccxt.base.errors import DDoSProtection
from data_ccxt.base.errors import InvalidNonce
class btcbox(Exchange):
    """ccxt exchange adapter for BtcBox (btcbox.co.jp), a JPY spot exchange."""
    def describe(self):
        """Return the exchange's static metadata merged over the base class's."""
        return self.deep_extend(super(btcbox, self).describe(), {
            'id': 'btcbox',
            'name': 'BtcBox',
            'countries': ['JP'],
            'rateLimit': 1000,
            'version': 'v1',
            'has': {
                'cancelOrder': True,
                'CORS': False,
                'createOrder': True,
                'fetchBalance': True,
                'fetchOpenOrders': True,
                'fetchOrder': True,
                'fetchOrderBook': True,
                'fetchOrders': True,
                'fetchTicker': True,
                'fetchTickers': False,
                'fetchTrades': True,
            },
            'urls': {
                'logo': 'https://user-images.githubusercontent.com/51840849/87327317-98c55400-c53c-11ea-9a11-81f7d951cc74.jpg',
                'api': 'https://www.btcbox.co.jp/api',
                'www': 'https://www.btcbox.co.jp/',
                'doc': 'https://blog.btcbox.jp/en/archives/8762',
                'fees': 'https://support.btcbox.co.jp/hc/en-us/articles/360001235694-Fees-introduction',
            },
            'api': {
                'public': {
                    'get': [
                        'depth',
                        'orders',
                        'ticker',
                    ],
                },
                'private': {
                    'post': [
                        'balance',
                        'trade_add',
                        'trade_cancel',
                        'trade_list',
                        'trade_view',
                        'wallet',
                    ],
                },
            },
            # Markets are hard-coded; BtcBox has no market-listing endpoint here.
            'markets': {
                'BTC/JPY': {'id': 'btc', 'symbol': 'BTC/JPY', 'base': 'BTC', 'quote': 'JPY', 'baseId': 'btc', 'quoteId': 'jpy', 'taker': 0.05 / 100, 'maker': 0.05 / 100},
                'ETH/JPY': {'id': 'eth', 'symbol': 'ETH/JPY', 'base': 'ETH', 'quote': 'JPY', 'baseId': 'eth', 'quoteId': 'jpy', 'taker': 0.10 / 100, 'maker': 0.10 / 100},
                'LTC/JPY': {'id': 'ltc', 'symbol': 'LTC/JPY', 'base': 'LTC', 'quote': 'JPY', 'baseId': 'ltc', 'quoteId': 'jpy', 'taker': 0.10 / 100, 'maker': 0.10 / 100},
                'BCH/JPY': {'id': 'bch', 'symbol': 'BCH/JPY', 'base': 'BCH', 'quote': 'JPY', 'baseId': 'bch', 'quoteId': 'jpy', 'taker': 0.10 / 100, 'maker': 0.10 / 100},
            },
            # Exchange numeric error codes mapped to ccxt exception classes.
            'exceptions': {
                '104': AuthenticationError,
                '105': PermissionDenied,
                '106': InvalidNonce,
                '107': InvalidOrder,  # price should be an integer
                '200': InsufficientFunds,
                '201': InvalidOrder,  # amount too small
                '202': InvalidOrder,  # price should be [0 : 1000000]
                '203': OrderNotFound,
                '401': OrderNotFound,  # cancel canceled, closed or non-existent order
                '402': DDoSProtection,
            },
        })
    def fetch_balance(self, params={}):
        """Fetch account balances; keys are '<cur>_balance' (free) and '<cur>_lock' (used)."""
        self.load_markets()
        response = self.privatePostBalance(params)
        result = {'info': response}
        codes = list(self.currencies.keys())
        for i in range(0, len(codes)):
            code = codes[i]
            currency = self.currency(code)
            currencyId = currency['id']
            free = currencyId + '_balance'
            if free in response:
                account = self.account()
                used = currencyId + '_lock'
                account['free'] = self.safe_number(response, free)
                account['used'] = self.safe_number(response, used)
                result[code] = account
        return self.parse_balance(result)
    def fetch_order_book(self, symbol, limit=None, params={}):
        """Fetch the order book; 'coin' is only sent when multiple markets exist."""
        self.load_markets()
        market = self.market(symbol)
        request = {}
        numSymbols = len(self.symbols)
        if numSymbols > 1:
            request['coin'] = market['baseId']
        response = self.publicGetDepth(self.extend(request, params))
        return self.parse_order_book(response)
    def parse_ticker(self, ticker, market=None):
        """Convert a raw ticker dict to the unified ccxt ticker structure.

        NOTE(review): the exchange does not return a timestamp, so the
        local clock (``self.milliseconds()``) is used instead.
        """
        timestamp = self.milliseconds()
        symbol = None
        if market is not None:
            symbol = market['symbol']
        last = self.safe_number(ticker, 'last')
        return {
            'symbol': symbol,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'high': self.safe_number(ticker, 'high'),
            'low': self.safe_number(ticker, 'low'),
            'bid': self.safe_number(ticker, 'buy'),
            'bidVolume': None,
            'ask': self.safe_number(ticker, 'sell'),
            'askVolume': None,
            'vwap': None,
            'open': None,
            'close': last,
            'last': last,
            'previousClose': None,
            'change': None,
            'percentage': None,
            'average': None,
            'baseVolume': self.safe_number(ticker, 'vol'),
            'quoteVolume': self.safe_number(ticker, 'volume'),
            'info': ticker,
        }
    def fetch_ticker(self, symbol, params={}):
        """Fetch a single ticker for the given symbol."""
        self.load_markets()
        market = self.market(symbol)
        request = {}
        numSymbols = len(self.symbols)
        if numSymbols > 1:
            request['coin'] = market['baseId']
        response = self.publicGetTicker(self.extend(request, params))
        return self.parse_ticker(response, market)
    def parse_trade(self, trade, market=None):
        """Convert a raw public trade to the unified ccxt trade structure."""
        timestamp = self.safe_timestamp(trade, 'date')
        symbol = None
        if market is not None:
            symbol = market['symbol']
        id = self.safe_string(trade, 'tid')
        price = self.safe_number(trade, 'price')
        amount = self.safe_number(trade, 'amount')
        cost = None
        if amount is not None:
            if price is not None:
                cost = price * amount
        type = None
        side = self.safe_string(trade, 'type')
        return {
            'info': trade,
            'id': id,
            'order': None,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'symbol': symbol,
            'type': type,
            'side': side,
            'takerOrMaker': None,
            'price': price,
            'amount': amount,
            'cost': cost,
            'fee': None,
        }
    def fetch_trades(self, symbol, since=None, limit=None, params={}):
        """Fetch recent public trades for the given symbol."""
        self.load_markets()
        market = self.market(symbol)
        request = {}
        numSymbols = len(self.symbols)
        if numSymbols > 1:
            request['coin'] = market['baseId']
        response = self.publicGetOrders(self.extend(request, params))
        return self.parse_trades(response, market, since, limit)
    def create_order(self, symbol, type, side, amount, price=None, params={}):
        """Place an order; the API takes 'type' as the side ('buy'/'sell')."""
        self.load_markets()
        market = self.market(symbol)
        request = {
            'amount': amount,
            'price': price,
            'type': side,
            'coin': market['baseId'],
        }
        response = self.privatePostTradeAdd(self.extend(request, params))
        #
        # {
        #     "result":true,
        #     "id":"11"
        # }
        #
        return self.parse_order(response, market)
    def cancel_order(self, id, symbol=None, params={}):
        """Cancel an order by id; symbol defaults to BTC/JPY when omitted."""
        self.load_markets()
        # a special case for btcbox – default symbol is BTC/JPY
        if symbol is None:
            symbol = 'BTC/JPY'
        market = self.market(symbol)
        request = {
            'id': id,
            'coin': market['baseId'],
        }
        response = self.privatePostTradeCancel(self.extend(request, params))
        #
        # {"result":true, "id":"11"}
        #
        return self.parse_order(response, market)
    def parse_order_status(self, status):
        """Map an exchange order-status string to a unified ccxt status."""
        statuses = {
            # TODO: complete list
            'part': 'open',  # partially or not at all executed
            'all': 'closed',  # fully executed
            'cancelled': 'canceled',
            'closed': 'closed',  # never encountered, seems to be bug in the doc
            'no': 'closed',  # not clarified in the docs...
        }
        return self.safe_string(statuses, status, status)
    def parse_order(self, order, market=None):
        """Convert a raw order dict to the unified ccxt order structure.

        Timestamps are parsed as Tokyo time (+09:00). When the exchange
        returns no explicit status (fetchOrders), 'closed' is inferred
        from a zero remaining amount.
        """
        #
        # {
        #     "id":11,
        #     "datetime":"2014-10-21 10:47:20",
        #     "type":"sell",
        #     "price":42000,
        #     "amount_original":1.2,
        #     "amount_outstanding":1.2,
        #     "status":"closed",
        #     "trades":[]
        # }
        #
        id = self.safe_string(order, 'id')
        datetimeString = self.safe_string(order, 'datetime')
        timestamp = None
        if datetimeString is not None:
            timestamp = self.parse8601(order['datetime'] + '+09:00')  # Tokyo time
        amount = self.safe_number(order, 'amount_original')
        remaining = self.safe_number(order, 'amount_outstanding')
        filled = None
        if amount is not None:
            if remaining is not None:
                filled = amount - remaining
        price = self.safe_number(order, 'price')
        cost = None
        if price is not None:
            if filled is not None:
                cost = filled * price
        # status is set by fetchOrder method only
        status = self.parse_order_status(self.safe_string(order, 'status'))
        # fetchOrders do not return status, use heuristic
        if status is None:
            if remaining is not None and remaining == 0:
                status = 'closed'
        trades = None  # todo: self.parse_trades(order['trades'])
        symbol = None
        if market is not None:
            symbol = market['symbol']
        side = self.safe_string(order, 'type')
        return {
            'id': id,
            'clientOrderId': None,
            'timestamp': timestamp,
            'datetime': self.iso8601(timestamp),
            'lastTradeTimestamp': None,
            'amount': amount,
            'remaining': remaining,
            'filled': filled,
            'side': side,
            'type': None,
            'timeInForce': None,
            'postOnly': None,
            'status': status,
            'symbol': symbol,
            'price': price,
            'stopPrice': None,
            'cost': cost,
            'trades': trades,
            'fee': None,
            'info': order,
            'average': None,
        }
    def fetch_order(self, id, symbol=None, params={}):
        """Fetch a single order by id; symbol defaults to BTC/JPY when omitted."""
        self.load_markets()
        # a special case for btcbox – default symbol is BTC/JPY
        if symbol is None:
            symbol = 'BTC/JPY'
        market = self.market(symbol)
        request = self.extend({
            'id': id,
            'coin': market['baseId'],
        }, params)
        response = self.privatePostTradeView(self.extend(request, params))
        return self.parse_order(response, market)
    def fetch_orders_by_type(self, type, symbol=None, since=None, limit=None, params={}):
        """Fetch orders filtered by API 'type' ('open' or 'all')."""
        self.load_markets()
        # a special case for btcbox – default symbol is BTC/JPY
        if symbol is None:
            symbol = 'BTC/JPY'
        market = self.market(symbol)
        request = {
            'type': type,  # 'open' or 'all'
            'coin': market['baseId'],
        }
        response = self.privatePostTradeList(self.extend(request, params))
        orders = self.parse_orders(response, market, since, limit)
        # status(open/closed/canceled) is None
        # btcbox does not return status, but we know it's 'open' as we queried for open orders
        if type == 'open':
            for i in range(0, len(orders)):
                orders[i]['status'] = 'open'
        return orders
    def fetch_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch all orders (open and closed)."""
        return self.fetch_orders_by_type('all', symbol, since, limit, params)
    def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
        """Fetch open orders only."""
        return self.fetch_orders_by_type('open', symbol, since, limit, params)
    def nonce(self):
        """Return a millisecond timestamp used as the request nonce."""
        return self.milliseconds()
    def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Build the signed request: public GETs use a query string; private
        POSTs are form-encoded with key, nonce and an HMAC signature over the
        hashed secret."""
        url = self.urls['api'] + '/' + self.version + '/' + path
        if api == 'public':
            if params:
                url += '?' + self.urlencode(params)
        else:
            self.check_required_credentials()
            nonce = str(self.nonce())
            query = self.extend({
                'key': self.apiKey,
                'nonce': nonce,
            }, params)
            request = self.urlencode(query)
            # The API secret is hashed first, then used as the HMAC key.
            secret = self.hash(self.encode(self.secret))
            query['signature'] = self.hmac(self.encode(request), self.encode(secret))
            body = self.urlencode(query)
            headers = {
                'Content-Type': 'application/x-www-form-urlencoded',
            }
        return {'url': url, 'method': method, 'body': body, 'headers': headers}
    def handle_errors(self, httpCode, reason, url, method, headers, body, response, requestHeaders, requestBody):
        """Translate {"result":false,"code":...} error payloads into exceptions."""
        if response is None:
            return  # resort to defaultErrorHandler
        # typical error response: {"result":false,"code":"401"}
        if httpCode >= 400:
            return  # resort to defaultErrorHandler
        result = self.safe_value(response, 'result')
        if result is None or result is True:
            return  # either public API(no error codes expected) or success
        code = self.safe_value(response, 'code')
        feedback = self.id + ' ' + body
        self.throw_exactly_matched_exception(self.exceptions, code, feedback)
        raise ExchangeError(feedback)  # unknown message
    def request(self, path, api='public', method='GET', params={}, headers=None, body=None):
        """Issue a request and JSON-decode the body, tolerating leading whitespace."""
        response = self.fetch2(path, api, method, params, headers, body)
        if isinstance(response, basestring):
            # sometimes the exchange returns whitespace prepended to json
            response = self.strip(response)
            if not self.is_json_encoded_object(response):
                raise ExchangeError(self.id + ' ' + response)
            response = json.loads(response)
        return response
| [
"[email protected]"
]
| |
8a56365067845b7b5db72543297923863af7cf25 | 3d792bcf31843a8329f6c9774a8a58a8c49a8a70 | /0x08-python-more_classes/0-rectangle.py | eb68bca78885b8e474dec29c21d2365dde3134f7 | []
| no_license | theurikenneth/alx-higher_level_programming | a6accbe016bdc62ee3f0e849c8e2c847247fb4d9 | 289a08cffa1bcbecab4550b6fed21296cf88fe66 | refs/heads/main | 2023-08-28T08:53:32.676778 | 2021-10-21T06:00:47 | 2021-10-21T06:00:47 | 361,642,411 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 110 | py | #!/usr/bin/python3
"""Contains the rectangle class"""
class Rectangle:
    """A rectangle class"""

    # Bug fix: the docstring used to appear AFTER ``pass``, making it a dead
    # expression statement instead of the class docstring. It now occupies the
    # first statement position, so ``Rectangle.__doc__`` is populated.
    pass
| [
"[email protected]"
]
| |
5c0544ab8f5d844f75a21896e5c0928fd3feac1c | 8dd53a5d1820ae5a3efe799381a90c977afd32c4 | /test/functional/wallet_keypool_topup.py | 8e3ca127b17210112557981640e1cf17922daad5 | [
"MIT"
]
| permissive | mulecore/mulecoin | 8b654817a1b78c9e98f96bfef5febaca23347f64 | e52131742938ae433463f32680837981a5cedc0f | refs/heads/master | 2023-03-28T05:37:53.552271 | 2021-03-27T03:22:13 | 2021-03-27T03:22:13 | 351,796,749 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,799 | py | #!/usr/bin/env python3
# Copyright (c) 2017 The Bitcoin Core developers
# Copyright (c) 2017-2019 The Raven Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
Test HD Wallet keypool restore function.
Two nodes. Node1 is under test. Node0 is providing transactions and generating blocks.
- Start node1, shutdown and backup wallet.
- Generate 110 keys (enough to drain the keypool). Store key 90 (in the initial keypool) and key 110 (beyond the initial keypool). Send funds to key 90 and key 110.
- Stop node1, clear the datadir, move wallet file back into the datadir and restart node1.
- connect node1 to node0. Verify that they sync and node1 receives its funds."""
import shutil
from test_framework.test_framework import MulecoinTestFramework
from test_framework.util import assert_equal, connect_nodes_bi, sync_blocks
class KeypoolRestoreTest(MulecoinTestFramework):
    """Functional test: an HD wallet backup restored after the keypool was
    drained must still recognise funds sent to keys beyond the original pool."""
    def set_test_params(self):
        # Node 0 mines/funds; node 1 (under test) uses a 100-key keypool.
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.extra_args = [[], ['-keypool=100', '-keypoolmin=20']]
    def run_test(self):
        self.tmpdir = self.options.tmpdir
        # Mature coinbase outputs so node 0 has spendable funds.
        self.nodes[0].generate(101)
        self.log.info("Make backup of wallet")
        self.stop_node(1)
        shutil.copyfile(self.tmpdir + "/node1/regtest/wallet.dat", self.tmpdir + "/wallet.bak")
        self.start_node(1, self.extra_args[1])
        connect_nodes_bi(self.nodes, 0, 1)
        self.log.info("Generate keys for wallet")
        # NOTE(review): list initialisers are immediately overwritten with
        # address strings by the loops below; only the last address is kept.
        addr_oldpool = []
        addr_extpool = []
        # Key 90 is inside the backed-up keypool; key 110 is beyond it.
        for _ in range(90):
            addr_oldpool = self.nodes[1].getnewaddress()
        for _ in range(20):
            addr_extpool = self.nodes[1].getnewaddress()
        self.log.info("Send funds to wallet")
        self.nodes[0].sendtoaddress(addr_oldpool, 10)
        self.nodes[0].generate(1)
        self.nodes[0].sendtoaddress(addr_extpool, 5)
        self.nodes[0].generate(1)
        sync_blocks(self.nodes)
        self.log.info("Restart node with wallet backup")
        self.stop_node(1)
        # Restore the pre-drain wallet file over the current one.
        shutil.copyfile(self.tmpdir + "/wallet.bak", self.tmpdir + "/node1/regtest/wallet.dat")
        self.log.info("Verify keypool is restored and balance is correct")
        self.start_node(1, self.extra_args[1])
        connect_nodes_bi(self.nodes, 0, 1)
        self.sync_all()
        # Both payments (10 + 5) must be visible after keypool top-up.
        assert_equal(self.nodes[1].getbalance(), 15)
        assert_equal(self.nodes[1].listtransactions()[0]['category'], "receive")
        # Check that we have marked all keys up to the used keypool key as used
        assert_equal(self.nodes[1].validateaddress(self.nodes[1].getnewaddress())['hdkeypath'], "m/0'/0'/110'")
# Test entry point.
if __name__ == '__main__':
    KeypoolRestoreTest().main()
| [
"[email protected]"
]
| |
5683033c35209cce456734e560d9bd3c07451980 | b049a961f100444dde14599bab06a0a4224d869b | /sdk/python/pulumi_azure_native/security/v20190801/__init__.py | 6309308fa63d8a40d72ad8c853ec0211cc0f2c9f | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | pulumi/pulumi-azure-native | b390c88beef8381f9a71ab2bed5571e0dd848e65 | 4c499abe17ec6696ce28477dde1157372896364e | refs/heads/master | 2023-08-30T08:19:41.564780 | 2023-08-28T19:29:04 | 2023-08-28T19:29:04 | 172,386,632 | 107 | 29 | Apache-2.0 | 2023-09-14T13:17:00 | 2019-02-24T20:30:21 | Python | UTF-8 | Python | false | false | 459 | py | # coding=utf-8
# *** WARNING: this file was generated by pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
from ... import _utilities
import typing
# Export this package's modules as members:
from ._enums import *
from .device_security_group import *
from .get_device_security_group import *
from .get_iot_security_solution import *
from .iot_security_solution import *
from ._inputs import *
from . import outputs
| [
"[email protected]"
]
| |
8f98c28591c94aca20e7258f94d6fbc06859f0fe | 6b99e6ee32d8885fd5d7501a385f66a2e73c2c56 | /manage.py | 10005c5bca0fe2f20a0568e764a99d680a57eae3 | []
| no_license | Sentret/comics_aggregator | 8e4dd03ac38599091d5996aa93be35137f9a84f8 | 7474dec39d5011495556db23336c63a02c692b20 | refs/heads/master | 2021-05-16T04:14:50.313650 | 2017-10-08T21:09:01 | 2017-10-08T21:09:01 | 105,859,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 815 | py | #!/usr/bin/env python
import os
import sys
# Django's command-line entry point for administrative tasks (runserver,
# migrate, shell, ...). Standard boilerplate generated by startproject.
if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "comics_aggregator.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    execute_from_command_line(sys.argv)
| [
"[email protected]"
]
| |
879437f3995fc2c8af33708e20e65ea71d787eed | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /benchmark/startQiskit_noisy1998.py | 8ea67d5a234bcee1b81265ef51492d40802ad06f | [
"BSD-3-Clause"
]
| permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,931 | py | # qubit number=4
# total number=32
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
    """XOR two equal-length bit strings character-wise.

    The result string is reversed relative to the inputs (the XOR of the
    first characters becomes the last character of the result).
    """
    xor_bits = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    return ''.join(reversed(xor_bits))
def bitwise_dot(s: str, t: str) -> str:
    """Dot product of two bit strings modulo 2, returned as '0' or '1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
    """Build a bit-flip oracle circuit O_f over n control qubits and 1 target.

    For every n-bit string ``rep`` with ``f(rep) == "1"``, X gates map that
    basis state onto all-ones, a multi-controlled Toffoli flips the target,
    and the X gates are undone. Cost is exponential in n (2**n iterations).
    """
    # implement the oracle O_f
    # NOTE: use multi_control_toffoli_gate ('noancilla' mode)
    # https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
    # https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
    # https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            # Flip zero-controls so the MCT fires exactly on state |rep>.
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            # Uncompute the X gates.
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    # oracle.barrier()
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Assemble the generated benchmark circuit on n qubits.

    Applies a fixed, auto-generated gate sequence (the ``# number=K``
    comments are generator bookkeeping), appends the oracle for ``f`` on
    the first n-1 qubits plus the last qubit, then measures every qubit
    into the classical register.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    prog.h(input_qubit[3]) # number=16
    prog.cz(input_qubit[0],input_qubit[3]) # number=17
    prog.h(input_qubit[3]) # number=18
    prog.x(input_qubit[3]) # number=14
    prog.cx(input_qubit[0],input_qubit[3]) # number=15
    prog.h(input_qubit[1])  # number=2
    prog.h(input_qubit[2])  # number=3
    prog.h(input_qubit[3])  # number=4
    prog.y(input_qubit[3])  # number=12
    prog.h(input_qubit[3]) # number=29
    prog.cz(input_qubit[2],input_qubit[3]) # number=30
    prog.h(input_qubit[3]) # number=31
    prog.h(input_qubit[0])  # number=5
    # Oracle acts on qubits 0..n-2 as controls and qubit n-1 as target.
    oracle = build_oracle(n-1, f)
    prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
    prog.h(input_qubit[1])  # number=6
    prog.h(input_qubit[2]) # number=24
    prog.cz(input_qubit[3],input_qubit[2]) # number=25
    prog.h(input_qubit[2]) # number=26
    prog.h(input_qubit[2])  # number=7
    prog.h(input_qubit[3])  # number=8
    prog.x(input_qubit[2]) # number=23
    prog.h(input_qubit[0])  # number=9
    prog.y(input_qubit[2])  # number=10
    prog.y(input_qubit[2])  # number=11
    prog.x(input_qubit[1]) # number=20
    prog.x(input_qubit[1]) # number=21
    prog.x(input_qubit[3]) # number=27
    prog.x(input_qubit[3]) # number=28
    # circuit end
    for i in range(n):
        prog.measure(input_qubit[i], classical[i])
    return prog
# Benchmark driver: build the circuit for f(x) = (a.x) XOR b, run it on the
# noisy FakeVigo backend, and dump counts plus the transpiled circuit to CSV.
if __name__ == '__main__':
    a = "111"
    b = "0"
    f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
    prog = make_circuit(4,f)
    backend = FakeVigo()
    sample_shot =8000
    info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
    backend = FakeVigo()
    circuit1 = transpile(prog,backend,optimization_level=2)
    writefile = open("../data/startQiskit_noisy1998.csv","w")
    print(info,file=writefile)
    print("results end", file=writefile)
    print(circuit1.__len__(),file=writefile)
    print(circuit1,file=writefile)
    writefile.close()
| [
"[email protected]"
]
| |
5a9fb527004b7c85da090fbd398b277106d50371 | e0c8662a56d89730043146ddc340e9e0b9f7de72 | /plugin/1183fe82-1596.py | 02dad21d9a024d399fccb78fe53c31cad6e8bc1d | []
| no_license | izj007/bugscan_poc | f2ef5903b30b15c230b292a1ff2dc6cea6836940 | 4490f3c36d4033bdef380577333722deed7bc758 | refs/heads/master | 2020-09-22T17:20:50.408078 | 2019-01-18T09:42:47 | 2019-01-18T09:42:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | #coding:utf-8
from lib.curl import *
# -*- coding: utf-8 -*-
"""
POC Name : sgc8000 大型旋转机监控系统报警短信模块泄露
Author : a
mail : [email protected]
refer : 打雷 http://www.wooyun.org/bugs/wooyun-2015-0135197/
波及各大能源公司,包括中石油,中石化,中海油,中煤等等等等全国各个化工能源公司
"""
import urlparse
def assign(service, arg):
    """Reduce *arg* to its site-root URL when *service* is 'sgc8000'.

    Returns a ``(True, 'scheme://host/')`` tuple for the handled service,
    and ``None`` for any other service name.
    """
    if service != 'sgc8000':
        return None
    parts = urlparse.urlparse(arg)
    return True, '{0}://{1}/'.format(parts.scheme, parts.netloc)
def audit(arg):
    """Probe <arg>/sg8k_sms/ for an exposed SGC8000 SMS alarm module.

    Emits a security warning when the page responds 200 and contains the
    SG8000 fingerprint strings.
    """
    p ="sg8k_sms/"
    url = arg + p
    code2, head, res, errcode, _ = curl.curl2(url)
    # Fingerprint: page title mentions SG8000 and exposes its SMS JS API.
    if (code2 ==200) and ('SG8000' in res) and ('getMachineList' in res) and ('cancelSendMessage' in res):
        security_warning(url)
if __name__ == '__main__':
from dummy import *
audit(assign('sgc8000', 'http://www.pindodo.com/')[1]) | [
"[email protected]"
]
| |
829b93625b164aec03032b7f6b7d6a98b68afbfb | 4522fc52bc43654aadd30421a75bae00a09044f0 | /isis/itzamara/search_item.py | 8508a4237dc9d83db9719b60b26ba7f43ce31d6f | []
| no_license | qesoalpe/anelys | 1edb8201aa80fedf0316db973da3a58b67070fca | cfccaa1bf5175827794da451a9408a26cd97599d | refs/heads/master | 2020-04-07T22:39:35.344954 | 2018-11-25T05:23:21 | 2018-11-25T05:23:21 | 158,779,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,665 | py | from isis.dialog_search_text import Dialog_Search_Text
from isis.data_model.table import Table
from sarah.acp_bson import Client
class Search_Item(Dialog_Search_Text):
    """Search dialog for Itzamara items: tries SKU, then code-ref, then a
    LIKE search on the description, optionally scoped to one store."""
    def __init__(self, parent=None):
        Dialog_Search_Text.__init__(self, parent)
        # NOTE(review): this None assignment is immediately overwritten by
        # the Client(...) below -- presumably leftover initialisation.
        self.agent_itzamara = None
        self.store = None  # optional store filter applied to description search
        self.search_by_sku = True
        self.search_by_code_ref = True
        self.agent_itzamara = Client(Search_Item.APP_ID, 'itzamara')
    def searching(self, e):
        """Resolve the query in *e*: exact SKU, then code-ref, then description.

        NOTE(review): *e* and the agent replies are accessed both as dicts
        (``e['text']``) and as attributes (``e.text``) -- this only works if
        they are attr-dict hybrids; confirm against Dialog_Search_Text.
        """
        if self.search_by_sku:
            msg = {'type_message': 'find_one', 'type': 'itzamara/item', 'query': {'sku': e['text']}}
            answer = self.agent_itzamara(msg)
            if 'result' in answer and answer['result'] is not None:
                e['selected'] = answer['result']
                return
        if self.search_by_code_ref:
            msg = {'type_message': 'request', 'request_type': 'get', 'get': 'itzamara/item_related_to_code_ref',
                   'code_ref': e.text}
            answer = self.agent_itzamara(msg)
            if 'result' in answer and answer.result is not None:
                e.selected = answer.result
                return
        # Fallback: LIKE search on the description, optionally per store.
        msg = {'type_message': 'find', 'type': 'itzamara/item', 'query': {'description': {'!like': e['text']}}}
        if self.store is not None:
            msg['query']['store'] = self.store
        answer = self.agent_itzamara.send_msg(msg)
        e['list'] = answer['result']
        # Present the matches in a two-column table bound to the result list.
        table = Table()
        e['table'] = table
        table.columns.add('sku', str)
        table.columns.add('description', str)
        table.datasource = e.list
    APP_ID = 'isis.itzamara.Search_Item'
| [
"[email protected]"
]
| |
63ede9c176a7066d977459f31d78c2ffed292262 | 9610621437f025aa97f99b67f0a5d8e13bbb715c | /com/vmware/appliance/system_client.py | a257fc441c845dd0d91497dfdb258d5d9af7b588 | [
"MIT"
]
| permissive | adammillerio/vsphere-automation-sdk-python | 2b3b730db7da99f1313c26dc738b82966ecea6ce | c07e1be98615201139b26c28db3aa584c4254b66 | refs/heads/master | 2022-11-20T03:09:59.895841 | 2020-07-17T19:32:37 | 2020-07-17T19:32:37 | 280,499,136 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,323 | py | # -*- coding: utf-8 -*-
#---------------------------------------------------------------------------
# Copyright 2020 VMware, Inc. All rights reserved.
# AUTO GENERATED FILE -- DO NOT MODIFY!
#
# vAPI stub file for package com.vmware.appliance.system.
#---------------------------------------------------------------------------
"""
The ``com.vmware.appliance.system_client`` module provides classes to query the
appliance system information. The module is available starting in vSphere 6.5.
"""
__author__ = 'VMware, Inc.'
__docformat__ = 'restructuredtext en'
import sys
from vmware.vapi.bindings import type
from vmware.vapi.bindings.converter import TypeConverter
from vmware.vapi.bindings.enum import Enum
from vmware.vapi.bindings.error import VapiError
from vmware.vapi.bindings.struct import VapiStruct
from vmware.vapi.bindings.stub import (
ApiInterfaceStub, StubFactoryBase, VapiInterface)
from vmware.vapi.bindings.common import raise_core_exception
from vmware.vapi.data.validator import (UnionValidator, HasFieldsOfValidator)
from vmware.vapi.exception import CoreException
from vmware.vapi.lib.constants import TaskType
from vmware.vapi.lib.rest import OperationRestMetadata
class Storage(VapiInterface):
"""
``Storage`` class provides methods Appliance storage configuration
"""
_VAPI_SERVICE_ID = 'com.vmware.appliance.system.storage'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _StorageStub)
self._VAPI_OPERATION_IDS = {}
class StorageMapping(VapiStruct):
"""
The ``Storage.StorageMapping`` class describes the mapping between VCSA
partitions and the Hard disk numbers visible in the vSphere Web Client.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
disk=None,
partition=None,
description=None,
):
"""
:type disk: :class:`str`
:param disk: The disk number in the vSphere Web Client.
When clients pass a value of this class as a parameter, the
attribute must be an identifier for the resource type:
``com.vmware.appliance.system.storage``. When methods return a
value of this class as a return value, the attribute will be an
identifier for the resource type:
``com.vmware.appliance.system.storage``.
:type partition: :class:`str`
:param partition: Storage partition name.
:type description: :class:`com.vmware.vapi.std_client.LocalizableMessage`
:param description: Description of partition. This attribute was added in vSphere API
6.7.
This attribute is optional because it was added in a newer version
than its parent node.
"""
self.disk = disk
self.partition = partition
self.description = description
VapiStruct.__init__(self)
StorageMapping._set_binding_type(type.StructType(
'com.vmware.appliance.system.storage.storage_mapping', {
'disk': type.IdType(resource_types='com.vmware.appliance.system.storage'),
'partition': type.StringType(),
'description': type.OptionalType(type.ReferenceType('com.vmware.vapi.std_client', 'LocalizableMessage')),
},
StorageMapping,
False,
None))
class StorageChange(VapiStruct):
"""
The ``Storage.StorageChange`` class describes the changes in capasity of a
storage partition. This class was added in vSphere API 6.7.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
old_size=None,
new_size=None,
):
"""
:type old_size: :class:`long`
:param old_size: Original size of the partition in MB. This attribute was added in
vSphere API 6.7.
:type new_size: :class:`long`
:param new_size: Nedw size of the partition in MB. This attribute was added in
vSphere API 6.7.
"""
self.old_size = old_size
self.new_size = new_size
VapiStruct.__init__(self)
StorageChange._set_binding_type(type.StructType(
'com.vmware.appliance.system.storage.storage_change', {
'old_size': type.IntegerType(),
'new_size': type.IntegerType(),
},
StorageChange,
False,
None))
def list(self):
"""
Get disk to partition mapping.
:rtype: :class:`list` of :class:`Storage.StorageMapping`
:return: list of mapping items
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
Generic error
"""
return self._invoke('list', None)
def resize(self):
"""
Resize all partitions to 100 percent of disk size.
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
Generic error
"""
return self._invoke('resize', None)
def resize_ex(self):
"""
Resize all partitions to 100 percent of disk size. This method was
added in vSphere API 6.7.
:rtype: :class:`dict` of :class:`str` and :class:`Storage.StorageChange`
:return: List of the partitions with the size before and after resizing
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
Generic error
"""
return self._invoke('resize_ex', None)
class Uptime(VapiInterface):
    """
    ``Uptime`` class provides methods to get the system uptime.

    Auto-generated vAPI binding; the wire-level details (REST route, types)
    live in the companion ``_UptimeStub`` class below.
    """
    _VAPI_SERVICE_ID = 'com.vmware.appliance.system.uptime'
    """
    Identifier of the service in canonical form.
    """
    def __init__(self, config):
        """
        :type  config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
        :param config: Configuration to be used for creating the stub.
        """
        VapiInterface.__init__(self, config, _UptimeStub)
        self._VAPI_OPERATION_IDS = {}
    def get(self):
        """
        Get the system uptime.

        :rtype: :class:`float`
        :return: system uptime (the stub declares the result as a double)
        :raise: :class:`com.vmware.vapi.std.errors_client.Error`
            Generic error
        """
        return self._invoke('get', None)
class Time(VapiInterface):
"""
``Time`` class provides methods Gets system time.
"""
_VAPI_SERVICE_ID = 'com.vmware.appliance.system.time'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _TimeStub)
self._VAPI_OPERATION_IDS = {}
class SystemTimeStruct(VapiStruct):
"""
``Time.SystemTimeStruct`` class Structure representing the system time.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
seconds_since_epoch=None,
date=None,
time=None,
timezone=None,
):
"""
:type seconds_since_epoch: :class:`float`
:param seconds_since_epoch: seconds since the epoch
:type date: :class:`str`
:param date: date format: Thu 07-31-2014
:type time: :class:`str`
:param time: time format: 18:18:32
:type timezone: :class:`str`
:param timezone: timezone
"""
self.seconds_since_epoch = seconds_since_epoch
self.date = date
self.time = time
self.timezone = timezone
VapiStruct.__init__(self)
SystemTimeStruct._set_binding_type(type.StructType(
'com.vmware.appliance.system.time.system_time_struct', {
'seconds_since_epoch': type.DoubleType(),
'date': type.StringType(),
'time': type.StringType(),
'timezone': type.StringType(),
},
SystemTimeStruct,
False,
None))
def get(self):
"""
Get system time.
:rtype: :class:`Time.SystemTimeStruct`
:return: System time
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
Generic error
"""
return self._invoke('get', None)
class Version(VapiInterface):
"""
``Version`` class provides methods Get the appliance version.
"""
_VAPI_SERVICE_ID = 'com.vmware.appliance.system.version'
"""
Identifier of the service in canonical form.
"""
def __init__(self, config):
"""
:type config: :class:`vmware.vapi.bindings.stub.StubConfiguration`
:param config: Configuration to be used for creating the stub.
"""
VapiInterface.__init__(self, config, _VersionStub)
self._VAPI_OPERATION_IDS = {}
class VersionStruct(VapiStruct):
"""
``Version.VersionStruct`` class Structure representing appliance version
information.
.. tip::
The arguments are used to initialize data attributes with the same
names.
"""
def __init__(self,
version=None,
product=None,
build=None,
type=None,
summary=None,
releasedate=None,
install_time=None,
):
"""
:type version: :class:`str`
:param version: Appliance version.
:type product: :class:`str`
:param product: Appliance name.
:type build: :class:`str`
:param build: Appliance build number.
:type type: :class:`str`
:param type: Type of product. Same product can have different deployment
options, which is represented by type.
:type summary: :class:`str`
:param summary: Summary of patch (empty string, if the appliance has not been
patched)
:type releasedate: :class:`str`
:param releasedate: Release date of patch (empty string, if the appliance has not been
patched)
:type install_time: :class:`str`
:param install_time: Display the date and time when this system was first installed.
Value will not change on subsequent updates.
"""
self.version = version
self.product = product
self.build = build
self.type = type
self.summary = summary
self.releasedate = releasedate
self.install_time = install_time
VapiStruct.__init__(self)
VersionStruct._set_binding_type(type.StructType(
'com.vmware.appliance.system.version.version_struct', {
'version': type.StringType(),
'product': type.StringType(),
'build': type.StringType(),
'type': type.StringType(),
'summary': type.StringType(),
'releasedate': type.StringType(),
'install_time': type.StringType(),
},
VersionStruct,
False,
None))
def get(self):
"""
Get the version.
:rtype: :class:`Version.VersionStruct`
:return: version information about the appliance
:raise: :class:`com.vmware.vapi.std.errors_client.Error`
Generic error
"""
return self._invoke('get', None)
class _StorageStub(ApiInterfaceStub):
def __init__(self, config):
# properties for list operation
list_input_type = type.StructType('operation-input', {})
list_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
}
list_input_value_validator_list = [
]
list_output_validator_list = [
]
list_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/appliance/system/storage',
path_variables={
},
query_parameters={
}
)
# properties for resize operation
resize_input_type = type.StructType('operation-input', {})
resize_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
}
resize_input_value_validator_list = [
]
resize_output_validator_list = [
]
resize_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/appliance/system/storage/resize',
path_variables={
},
query_parameters={
}
)
# properties for resize_ex operation
resize_ex_input_type = type.StructType('operation-input', {})
resize_ex_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
}
resize_ex_input_value_validator_list = [
]
resize_ex_output_validator_list = [
]
resize_ex_rest_metadata = OperationRestMetadata(
http_method='POST',
url_template='/appliance/system/storage?action=resize-ex',
path_variables={
},
query_parameters={
}
)
operations = {
'list': {
'input_type': list_input_type,
'output_type': type.ListType(type.ReferenceType(__name__, 'Storage.StorageMapping')),
'errors': list_error_dict,
'input_value_validator_list': list_input_value_validator_list,
'output_validator_list': list_output_validator_list,
'task_type': TaskType.NONE,
},
'resize': {
'input_type': resize_input_type,
'output_type': type.VoidType(),
'errors': resize_error_dict,
'input_value_validator_list': resize_input_value_validator_list,
'output_validator_list': resize_output_validator_list,
'task_type': TaskType.NONE,
},
'resize_ex': {
'input_type': resize_ex_input_type,
'output_type': type.MapType(type.StringType(), type.ReferenceType(__name__, 'Storage.StorageChange')),
'errors': resize_ex_error_dict,
'input_value_validator_list': resize_ex_input_value_validator_list,
'output_validator_list': resize_ex_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'list': list_rest_metadata,
'resize': resize_rest_metadata,
'resize_ex': resize_ex_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.appliance.system.storage',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _UptimeStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/appliance/system/uptime',
path_variables={
},
query_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.DoubleType(),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.appliance.system.uptime',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _TimeStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/appliance/system/time',
path_variables={
},
query_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType(__name__, 'Time.SystemTimeStruct'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.appliance.system.time',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class _VersionStub(ApiInterfaceStub):
def __init__(self, config):
# properties for get operation
get_input_type = type.StructType('operation-input', {})
get_error_dict = {
'com.vmware.vapi.std.errors.error':
type.ReferenceType('com.vmware.vapi.std.errors_client', 'Error'),
}
get_input_value_validator_list = [
]
get_output_validator_list = [
]
get_rest_metadata = OperationRestMetadata(
http_method='GET',
url_template='/appliance/system/version',
path_variables={
},
query_parameters={
}
)
operations = {
'get': {
'input_type': get_input_type,
'output_type': type.ReferenceType(__name__, 'Version.VersionStruct'),
'errors': get_error_dict,
'input_value_validator_list': get_input_value_validator_list,
'output_validator_list': get_output_validator_list,
'task_type': TaskType.NONE,
},
}
rest_metadata = {
'get': get_rest_metadata,
}
ApiInterfaceStub.__init__(
self, iface_name='com.vmware.appliance.system.version',
config=config, operations=operations, rest_metadata=rest_metadata,
is_vapi_rest=True)
class StubFactory(StubFactoryBase):
    """Stub factory for the ``com.vmware.appliance.system`` package.

    Maps attribute names to the service stub classes defined above; the
    ``time`` entry refers to the nested sub-module's factory by dotted
    string path rather than by direct class reference.
    """
    _attrs = {
        'Storage': Storage,
        'Uptime': Uptime,
        'Time': Time,
        'Version': Version,
        'time': 'com.vmware.appliance.system.time_client.StubFactory',
    }
| [
"[email protected]"
]
| |
272af93c538fed750477de3a44e4113b6286c109 | b2d3bd39b2de8bcc3b0f05f4800c2fabf83d3c6a | /examples/pwr_run/checkpointing/throughput/final2_inverse/job38.py | cb01d22cc7b7a1de16176929f6f3423c3617d33e | [
"MIT"
]
| permissive | boringlee24/keras_old | 3bf7e3ef455dd4262e41248f13c04c071039270e | 1e1176c45c4952ba1b9b9e58e9cc4df027ab111d | refs/heads/master | 2021-11-21T03:03:13.656700 | 2021-11-11T21:57:54 | 2021-11-11T21:57:54 | 198,494,579 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,325 | py | """
#Trains a ResNet on the CIFAR10 dataset.
"""
from __future__ import print_function
import keras
from keras.layers import Dense, Conv2D, BatchNormalization, Activation
from keras.layers import AveragePooling2D, Input, Flatten
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.callbacks import ReduceLROnPlateau, TensorBoard
from keras.preprocessing.image import ImageDataGenerator
from keras.regularizers import l2
from keras import backend as K
from keras.models import Model
from keras.datasets import cifar10
from keras.applications.mobilenet_v2 import MobileNetV2
from keras import models, layers, optimizers
from datetime import datetime
import tensorflow as tf
import numpy as np
import os
import pdb
import sys
import argparse
import time
import signal
import glob
import json
import send_signal
parser = argparse.ArgumentParser(description='Tensorflow Cifar10 Training')
parser.add_argument('--tc', metavar='TESTCASE', type=str, help='specific testcase name')
parser.add_argument('--resume', dest='resume', action='store_true', help='if True, resume training from a checkpoint')
parser.add_argument('--gpu_num', metavar='GPU_NUMBER', type=str, help='select which gpu to use')
parser.add_argument('--node', metavar='HOST_NODE', type=str, help='node of the host (scheduler)')
parser.set_defaults(resume=False)
args = parser.parse_args()
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]=args.gpu_num
# Training parameters
batch_size = 64
args_lr = 0.0015
epoch_begin_time = 0
job_name = sys.argv[0].split('.')[0]
save_files = '/scratch/li.baol/checkpoint_final2_inverse/' + job_name + '*'
total_epochs = 83
starting_epoch = 0
# first step is to update the PID
pid = os.getpid()
message = job_name + ' pid ' + str(pid) # 'job50 pid 3333'
send_signal.send(args.node, 10002, message)
if args.resume:
save_file = glob.glob(save_files)[0]
# epochs = int(save_file.split('/')[4].split('_')[1].split('.')[0])
starting_epoch = int(save_file.split('/')[4].split('.')[0].split('_')[-1])
data_augmentation = True
num_classes = 10
# Subtracting pixel mean improves accuracy
subtract_pixel_mean = True
n = 3
# Model name, depth and version
model_type = args.tc #'P100_resnet50_he_256_1'
# Load the CIFAR10 data.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Normalize data.
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
# If subtract pixel mean is enabled
if subtract_pixel_mean:
x_train_mean = np.mean(x_train, axis=0)
x_train -= x_train_mean
x_test -= x_train_mean
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
print('y_train shape:', y_train.shape)
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
if args.resume:
print('resume from checkpoint')
message = job_name + ' b_end'
send_signal.send(args.node, 10002, message)
model = keras.models.load_model(save_file)
message = job_name + ' c_end'
send_signal.send(args.node, 10002, message)
else:
print('train from start')
model = models.Sequential()
base_model = MobileNetV2(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
#base_model.summary()
#pdb.set_trace()
model.add(base_model)
model.add(layers.Flatten())
#model.add(layers.BatchNormalization())
#model.add(layers.Dense(128, activation='relu'))
#model.add(layers.Dropout(0.5))
#model.add(layers.BatchNormalization())
#model.add(layers.Dense(64, activation='relu'))
#model.add(layers.Dropout(0.5))
#model.add(layers.BatchNormalization())
model.add(layers.Dense(10, activation='softmax'))
model.compile(loss='categorical_crossentropy',
optimizer=Adam(lr=args_lr),
metrics=['accuracy'])
#model.summary()
print(model_type)
#pdb.set_trace()
current_epoch = 0
################### connects interrupt signal to the process #####################
def terminateProcess(signalNumber, frame):
    """SIGTERM handler: report wasted epoch time, checkpoint the model, exit.

    Reads module-level state (``epoch_begin_time``, ``current_epoch``,
    ``model``, ``job_name``, ``save_files``, ``args``).  Registered below via
    ``signal.signal(signal.SIGTERM, terminateProcess)``.
    """
    # first record the wasted epoch time
    global epoch_begin_time
    if epoch_begin_time == 0:
        epoch_waste_time = 0  # no epoch has started yet, nothing was wasted
    else:
        epoch_waste_time = int(time.time() - epoch_begin_time)
    message = job_name + ' waste ' + str(epoch_waste_time) # 'job50 waste 100'
    if epoch_waste_time > 0:
        send_signal.send(args.node, 10002, message)
    print('checkpointing the model triggered by kill -15 signal')
    # delete whatever checkpoint that already exists
    for f in glob.glob(save_files):
        os.remove(f)
    # Save a fresh checkpoint tagged with the epoch we were interrupted in.
    model.save('/scratch/li.baol/checkpoint_final2_inverse/' + job_name + '_' + str(current_epoch) + '.h5')
    print ('(SIGTERM) terminating the process')
    # Tell the scheduler a usable checkpoint now exists for this job.
    message = job_name + ' checkpoint'
    send_signal.send(args.node, 10002, message)
    sys.exit()
signal.signal(signal.SIGTERM, terminateProcess)
#################################################################################
logdir = '/scratch/li.baol/tsrbrd_log/job_runs/' + model_type + '/' + job_name
tensorboard_callback = TensorBoard(log_dir=logdir)#, update_freq='batch')
first_epoch_start = 0
class PrintEpoch(keras.callbacks.Callback):
    """Keras callback reporting training progress to the scheduler node.

    Maintains module-level bookkeeping used by the SIGTERM checkpoint
    handler (``current_epoch``, ``epoch_begin_time``, ``first_epoch_start``)
    and sends status messages ('d_end', 'ckpt_qual', '1st_epoch',
    'completion') to the scheduler via ``send_signal``.
    """
    def on_epoch_begin(self, epoch, logs=None):
        """Record epoch start time and signal scheduler state transitions."""
        global current_epoch, first_epoch_start
        #remaining_epochs = epochs - epoch
        current_epoch = epoch
        print('current epoch ' + str(current_epoch))
        global epoch_begin_time
        epoch_begin_time = time.time()  # read by terminateProcess to compute wasted time
        if epoch == starting_epoch and args.resume:
            # Resumed run: 'd_end' marks the end of the restore phase.
            first_epoch_start = time.time()
            message = job_name + ' d_end'
            send_signal.send(args.node, 10002, message)
        elif epoch == starting_epoch:
            first_epoch_start = time.time()
        if epoch == starting_epoch:
            # send signal to indicate checkpoint is qualified
            message = job_name + ' ckpt_qual'
            send_signal.send(args.node, 10002, message)
    def on_epoch_end(self, epoch, logs=None):
        """Report first-epoch duration and overall completion fraction."""
        if epoch == starting_epoch:
            first_epoch_time = int(time.time() - first_epoch_start)
            message = job_name + ' 1st_epoch ' + str(first_epoch_time)
            send_signal.send(args.node, 10002, message)
        # Fraction of the halved epoch budget completed so far (2 decimals).
        progress = round((epoch+1) / round(total_epochs/2), 2)
        message = job_name + ' completion ' + str(progress)
        send_signal.send(args.node, 10002, message)
my_callback = PrintEpoch()
callbacks = [tensorboard_callback, my_callback]
#[checkpoint, lr_reducer, lr_scheduler, tensorboard_callback]
# Run training
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=round(total_epochs/2),
validation_data=(x_test, y_test),
shuffle=True,
callbacks=callbacks,
initial_epoch=starting_epoch,
verbose=1
)
# Score trained model.
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
# send signal to indicate job has finished
message = job_name + ' finish'
send_signal.send(args.node, 10002, message)
| [
"[email protected]"
]
| |
6834426075b03b496ae3de4b06d6f72d73bf5839 | 35a10ea7657fb28b4ae5a95045bc8e715b0b8d1c | /mysite/main/migrations/0005_delete_essaycategory.py | 3baecf7fefd629a67d9303b72c037ffca744b4da | [
"MIT"
]
| permissive | nsky80/editorial | d7c978be4b8b8ea1cec6b764dd2e9860ebdf0867 | e85106e32d5d5ff8b9ac7f140b0c8f67d34b2dc0 | refs/heads/master | 2020-04-29T08:41:57.601027 | 2020-03-05T18:37:02 | 2020-03-05T18:37:02 | 175,995,388 | 2 | 0 | MIT | 2019-08-19T18:29:58 | 2019-03-16T16:20:23 | HTML | UTF-8 | Python | false | false | 299 | py | # Generated by Django 2.2.1 on 2019-07-21 09:41
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated (Django 2.2.1) migration that removes the
    ``EssayCategory`` model from the ``main`` app."""
    dependencies = [
        ('main', '0004_auto_20190316_0140'),
    ]
    operations = [
        # Deletes the EssayCategory model (and its backing table) from app state.
        migrations.DeleteModel(
            name='EssayCategory',
        ),
    ]
| [
"[email protected]"
]
| |
67c25e2ceff629da5c3493d2c01f251996768911 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_244/ch1_2019_03_14_17_05_12_929051.py | cfc8916fb07baf0253baedef1f8239e1e722ce14 | []
def calcula_valor_devido(x, y, z):
    """Future (owed) value of principal ``x`` compounded for ``y`` periods at rate ``z``.

    Fixes the original ``return VF = x * (1 + z)**y``, which was a
    SyntaxError: Python does not allow assignment inside a return statement.
    """
    return x * (1 + z) ** y
| [
"[email protected]"
]
| |
35b09022fa3e5caa076b0ac3a5627233549ded43 | 55c552b03a07dcfa2d621b198aa8664d6ba76b9a | /Algorithm/BOJ/4134_다음 소수_s4/4134.py | 7845e2c7cdb25cd978361a4bf6a02925ddce3a0b | []
| no_license | LastCow9000/Algorithms | 5874f1523202c10864bdd8bb26960953e80bb5c0 | 738d7e1b37f95c6a1b88c99eaf2bc663b5f1cf71 | refs/heads/master | 2023-08-31T12:18:45.533380 | 2021-11-07T13:24:32 | 2021-11-07T13:24:32 | 338,107,899 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 709 | py | # boj 4134 다음 소수 s4
# noj.am/4134
for _ in range(int(input())):
n = int(input())
if n <= 1:
print(2)
continue
maxRange = 80000
flag = [False, False] + [True] * (maxRange - 1)
for num in range(2, maxRange + 1):
if flag[num]:
for i in range(num + num, maxRange + 1, num):
flag[i] = False
num = n
while True:
for i in range(int(num ** 0.5) + 1):
if flag[i]:
if num % i == 0:
num += 1
break
else:
print(num)
break
'''
특정 수의 양의 제곱근 이하의 소수들로 나누어 떨어지면 소수x
''' | [
"[email protected]"
]
| |
5dc7334bd95e7f16687b5903ecfc180f29cb6d4a | f6d7c30a7ed343e5fe4859ceaae1cc1965d904b7 | /htdocs/submissions/5dc7334bd95e7f16687b5903ecfc180f29cb6d4a.py | f240b858556dd23d3f4d394931854f0d7c911994 | []
| no_license | pycontest/pycontest.github.io | ed365ebafc5be5d610ff9d97001240289de697ad | 606015cad16170014c41e335b1f69dc86250fb24 | refs/heads/master | 2021-01-10T04:47:46.713713 | 2016-02-01T11:03:46 | 2016-02-01T11:03:46 | 50,828,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | n=' .'
m=' _ .'
e='| |.'
a='|_|.'
r=' |.'
l='| .'
d=' _|.'
s='|_ .'
n=m+e+a,n+r+r,m+d+s,m+d+d,n+a+r,m+s+d,m+s+a,m+r+r,m+a+a,m+a+d
def seven_seg(x):
a=['']*3
for d in x:
l=n[int(d)].split('.')
for z in range(3):a[z]+=l[z]
return '\n'.join(a)+'\n'
| [
"[email protected]"
]
| |
536ff942f90b91a7fb29e3a9076d36b582318420 | f50f1aa1f8f139d546db3230a1cb1f53043fd9e6 | /multimedia/converter/dvdbackup/actions.py | 2d25a92cb98af0288858998d4ff57bf65653cc77 | []
| no_license | pars-linux/corporate2 | 7887961d1552d39bc3b0bef4a60fd3413d9b82bb | 14d1eacfc824fb8d0bff8173e7ac06b36b88d10d | refs/heads/master | 2020-05-26T15:02:12.005654 | 2017-02-27T03:07:14 | 2017-02-27T03:07:14 | 82,476,084 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2007-2009 TUBITAK/UEKAE
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
WorkDir = "dvdbackup"
def build():
    """Compile dvdbackup in one gcc invocation against libdvdread
    (the upstream tarball ships no configure/make machinery)."""
    autotools.compile("-I/usr/include/dvdread -o dvdbackup src/dvdbackup.c -ldvdread")
def install():
    """Install the built ``dvdbackup`` binary into the package image's bin dir."""
    pisitools.dobin("dvdbackup")
| [
"[email protected]"
]
| |
05d70434fa49d8b43242b5bc319959b97b833cbb | f1cb02057956e12c352a8df4ad935d56cb2426d5 | /LeetCode/2402. Meeting Rooms III/Solution.py | e04582cc6433fc14b232d16a3615a444f3a02378 | []
| no_license | nhatsmrt/AlgorithmPractice | 191a6d816d98342d723e2ab740e9a7ac7beac4ac | f27ba208b97ed2d92b4c059848cc60f6b90ce75e | refs/heads/master | 2023-06-10T18:28:45.876046 | 2023-05-26T07:46:42 | 2023-05-26T07:47:10 | 147,932,664 | 15 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,454 | py | MEETING_START = 1
MEETING_END = 0
class Solution:
def mostBooked(self, n: int, meetings: List[List[int]]) -> int:
# Time Complexity: O(M log MN)
# Space Complexity: O(N + M)
events = [(start, MEETING_START, end - start) for start, end in meetings]
pending = []
available_rooms = list(range(n))
heapq.heapify(events)
meeting_cnter = Counter()
while events:
event = heapq.heappop(events)
event_type = event[1]
cur_time = event[0]
if event_type == MEETING_START:
duration = event[2]
if available_rooms:
room = heapq.heappop(available_rooms)
heapq.heappush(events, (cur_time + duration, MEETING_END, room))
meeting_cnter[room] += 1
else:
heapq.heappush(pending, (cur_time, duration))
elif event_type == MEETING_END:
room = event[2]
if pending:
_, duration = heapq.heappop(pending)
heapq.heappush(events, (cur_time + duration, MEETING_END, room))
meeting_cnter[room] += 1
else:
heapq.heappush(available_rooms, room)
ret = 0
for room in range(n):
if meeting_cnter[room] > meeting_cnter[ret]:
ret = room
return ret
| [
"[email protected]"
]
| |
d3b472805b2615dba2cc942d9347ee58fddd00d3 | c3c5e21f02dc1ce325e4ba0ea49f04503b2124e5 | /Code/bigger_nn/plot_data.py | db6d913bed2c04cdfd9179ac0b7baf3b67594253 | []
| no_license | Rootpie-Studios/RL-in-HaliteIV | 5fdd76cc5523deec2847059cc6237d638c2a9881 | 431f35d47b898e68983772f9b908764741347ad5 | refs/heads/master | 2023-06-05T20:21:07.543805 | 2021-06-21T11:18:57 | 2021-06-21T11:18:57 | 378,900,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | import tensorflow as tf
import src.plot as plot
import bigger_nn.conf as conf
user_choice = input('Plot exploit data? y/n \n')
if user_choice == 'y':
folder = conf.get('EXPLOIT_GAMES_FOLDER')
else:
folder = conf.get('GAMES_FOLDER')
try:
model = tf.keras.models.load_model(conf.get('SHIP_MODEL'))
except:
model = conf.get('build_model')()
model.save(conf.get('SHIP_MODEL'), save_format='tf')
plot.plot_progress(10, folder, conf.get('NAME'), conf.get('AGENT2')[:-3], model, conf.get('input_data')) | [
"[email protected]"
]
| |
c12b59a23c758ac14e36e2ed849148850d9a5571 | bc441bb06b8948288f110af63feda4e798f30225 | /capacity_admin_sdk/model/container/pod_status_pb2.py | 6de9ae973841286f0c359c4fb191d12570e42f8d | [
"Apache-2.0"
]
| permissive | easyopsapis/easyops-api-python | 23204f8846a332c30f5f3ff627bf220940137b6b | adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0 | refs/heads/master | 2020-06-26T23:38:27.308803 | 2020-06-16T07:25:41 | 2020-06-16T07:25:41 | 199,773,131 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | true | 4,896 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: pod_status.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from capacity_admin_sdk.model.container import container_status_pb2 as capacity__admin__sdk_dot_model_dot_container_dot_container__status__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='pod_status.proto',
package='container',
syntax='proto3',
serialized_options=_b('ZCgo.easyops.local/contracts/protorepo-models/easyops/model/container'),
serialized_pb=_b('\n\x10pod_status.proto\x12\tcontainer\x1a\x39\x63\x61pacity_admin_sdk/model/container/container_status.proto\"\xbc\x01\n\tPodStatus\x12\r\n\x05phase\x18\x01 \x01(\t\x12\x0f\n\x07message\x18\x02 \x01(\t\x12\x39\n\x15initContainerStatuses\x18\x03 \x03(\x0b\x32\x1a.container.ContainerStatus\x12\x35\n\x11\x63ontainerStatuses\x18\x04 \x03(\x0b\x32\x1a.container.ContainerStatus\x12\x0e\n\x06hostIP\x18\x05 \x01(\t\x12\r\n\x05podIP\x18\x06 \x01(\tBEZCgo.easyops.local/contracts/protorepo-models/easyops/model/containerb\x06proto3')
,
dependencies=[capacity__admin__sdk_dot_model_dot_container_dot_container__status__pb2.DESCRIPTOR,])
_PODSTATUS = _descriptor.Descriptor(
name='PodStatus',
full_name='container.PodStatus',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='phase', full_name='container.PodStatus.phase', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='message', full_name='container.PodStatus.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='initContainerStatuses', full_name='container.PodStatus.initContainerStatuses', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='containerStatuses', full_name='container.PodStatus.containerStatuses', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='hostIP', full_name='container.PodStatus.hostIP', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='podIP', full_name='container.PodStatus.podIP', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=91,
serialized_end=279,
)
# ---- Descriptor wiring (machine-generated by protoc; regenerate from
# ---- pod_status.proto rather than editing by hand). ----
# Resolve the message type of the two repeated ContainerStatus fields.
_PODSTATUS.fields_by_name['initContainerStatuses'].message_type = capacity__admin__sdk_dot_model_dot_container_dot_container__status__pb2._CONTAINERSTATUS
_PODSTATUS.fields_by_name['containerStatuses'].message_type = capacity__admin__sdk_dot_model_dot_container_dot_container__status__pb2._CONTAINERSTATUS
# Register the PodStatus message with the file descriptor and symbol database.
DESCRIPTOR.message_types_by_name['PodStatus'] = _PODSTATUS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
# Create the concrete Python message class from the descriptor via reflection.
PodStatus = _reflection.GeneratedProtocolMessageType('PodStatus', (_message.Message,), {
  'DESCRIPTOR' : _PODSTATUS,
  '__module__' : 'pod_status_pb2'
  # @@protoc_insertion_point(class_scope:container.PodStatus)
  })
_sym_db.RegisterMessage(PodStatus)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
]
| |
bc054ee3a41b51ba0c02fd50959e5203a9ce1be3 | cf3e0cd574248629ebeacb224fe96d3df19ea9ca | /django_todo_in_team/settings.py | d5bfaa75efdd103703ca23205c24708053e97cc2 | []
| no_license | ashur-k/Team-work-Hub | 34b9d9ec43cca53d11e072fd6a68e831ee6b4795 | 4da991e3166f8650cb24024ede07c485e9ee9dda | refs/heads/master | 2023-03-12T12:19:15.456078 | 2021-03-01T22:01:11 | 2021-03-01T22:01:11 | 340,626,504 | 0 | 0 | null | 2021-02-20T10:42:06 | 2021-02-20T10:29:03 | Shell | UTF-8 | Python | false | false | 3,416 | py | """
Django settings for django_todo_in_team project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# Prefer the environment; the committed value remains only as a
# development fallback so existing setups keep working.
SECRET_KEY = os.environ.get(
    'DJANGO_SECRET_KEY',
    '+ivqwhb1)y)^hu@1ud@8*t1y&+q2(9+j(x%2^9_wj^sv^zonld')

# SECURITY WARNING: don't run with debug turned on in production!
# Defaults to True (development); export DJANGO_DEBUG=False to disable.
DEBUG = os.environ.get('DJANGO_DEBUG', 'True') == 'True'

# Host/domain names this site may serve; must be populated when DEBUG is off.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'todo_in_team.apps.TodoInTeamConfig',
'rest_framework',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_todo_in_team.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
os.path.join(BASE_DIR, 'templates'),
os.path.join(BASE_DIR, 'templates', 'allauth'),
],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_todo_in_team.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
| [
"[email protected]"
]
| |
e2edf2037288c178e8a0f0e1fa79e543746def5c | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Games/Pygame/pygame-vkeyboard/pygame_vkeyboard/examples/numeric.py | 06131ee38998682cbdc8eb7bbac5590455f99b08 | []
| no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:79af64ea269c1a2a6da43c377fc8ff4650b04b20a967d881019b3c32032044c3
size 1411
| [
"[email protected]"
]
| |
e270bcdfe76d6e527c850b2cd3d2c445f116010a | 50ae1a0b7e0ebe031f3a45193e213fa7384cef23 | /uncertainty_baselines/datasets/places_test.py | ff649aa55888cee6af014940ade99f305bd14fcb | [
"Apache-2.0"
]
| permissive | MarkoOrescanin/uncertainty-baselines | 8634ec2ddbf7d8ca4d342f37b108d3e8787254f8 | 7256ab3b126e1dcc3fc796370e8ce94c688c1520 | refs/heads/main | 2023-06-11T17:32:08.594735 | 2021-07-01T22:15:20 | 2021-07-01T22:15:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 985 | py | # coding=utf-8
# Copyright 2021 The Uncertainty Baselines Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Lint as: python3
"""Tests for Places-365."""
import tensorflow as tf
import uncertainty_baselines as ub
class Places365DatasetTest(ub.datasets.DatasetTest):
  """Smoke test for the Places-365 dataset builder.

  Delegates to the shared _testDatasetSize helper with the expected
  224x224x3 image shape and a 10% validation split.
  """

  def testDatasetSize(self):
    # Python 3 zero-argument super() (this file is declared python3).
    super()._testDatasetSize(
        ub.datasets.Places365Dataset, (224, 224, 3), validation_percent=0.1)


if __name__ == "__main__":
  tf.test.main()
| [
"[email protected]"
]
| |
40a01e3075679851cc169322b9dbbbc9dc892738 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/exclamations/_heeling.py | ffab0ee927e6e8b45a4426cdd4c700dded04cec9 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 242 | py |
from xai.brain.wordbase.exclamations._heel import _HEEL
#calss header
class _HEELING(_HEEL):
    """Word entry for "heeling", derived from the base word "heel"."""

    def __init__(self):
        # Initialise the base-word entry first, then override the fields
        # that identify this derived form.
        super().__init__()
        self.name = "HEELING"
        self.specie = 'exclamations'
        self.basic = "heel"
        self.jsondata = {}
| [
"[email protected]"
]
| |
2d4172e12adf3d83dd245b7a72488ead42370f77 | 4d675034878c4b6510e1b45b856cc0a71af7f886 | /configs/gcnet/cascade_mask_rcnn_x101_32x4d_fpn_syncbn-backbone_dconv_c3-c5_r16_gcb_c3-c5_1x_coco.py | 50689aadf6cab9414aab1a7a9e72ef8231355e4f | [
"Apache-2.0",
"BSD-2-Clause-Views",
"MIT",
"BSD-2-Clause"
]
| permissive | shinya7y/UniverseNet | 101ebc2ad8f15482ee45ea8d6561aa338a0fa49e | 3652b18c7ce68122dae7a32670624727d50e0914 | refs/heads/master | 2023-07-22T08:25:42.646911 | 2023-07-08T18:09:34 | 2023-07-08T18:09:34 | 263,555,721 | 407 | 58 | Apache-2.0 | 2023-01-27T01:13:31 | 2020-05-13T07:23:43 | Python | UTF-8 | Python | false | false | 390 | py | _base_ = '../dcn/cascade_mask_rcnn_x101_32x4d_fpn_dconv_c3-c5_1x_coco.py'
model = dict(
    backbone=dict(
        # Synchronized BatchNorm across GPUs, with BN layers kept trainable
        # (norm_eval=False) instead of using frozen statistics.
        norm_cfg=dict(type='SyncBN', requires_grad=True),
        norm_eval=False,
        plugins=[
            dict(
                # Global Context (GCNet) block with a 1/16 bottleneck ratio,
                # enabled for stages 2-4 (c3-c5) and inserted after conv3.
                cfg=dict(type='ContextBlock', ratio=1. / 16),
                stages=(False, True, True, True),
                position='after_conv3')
        ]))
| [
"[email protected]"
]
| |
5f5b8c3d8d2cd2aa32541dee22abcced270af05c | 18b9251055f88b6fc28108d2c209d2b71b6b6f5d | /rebnypy/lookups.py | 7c5a5411b6df113baed6e7a21d440d6121db1068 | [
"MIT"
]
| permissive | justinsteffy/rebnypy | 03335509513e4ad3f7cb999723db284b5936cd98 | e1ca47401d1ffc64d7969a73831de8a63a83751b | refs/heads/master | 2020-04-04T05:22:08.026875 | 2016-08-31T03:17:13 | 2016-08-31T03:17:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,867 | py | LOOKUPS = {
"AirConditioning": {
"C":"Central",
"F":"Free Standing",
"M":"Multi-Zone",
"N":"None",
"T":"Through the Wall",
"U":"Unknown Type",
"W":"Window Units",
},
"Borough": {
"BK":"Brooklyn",
"BX":"Bronx",
"NY":"Manhattan",
"QN":"Queens",
"SI":"Staten Island",
},
"BuildingAccess": {
"A":"Attended Elevator",
"E":"Elevator",
"K":"Keyed Elevator",
"N":"None",
"W":"Walk-up",
},
"BuildingAge": {
"O":"Post-war",
"R":"Pre-war",
},
"BuildingType": {
"D":"Development Site",
"F":"Loft",
"G":"Garage",
"H":"High-Rise",
"L":"Low-Rise",
"M":"Mid-Rise",
"O":"Hotel",
"P":"Parking Lot",
"S":"House",
"T":"Townhouse",
"V":"Vacant Lot",
},
"Heat": {
"B":"Baseboard",
"C":"Central",
"E":"Electric",
"G":"Gas",
"M":"Multi-Zone",
"O":"Oil",
"R":"Radiator",
"U":"Unknown Type",
},
"LeaseTerm": {
"1":"One Year",
"2":"Two Year",
"3":"Short-term",
"4":"Month-to-month",
"5":"Specific term",
"6":"One or Two year",
"7":"Short or Long term",
},
"LeaseType": {
"B":"Stabilized Lease",
"C":"Commercial",
"N":"Non-Stabilized Lease",
"On-Line":"Residential, Inc | IDX API documentation v1.0 | Published 11/01/2014 | Page 27 of 29",
"S":"Stabilized Sublease",
"U":"Non-Stabilized Sublease",
},
# Docs say ListingStatus, but the data is actually Status. So I'm duplicating this lookup here
"Status": {
"A":"Active",
"B":"Board Approved",
"C":"Contract Signed",
"E":"Leases Signed",
"H":"TOM",
"I":"POM",
"J":"Exclusive Expired",
"L":"Leases Out",
"O":"Contract Out",
"P":"Offer Accepted/Application",
"R":"Rented",
"S":"Sold",
},
"ListingStatus": {
"A":"Active",
"B":"Board Approved",
"C":"Contract Signed",
"E":"Leases Signed",
"H":"TOM",
"I":"POM",
"J":"Exclusive Expired",
"L":"Leases Out",
"O":"Contract Out",
"P":"Offer Accepted/Application",
"R":"Rented",
"S":"Sold",
},
"ListingStatusRental": {
"A":"Active",
"E":"Leases Signed",
"H":"TOM",
"I":"POM",
"J":"Exclusive Expired",
"L":"Leases Out",
"P":"Application",
"R":"Rented",
},
"ListingStatusSale": {
"A":"Active",
"B":"Board Approved",
"C":"Contract Signed",
"H":"TOM",
"I":"POM",
"J":"Exclusive Expired",
"O":"Contract Out",
"P":"Offer Accepted",
"S":"Sold",
},
"ListingType": {
"A":"Ours Alone",
"B":"Exclusive",
"C":"COF",
"L":"Limited",
"O":"Open",
"Y":"Courtesy",
"Z":"Buyer's Broker",
},
"MediaType": {
"F":"Floor plan",
"I":"Interior Photo",
"M":"Video",
"O":"Other",
"V":"Virtual Tour",
},
"Ownership": {
"C":"Commercial",
"D":"Condop",
"G":"Garage",
"I":"Income Property",
"M":"Multi-Family",
"N":"Condo",
"P":"Co-op",
"R":"Rental Property",
"S":"Single Family",
"T":"Institutional",
"V":"Development Site",
"X":"Mixed Use",
},
"PayPeriod": {
"M":"Monthly",
"Y":"Yearly",
},
"PetPolicy": {
"A":"Pets Allowed",
"C":"Case By Case",
"D":"No Dogs",
"N":"No Pets",
"T":"No Cats",
},
"SalesOrRent": {
"R":"Apartment for Rent",
"S":"Apartment for Sale",
"T":"Building for Sale",
},
"ServiceLevel": {
"A":"Attended Lobby",
"C":"Concierge",
"F":"Full Time Doorman",
"I":"Voice Intercom",
"N":"None",
"P":"Part Time Doorman",
"S":"Full Service",
"U":"Virtual Doorman",
"V":"Video Intercom",
}
}
def expand_row(row):
    """Return a copy of *row* with coded values translated via LOOKUPS.

    Fields named in LOOKUPS are mapped to their human-readable text
    ('UNKNOWN' when the code is missing from the table); nested mappings
    are expanded recursively; everything else passes through unchanged.
    """
    def _expand(field, value):
        if field in LOOKUPS:
            return LOOKUPS[field].get(value, 'UNKNOWN')
        if hasattr(value, 'items'):
            return expand_row(value)
        return value

    return {field: _expand(field, value) for field, value in row.items()}
| [
"[email protected]"
]
| |
22ca343e3f7395a467d41262e0894c3079afe3eb | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_9542.py | 377c17d361a2194f088528cf78b28ae16b57ab04 | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 175 | py | # Python: getting lowest integer in list of tuples
>>> nums = [(), (), ('24', '25', '26', '27'), (), (), (), ()]
>>> min(int(j) for i in nums for j in i)
24
| [
"[email protected]"
]
| |
16b4b2d17bcc535c3735614aee383f6eb07d2f39 | c35d2b782a2626e16d72e03902cb37ab8de5548b | /ChikluFood/ChikluFood/settings.py | 18ee419de5096165b1beedb3363419508c174314 | []
| no_license | Sanketdave12/CHIKLU-FOOD | 6139af05138afe5e4322fcb47d167baf22c548a0 | a3fa7e781414f55e6629660771ed2b62c107c8e8 | refs/heads/master | 2023-03-12T03:54:07.015027 | 2021-02-27T14:38:52 | 2021-02-27T14:38:52 | 342,878,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,145 | py | """
Django settings for ChikluFood project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '_5g5s1*^e+gmrldh*1lg7@eowde)!^3l69xe_j1l*@4mkx@pw)'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'ui.apps.UiConfig',
'api.apps.ApiConfig',
'rest_framework'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'ChikluFood.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'ChikluFood.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
b0839533fd268a50e8230932fdb9563bcd2b8a32 | 264ff719d21f2f57451f322e9296b2f55b473eb2 | /gvsoc/gvsoc/models/pulp/chips/oprecompkw/apb_soc.py | 3b5776e7f50d951a051a5409a2b768e0d1b931f0 | [
"Apache-2.0"
]
| permissive | knmcguire/gap_sdk | 06c9537c16fa45dea6b7f5c6b162b53953262915 | 7b0a09a353ab6f0550793d40bd46e98051f4a3d7 | refs/heads/master | 2020-12-20T06:51:19.580497 | 2020-01-21T14:52:28 | 2020-01-21T14:52:28 | 235,992,961 | 0 | 0 | Apache-2.0 | 2020-01-24T11:45:59 | 2020-01-24T11:45:58 | null | UTF-8 | Python | false | false | 786 | py | #
# Copyright (C) 2018 ETH Zurich and University of Bologna
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Authors: Germain Haugou, ETH ([email protected])
import vp_core as vp
class component(vp.component):
    """GVSOC component wrapper for the Oprecomp KW chip's APB SoC controller."""
    # Identifier of the backing implementation resolved by the vp framework
    # (presumably the native 'apb_soc_impl' model -- verify in vp_core).
    implementation = 'pulp.chips/oprecompkw/apb_soc_impl'
| [
"[email protected]"
]
| |
9f6014f724cb1cccfd86b1c42cd8bece2474f0e8 | d039bfad0e8cc3184b8112e23f9a1ef06b4001d3 | /map_motif_space.py | 4b0987e9c71f09552ac5e8da5b6151b9d3611ae0 | []
| no_license | vhsvhs/prob_motif | 9afa93f8f3c922103be77052641902c105fe4f16 | 7bdc2485ead23c7d092cc89d3975b37c52c31135 | refs/heads/master | 2021-01-01T20:48:55.052391 | 2012-05-29T22:48:04 | 2012-05-29T22:48:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,139 | py | #
# Input: a directory of mlib files
#
# Output: a graph where node size = # of motifs bounds by a PWM, edge weight = # of motifs jointly bound by two PWMs
#
from argparser import *
import matplotlib.pyplot as plt
import networkx as nx
import os
ap = ArgParser(sys.argv)
def build_mlib_hash(genes_files):
    """Load motif libraries from disk.

    Args:
        genes_files: dict mapping gene name -> path to that gene's mlib file.

    Returns:
        dict mapping gene name -> list of motif strings, one per
        non-comment, non-blank line of the file.
    """
    ret = {}
    for gene, path in genes_files.items():
        ret[gene] = []
        # 'with' guarantees the file handle is closed even on error.
        with open(path, "r") as fin:
            for line in fin:
                # Skip '#' comment lines and near-empty lines.  The length
                # test counts the trailing newline, matching the original
                # filter exactly (so a line must have 2+ real characters).
                if len(line) > 2 and not line.startswith("#"):
                    ret[gene].append(line.strip())
    return ret
def get_mlib_files(dirpath):
    """Find motif-library files in a directory.

    Args:
        dirpath: directory to scan.

    Returns:
        dict mapping gene name -> full path, for every file whose name
        contains "mlib".  File names are expected to look like
        "<prefix>.<gene>.mlib": the gene is the second dot-separated token.
        (The original docstring said "list"; the function returns a dict.)
    """
    mlib_files = {}
    for fname in os.listdir(dirpath):
        if "mlib" in fname:
            # Gene name is the second dot-delimited token of the file name.
            gene = fname.split(".")[1]
            mlib_files[gene] = dirpath + "/" + fname
    return mlib_files
def plot_mlib_distribution(tf_m):
    """Show a 20-bin histogram of motif-library sizes across all TFs.

    tf_m maps TF name -> list of motifs.
    """
    sizes = [len(motifs) for motifs in tf_m.values()]
    plt.hist(sizes, 20)
    plt.show()
def print_mlib_stats( tf_m ):
    """Print a table: motif-library size -> the TFs whose library has that size.

    tf_m maps TF name -> list of motifs.
    NOTE(review): Python 2 only -- the bare ``print`` statements and the
    in-place ``dict.keys().sort()`` below do not run under Python 3.
    """
    # Build a reverse lookup hash: library size -> list of TF names.
    mlen_tf = {}
    for tf in tf_m.keys():
        mlen = tf_m[tf].__len__()
        if mlen not in mlen_tf:
            mlen_tf[mlen] = []
        mlen_tf[mlen].append( tf )
    # Python 2: keys() returns a list; sort it for ordered output.
    mlen_sorted = mlen_tf.keys()
    mlen_sorted.sort()
    print "\n. Motif Library Details:"
    print "[N motifs]\t[tfs]"
    for mlen in mlen_sorted:
        print mlen, "\t", mlen_tf[mlen]
def intersect(a, b):
    """Return a list of the elements common to *a* and *b* (set order)."""
    common = set(a).intersection(b)
    return list(common)
def plot_motif_space(tf_m):
    """Draw the motif-space graph with networkx/matplotlib.

    Nodes are TFs, sized by motif-library size; an edge joins two TFs whose
    libraries share at least one motif, weighted by 0.1 * overlap size.
    NOTE(review): Python 2 only (bare print statements, indexable dict.keys()).
    """
    print "\n. Plotting Motif Space..."
    G = nx.Graph()
    # One node per TF; node size proportional to its motif count.
    for tf in tf_m.keys():
        G.add_node(tf, size=1.0*tf_m[tf].__len__())
    tfs = tf_m.keys()
    # Add an edge for every pair of TFs with a non-empty motif intersection.
    for i in range(0, tfs.__len__()):
        for j in range(i+1, tfs.__len__()):
            x = intersect(tf_m[ tfs[i] ], tf_m[ tfs[j] ]).__len__()
            if x > 0:
                print tfs[i], tfs[j], x
                G.add_edge(tfs[i], tfs[j], weight=0.1*x)
    plt.figure(figsize=(8,8))
    pos=nx.spring_layout(G,iterations=20)
    nodesize=[]
    for v in G.node:
        nodesize.append(G.node[v]["size"])
    nx.draw_networkx_nodes(G, pos, node_size=nodesize, node_color="blue", alpha=0.5, linewidths=0.1)
    # Edges are drawn one at a time so each can carry its own width.
    for e in G.edges():
        #print e
        edgewidth = [ G.get_edge_data(e[0],e[1])["weight"] ]
        this_edge = [ e ]
        #print this_edge, edgewidth
        #print [(pos[e[0]],pos[e[1]]) for e in this_edge]
        nx.draw_networkx_edges(G, pos, edgelist = this_edge, width = edgewidth)
    nx.draw_networkx_labels(G, pos, font_size=9, font_family="Helvetica")
    plt.show()
#
#
# MAIN:
#
#
mlib_dir = ap.getOptionalArg("--mlibdir")
if mlib_dir != False:
mlib_files = get_mlib_files(mlib_dir)
tf_m = build_mlib_hash(mlib_files)
plot_mlib_distribution( tf_m )
print_mlib_stats( tf_m )
plot_motif_space( tf_m )
| [
"[email protected]"
]
| |
cd27c38ac0da5b55f53fe18973011869bb0c24fd | 7a043d45cf0ed0938a10a03121c2b75fdd0cc76a | /081/p081.py | dd354f637d511c2ffdc9af4ac4929a3218868b0c | []
| no_license | tormobr/Project-euler | f8d67292a6426ffba9d589d01c31e2d59249e4ff | b544540b0fee111a6f6cfe332b096fe1ec88935c | refs/heads/master | 2020-05-29T17:27:03.767501 | 2020-02-13T13:06:34 | 2020-02-13T13:06:34 | 189,276,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 761 | py | from collections import defaultdict
import time
def solve():
return dynamic()
def create_dict():
d = defaultdict(lambda: [])
for i in range(h):
for j in range(w):
d[(i,j)].append((i+1, j))
d[(i,j)].append((i, j+1))
return d
def dynamic():
for i in range(h-1, -1, -1):
data[h][i] += data[h][i+1]
data[i][w] += data[i+1][w]
for i in range(h-1, -1, -1):
for j in range(w-1, -1, -1):
data[i][j] += min(data[i+1][j], data[i][j+1])
return data[0][0]
def read_file():
return [list(map(int, line.split(","))) for line in open("input.txt").read().strip().split("\n")]
data = read_file()
dist = defaultdict(int)
h = len(data) -1
w = len(data[0]) -1
print(solve())
| [
"[email protected]"
]
| |
68216f6212a047ad3f07031c8093629b15a45287 | c46a3546e58539444e508a97b68cac21e6422baa | /food_order/migrations/0002_auto_20181122_1056.py | 8b876e1ffa6057042762c414128bfa639c38c270 | []
| no_license | ahsanhabib98/Food-service-system | 7b21b9bd3d2f7db381bc01689c6a23d3b16bb933 | 5bbc50e375d1af8c551b1048f2c6504505ac0cf4 | refs/heads/master | 2022-12-11T02:35:05.097986 | 2018-11-28T11:19:23 | 2018-11-28T11:19:23 | 159,385,627 | 0 | 0 | null | 2022-12-08T02:27:40 | 2018-11-27T19:07:25 | Python | UTF-8 | Python | false | false | 1,099 | py | # Generated by Django 2.0.5 on 2018-11-22 04:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration (Django 2.0.5): introduces the Client model
    # and links each Order to exactly one Client.  Edit with care -- applied
    # databases depend on this migration history.
    dependencies = [
        ('food_area', '0001_initial'),
        ('food_order', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Client',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('client_name', models.CharField(max_length=100)),
                ('client_image', models.ImageField(upload_to='images')),
                ('client_contact_no', models.PositiveIntegerField()),
                # Each client belongs to one service area; deleting the area
                # cascades to its clients.
                ('area', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='food_area.Area')),
            ],
        ),
        migrations.AddField(
            model_name='order',
            name='client_info',
            # default=1 is the one-off value applied to rows existing at
            # migration time (preserve_default=False drops it afterwards);
            # assumes a Client with pk=1 exists when migrating -- TODO confirm.
            field=models.OneToOneField(default=1, on_delete=django.db.models.deletion.CASCADE, to='food_order.Client'),
            preserve_default=False,
        ),
    ]
| [
"[email protected]"
]
| |
15a9b3847e9f55cac74d29796b5229c70f430981 | 626da446e92b58808a179c4fc23c3de5b457e472 | /inventory/templatetags/inventory_tags.py | 6afb098ccfdf25cede7943eed30a96877ca09a56 | [
"BSD-3-Clause"
]
| permissive | Eraldo/eraldoenergy | 76049cbb06fcc26940b8c004875f8aefbf65a95e | cb07a7722826924df4d416e8930c87f11bec3dd8 | refs/heads/master | 2020-12-23T17:43:21.683449 | 2018-05-05T18:12:43 | 2018-05-05T18:12:43 | 44,062,390 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | from classytags.helpers import InclusionTag
from django import template
from django.utils.translation import ugettext_lazy as _
register = template.Library()
@register.tag
class Item(InclusionTag):
    """Inclusion tag rendering the inventory item widget for the context's item."""

    name = 'item'
    template = 'inventory/widgets/item.html'

    def get_context(self, context, **kwargs):
        item = context.get('item')
        # Guard clause: nothing to render when no item is in the context.
        if not item:
            return {}
        return {
            'name': item,
            'image': item.image_1,
            'price': item.price,
            'original_price': item.price_original,
            'url': item.url,
            'id': item.pk,
        }
| [
"[email protected]"
]
| |
94730257260c0e6d4e04e0b65fa5129689586ecd | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/OperationTaskDTO.py | 989736892e65777dcec20f04f1a0c7083adda82e | [
"Apache-2.0"
]
| permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 6,284 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class OperationTaskDTO(object):
    """Data-transfer object for an Alipay "operation task" record.

    Every field is an optional attribute (None when unset).  Serialize with
    to_alipay_dict() and rebuild with from_alipay_dict().
    """

    # Field names, in wire order.  Each backs a '_<name>' attribute and a
    # property below; the (de)serializers iterate this tuple instead of
    # repeating eleven near-identical code blocks.
    _FIELD_NAMES = (
        'comment',
        'operation_task_id',
        'operation_task_name',
        'operation_task_type',
        'plan_end_time',
        'plan_start_time',
        'reject_comment',
        'status',
        'status_name',
        'user_id',
        'user_name',
    )

    def __init__(self):
        # All fields start unset.
        for name in self._FIELD_NAMES:
            setattr(self, '_' + name, None)

    @property
    def comment(self):
        return self._comment

    @comment.setter
    def comment(self, value):
        self._comment = value

    @property
    def operation_task_id(self):
        return self._operation_task_id

    @operation_task_id.setter
    def operation_task_id(self, value):
        self._operation_task_id = value

    @property
    def operation_task_name(self):
        return self._operation_task_name

    @operation_task_name.setter
    def operation_task_name(self, value):
        self._operation_task_name = value

    @property
    def operation_task_type(self):
        return self._operation_task_type

    @operation_task_type.setter
    def operation_task_type(self, value):
        self._operation_task_type = value

    @property
    def plan_end_time(self):
        return self._plan_end_time

    @plan_end_time.setter
    def plan_end_time(self, value):
        self._plan_end_time = value

    @property
    def plan_start_time(self):
        return self._plan_start_time

    @plan_start_time.setter
    def plan_start_time(self, value):
        self._plan_start_time = value

    @property
    def reject_comment(self):
        return self._reject_comment

    @reject_comment.setter
    def reject_comment(self, value):
        self._reject_comment = value

    @property
    def status(self):
        return self._status

    @status.setter
    def status(self, value):
        self._status = value

    @property
    def status_name(self):
        return self._status_name

    @status_name.setter
    def status_name(self, value):
        self._status_name = value

    @property
    def user_id(self):
        return self._user_id

    @user_id.setter
    def user_id(self, value):
        self._user_id = value

    @property
    def user_name(self):
        return self._user_name

    @user_name.setter
    def user_name(self, value):
        self._user_name = value

    def to_alipay_dict(self):
        """Serialize to a plain dict, omitting unset (falsy) fields.

        Values exposing their own to_alipay_dict() (nested DTOs) are
        serialized recursively, matching the generated SDK behaviour.
        """
        params = dict()
        for name in self._FIELD_NAMES:
            value = getattr(self, name)
            if not value:
                # Falsy values (None, '', 0) stay off the wire, exactly as
                # in the generated per-field code this loop replaces.
                continue
            if hasattr(value, 'to_alipay_dict'):
                value = value.to_alipay_dict()
            params[name] = value
        return params

    @staticmethod
    def from_alipay_dict(d):
        """Build an OperationTaskDTO from a decoded dict.

        Returns None when *d* is None or empty (generated-code behaviour).
        """
        if not d:
            return None
        o = OperationTaskDTO()
        for name in OperationTaskDTO._FIELD_NAMES:
            if name in d:
                setattr(o, name, d[name])
        return o
| [
"[email protected]"
]
| |
23dbae54366ea14f75cf9be0f657750d526197d8 | 67055c66ae4bca82ee61dab1757b73cc21559cfd | /miko.py | ebef8e8f43676019b4164ec58017c98197ecbf5a | []
| no_license | kevinelong/network_python | 0482f694c9c050f4226bdfb7cc4fe31df26dd17b | 41158808bac5d127c3f3f3cfaea202cb60d7167c | refs/heads/master | 2023-07-14T12:20:14.550017 | 2021-08-26T00:13:45 | 2021-08-26T00:13:45 | 359,521,517 | 1 | 20 | null | 2021-04-28T18:08:15 | 2021-04-19T16:12:03 | Python | UTF-8 | Python | false | false | 2,195 | py | from netmiko import ConnectHandler
import os
os.environ["NET_TEXTFSM"] = "d:/python37/lib/site-packages/ntc_templates/templates"
linux = {
'device_type': 'linux', #cisco_ios
'host': '3.81.60.164',
'username': 'kevin',
'password': 'S!mpl312',
}
c = ConnectHandler(**linux) # use of kwargs optional, could just use regular parameters
raw = c.send_command("arp -a")
print(raw)
r = c.send_command("arp -a", use_textfsm=True)
print(r)
print(r[0]["ip_address"])
for item in r:
print(item)
print(item["ip_address"])
"""
EXPECTED OUTPUT:
[{'rev_dns': '_gateway', 'ip_address': '172.30.1.1', 'mac_address': '0e:18:8d:7f:b8:65', 'hw_type': 'ether', 'interface': 'eth0'}]
"""
# C:\Users\kevin\ntc-templates
# from netmiko import ConnectHandler
# import paramiko
# private_key_path = "~/.ssh/clvrclvr.pem"
# linux = {
# 'device_type': 'linux',
# 'host': 'clvrclvr.com',
# 'username': 'kevin',
# 'password': 'S!mpl312',
# 'pkey' : paramiko.RSAKey.from_private_key_file(private_key_path)
# }
# c = ConnectHandler(**linux) # use of kwargs optional, could just use regular parameters
# r = c.send_command("arp -a")
#SHOW COMMAND OUTPUT
#show platform diag
"""
Chassis type: ASR1004
Slot: R0, ASR1000-RP1
Running state : ok, active
Internal state : online
Internal operational state : ok
Physical insert detect time : 00:00:45 (2w5d ago)
Software declared up time : 00:00:45 (2w5d ago)
CPLD version : 07062111
Firmware version : 12.2(33r)XNC
Slot: F0, ASR1000-ESP10
Running state : ok, active
Internal state : online
Internal operational state : ok
Physical insert detect time : 00:00:45 (2w5d ago)
Software declared up time : 00:03:15 (2w5d ago)
Hardware ready signal time : 00:00:46 (2w5d ago)
Packet ready signal time : 00:04:00 (2w5d ago)
CPLD version : 07091401
Firmware version : 12.2(33r)XNC
Slot: P0, ASR1004-PWR-AC
State : ok
Physical insert detect time : 00:03:08 (2w5d ago)
Slot: P1, ASR1004-PWR-AC
State : ok
Physical insert d
""" | [
"[email protected]"
]
| |
ee39e7c0980af8ab5743db76e6b42c88addd8bd4 | dead81f54b0aa5292f69bb5fef69e9910a137fc4 | /demo/entrypoint.py | d42cf12bc00a567fb61b3167a8116c7fb936cb17 | [
"MIT"
]
| permissive | Nekmo/djangocms-bs3-theme | 0b7274b73b5072cbb8c737f13a94143363ae864d | 1155588414164d6e5d027131e9181856f8a80d5d | refs/heads/master | 2023-01-11T19:58:29.922023 | 2020-03-08T17:10:37 | 2020-03-08T17:10:37 | 56,414,025 | 0 | 0 | MIT | 2022-12-26T20:05:06 | 2016-04-17T01:47:51 | CSS | UTF-8 | Python | false | false | 1,387 | py | #!/usr/bin/env python
import sys
import os
import subprocess
COMMIT_FILE = '.last_build_commit'
os.environ.setdefault('BUILD_DJANGO', '1')
os.environ.setdefault('FORCE_BUILD', '1')
def execute_command(*args):
subprocess.check_call(args)
def get_current_commit():
return subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode('utf-8').strip()
def read_file():
if not os.path.lexists(COMMIT_FILE):
return ''
with open(COMMIT_FILE, 'r') as f:
return f.read().strip('\n')
def write_file(data):
if data is None:
return
with open(COMMIT_FILE, 'w') as f:
return f.write(data)
def build_now():
execute_command('make', 'collectstatic')
# execute_command('./manage.py', 'collectstatic', '--noinput')
execute_command('make', 'migrate')
def build(force_build=False):
current_commit = None
if not force_build:
current_commit = get_current_commit()
if force_build or read_file() != current_commit:
try:
build_now()
except subprocess.CalledProcessError:
exit(1)
else:
write_file(current_commit)
def start(*parameters):
subprocess.check_call(['gunicorn'] + list(parameters))
if __name__ == '__main__':
if os.environ.get('BUILD_DJANGO') == '1':
build(os.environ.get('FORCE_BUILD') == '1')
start(*sys.argv[1:])
| [
"[email protected]"
]
| |
c3acf2f9644f455d0582bdf419bac21f96bab503 | eebacbc58a1c99fb6e32f8cd56cac6e18947d3e7 | /1.python_foundation/2.String_and_encode.py | 266ce0314234e4a8b2c1d08d9bd197a30e5bfb48 | []
| no_license | fzingithub/LearnPythonFromLiao | ad7f959d7e667a464f2b9a6b1cedfd0f08baaf8e | fcb0f2e7f905aca253b3986c4a1ceab6b82b7cae | refs/heads/master | 2020-03-29T19:37:32.831341 | 2018-09-27T10:39:11 | 2018-09-27T10:39:11 | 150,273,212 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 590 | py | # -*- coding: utf-8 -*-
"""
Created on Sat Jan 13 15:25:06 2018
@author: FZ
"""
#string and unicode
#ASCII Unicode 可变长 UTF-8
print ('包含中的string')
print (ord('中'))
print (chr(20013))
#转码
print ('ABC'.encode('ascii'))
print ('中文'.encode('UTF-8'))
#字节流编码
print (b'\xe4\xb8\xad\xe6\x96\x87'.decode('UTF-8'))
print (len('youareabetterman'))
#通配符 excerse
s1 = 72
s2 = 85
rate = (85-72)/72*100
print ('%.1f%%'% rate)
#小结:python使用的是 unicode编码,直接支持多语言
#string 与 byte转换时需要指定编码最常用的是 UTF-8 | [
"[email protected]"
]
| |
2b5c14efee99ffcc5240e049f48d3ac73d1e0b14 | 762b4373122e5cc791eb81759590008bdfd1f034 | /core/models/others/capsnet_em.py | d64ae0859347d99465b89adabc666fd3340a2ac6 | []
| no_license | StephenTaylor1998/high-resolution-capsule | 50929527e84d57704e1295195c6a1b555367e565 | f999b01893bde98eb053d2778e8a1bad526d8293 | refs/heads/master | 2023-05-11T18:25:50.760820 | 2021-05-24T09:47:27 | 2021-05-24T09:47:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,711 | py | import torch
import torch.nn as nn
from core import models
from core.layers.others.base import weights_init, resnet20_backbone
from core.layers.others.layers_em import EmRouting2d
from core.models import resnet18_dwt_tiny_half, resnet18_tiny_half, resnet10_tiny_half
class Model(nn.Module):
def __init__(self, num_classes, planes=16, num_caps=16, depth=3, backbone=resnet18_dwt_tiny_half, caps_size=16,
in_shape=(3, 32, 32)):
super(Model, self).__init__()
self.num_caps = num_caps
self.depth = depth
self.layers = backbone(backbone=True, in_channel=in_shape[0])
self.conv_layers = nn.ModuleList()
self.norm_layers = nn.ModuleList()
# ========= ConvCaps Layers
for d in range(1, depth):
stride = 2 if d == 1 else 1
self.conv_layers.append(EmRouting2d(num_caps, num_caps, caps_size, kernel_size=3, stride=stride, padding=1))
self.norm_layers.append(nn.BatchNorm2d(4 * 4 * num_caps))
final_shape = 4
# EM
self.conv_a = nn.Conv2d(num_caps * planes, num_caps, kernel_size=3, stride=1, padding=1, bias=False)
self.conv_pose = nn.Conv2d(num_caps * planes, num_caps * caps_size, kernel_size=3, stride=1, padding=1, bias=False)
self.bn_a = nn.BatchNorm2d(num_caps)
self.bn_pose = nn.BatchNorm2d(num_caps * caps_size)
self.fc = EmRouting2d(num_caps, num_classes, caps_size, kernel_size=final_shape, padding=0)
self.apply(weights_init)
def forward(self, x):
out = self.layers(x)
# EM
a, pose = self.conv_a(out), self.conv_pose(out)
a, pose = torch.sigmoid(self.bn_a(a)), self.bn_pose(pose)
for m, bn in zip(self.conv_layers, self.norm_layers):
a, pose = m(a, pose)
pose = bn(pose)
a, _ = self.fc(a, pose)
out = torch.mean(a, dim=[2, 3], keepdim=False)
return out
def capsnet_em_depthx1(num_classes=10, args=None, **kwargs):
in_shape = (3, 32, 32) if args.in_shape is None else args.in_shape
backbone = models.__dict__[args.backbone]
return Model(num_classes, depth=1, backbone=backbone, in_shape=in_shape)
def capsnet_em_depthx2(num_classes=10, args=None, **kwargs):
in_shape = (3, 32, 32) if args.in_shape is None else args.in_shape
backbone = models.__dict__[args.backbone]
return Model(num_classes, depth=2, backbone=backbone, in_shape=in_shape)
def capsnet_em_depthx3(num_classes=10, args=None, **kwargs):
in_shape = (3, 32, 32) if args.in_shape is None else args.in_shape
backbone = models.__dict__[args.backbone]
return Model(num_classes, depth=3, backbone=backbone, in_shape=in_shape)
| [
"[email protected]"
]
| |
5b20d25002d847a60df58c8f76a76214777c80ff | 7530867a3f3d80600b1f728b65d778f7b4e3deb0 | /layers/linear.py | 7e903791440ba4262e4e1d8e443136de7d048a95 | [
"MIT"
]
| permissive | rezer0dai/zer0nets | 1fba5895fcb0397ec481b9cdbfa686f7b4cd83e8 | 982fa69571478dc61c6110f3287fad94af6d4f2c | refs/heads/master | 2020-03-24T09:36:23.499160 | 2018-07-28T00:02:08 | 2018-07-28T00:02:08 | 142,632,679 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 201 | py | import numpy as np
from feat_space import *
class Linear(FeatureSpace):
def name(self):
return "linear"
def signal(self, x):
return x
def prime(self, _):
return 1.
| [
"[email protected]"
]
| |
601a58e65541310880e10f036c051e58ddf089e2 | efe3c9ad40200e6a4cc54ade2867e455687eb11b | /home/migrations/0004_message.py | bb97cd54ee190824e8f4994f6e57f1580cb8bcbe | [
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-public-domain"
]
| permissive | andrewhstead/stream-three-project | bec3b70b354b812d1a875ee4e305377038fe179b | 60e5f946455f12019a266b8231737435702ff95e | refs/heads/master | 2023-06-23T17:53:09.379297 | 2023-06-13T16:09:22 | 2023-06-13T16:09:22 | 126,410,294 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 879 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-23 23:14
from __future__ import unicode_literals
from django.db import migrations, models
import tinymce.models
class Migration(migrations.Migration):
initial = True
dependencies = [
('home', '0003_delete_team'),
]
operations = [
migrations.CreateModel(
name='Message',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('sender', models.CharField(max_length=100)),
('email', models.EmailField(max_length=100)),
('date_sent', models.DateTimeField(auto_now_add=True)),
('subject', models.CharField(max_length=100)),
('message', tinymce.models.HTMLField(blank=True)),
],
),
]
| [
"[email protected]"
]
| |
de5bdd8d7521907a0d02b916dded40acdace4814 | bf99b1b14e9ca1ad40645a7423f23ef32f4a62e6 | /AtCoder/other/日立製作所_社会システム事業部_プログラミングコンテスト2020/c.py | f0dbb2903e9188f925ea9ea87e867040ab1f0e43 | []
| no_license | y-oksaku/Competitive-Programming | 3f9c1953956d1d1dfbf46d5a87b56550ff3ab3db | a3ff52f538329bed034d3008e051f30442aaadae | refs/heads/master | 2021-06-11T16:14:12.635947 | 2021-05-04T08:18:35 | 2021-05-04T08:18:35 | 188,639,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,161 | py | from collections import deque
import sys
input = sys.stdin.buffer.readline
N = int(input())
edges = [[] for _ in range(N)]
for _ in range(N - 1):
fr, to = map(lambda a: int(a) - 1, input().split())
edges[fr].append(to)
edges[to].append(fr)
dist = [10**10] * N
que = deque([(0, 0)])
while que:
now, d = que.popleft()
if dist[now] <= d:
continue
dist[now] = d
for to in edges[now]:
que.append((to, d + 1))
A = [i for i, d in enumerate(dist) if d % 2 == 0]
B = [i for i, d in enumerate(dist) if d % 2 == 1]
if len(A) > len(B):
A, B = B, A
ans = [-1] * N
nums = set(range(1, N + 1))
if len(A) <= N // 3:
mul = 1
for i in A:
ans[i] = 3 * mul
nums.remove(3 * mul)
mul += 1
nums = list(nums)
for i, n in zip(B, nums):
ans[i] = n
else:
mul = 1
for c, i in enumerate(A):
if c * 3 + 1 > N:
ans[i] = mul * 3
mul += 1
else:
ans[i] = c * 3 + 1
for c, i in enumerate(B):
if c * 3 + 2 > N:
ans[i] = mul * 3
mul += 1
else:
ans[i] = c * 3 + 2
print(*ans)
| [
"[email protected]"
]
| |
c3b42d25f9116f1bf61fa704be8f0a121762c825 | 4e097df1d8ee1c864699ce917195aa79e6a78c24 | /backend/purple_fire_27872/urls.py | 6f28f3da85dcdbeecef932b43d280b8980e4bc0d | []
| no_license | crowdbotics-apps/purple-fire-27872 | 2ddbac1b9e0a640e80171d6dea3301de204e2a13 | d630e111e9144b698d3581fc45c0067a1d52c45c | refs/heads/master | 2023-05-15T07:36:11.511788 | 2021-06-09T13:01:26 | 2021-06-09T13:01:26 | 375,356,766 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,229 | py | """purple_fire_27872 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Purple Fire"
admin.site.site_title = "Purple Fire Admin Portal"
admin.site.index_title = "Purple Fire Admin"
# swagger
api_info = openapi.Info(
title="Purple Fire API",
default_version="v1",
description="API documentation for Purple Fire App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
TemplateView.as_view(template_name='index.html'))]
| [
"[email protected]"
]
| |
786bbf41efc469014729778a19aca2a7ce6dc054 | c991da8bae5a74dec3e6400ca780206758b9840a | /old/Session002/DynamicProgramming/Triangle.py | 8e8fef6ae6c114c304f3abc1c5d8ea2d824c1bdf | []
| no_license | MaxIakovliev/algorithms | 0503baca3d35c8ad89eca8821c5b2928d805064b | 54d3d9530b25272d4a2e5dc33e7035c44f506dc5 | refs/heads/master | 2021-07-23T02:21:18.443979 | 2021-07-18T08:05:37 | 2021-07-18T08:05:37 | 45,613,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 586 | py | class Solution:
"""
https://leetcode.com/problems/triangle/
solution:
https://leetcode.com/problems/triangle/discuss/38724/7-lines-neat-Java-Solution
"""
def minimumTotal(self, triangle: 'List[List[int]]') -> int:
dp=[0 for i in range(len(triangle)+1)]
for i in range(len(triangle)-1,-1,-1):
for j in range(len(triangle[i])):
dp[j]=min(dp[j],dp[j+1])+triangle[i][j]
return dp[0]
if __name__ == "__main__":
c=Solution()
print(c.minimumTotal([
[2],
[3,4],
[6,5,7],
[4,1,8,3]
]))#11
| [
"[email protected]"
]
| |
f6180d6e48614c2a0d648ee7c5c04d9b51cdd379 | bb311256e15179e929b9fba277e16f67b1e674e5 | /backend/athlete_auction_28818/urls.py | 122f438761b809957bed0a2e6d02e7d31a115685 | []
| no_license | crowdbotics-apps/athlete-auction-28818 | bd14650fcf008eca4132ea44a8064e6d8ef93310 | 457aa0b49b2ac9c2d94e09b7cd6b07ba9a1644d5 | refs/heads/master | 2023-06-16T17:13:45.772189 | 2021-07-13T23:46:46 | 2021-07-13T23:46:46 | 385,762,299 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,253 | py | """athlete_auction_28818 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Athlete Auction"
admin.site.site_title = "Athlete Auction Admin Portal"
admin.site.index_title = "Athlete Auction Admin"
# swagger
api_info = openapi.Info(
title="Athlete Auction API",
default_version="v1",
description="API documentation for Athlete Auction App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
urlpatterns += [path("", TemplateView.as_view(template_name='index.html'))]
urlpatterns += [re_path(r"^(?:.*)/?$",
TemplateView.as_view(template_name='index.html'))]
| [
"[email protected]"
]
| |
696ec13eb480eb65068ec5403f76bb30b5f0a8de | 71f00ed87cd980bb2f92c08b085c5abe40a317fb | /Data/GoogleCloud/google-cloud-sdk/lib/surface/ai_platform/models/list.py | 632e720d7f254e7d84e144a1789781bfd9835dff | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | factoryofthesun/Rao-NLP | 2bd8269a8eed1cb352c14c8fde88e3111ccca088 | 87f9723f5ee51bd21310d58c3425a2a7271ec3c5 | refs/heads/master | 2023-04-18T08:54:08.370155 | 2020-06-09T23:24:07 | 2020-06-09T23:24:07 | 248,070,291 | 0 | 1 | null | 2021-04-30T21:13:04 | 2020-03-17T20:49:03 | Python | UTF-8 | Python | false | false | 1,782 | py | # -*- coding: utf-8 -*- #
# Copyright 2019 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""ai-platform models list command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.ml_engine import models
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.ml_engine import endpoint_util
from googlecloudsdk.command_lib.ml_engine import flags
from googlecloudsdk.command_lib.ml_engine import models_util
from googlecloudsdk.core import resources
_COLLECTION = 'ml.models'
_DEFAULT_FORMAT = """
table(
name.basename(),
defaultVersion.name.basename()
)
"""
def _GetUri(model):
ref = resources.REGISTRY.ParseRelativeName(
model.name, models_util.MODELS_COLLECTION)
return ref.SelfLink()
class List(base.ListCommand):
"""List existing AI Platform models."""
@staticmethod
def Args(parser):
parser.display_info.AddFormat(_DEFAULT_FORMAT)
parser.display_info.AddUriFunc(_GetUri)
flags.GetRegionArg('model').AddToParser(parser)
def Run(self, args):
with endpoint_util.MlEndpointOverrides(region=args.region):
return models_util.List(models.ModelsClient())
| [
"[email protected]"
]
| |
5e643721bee6dc4b37e5b40540e5cf632e766789 | ccf94dcb6b1500fcbbd56964ae8c4832a496b8b3 | /python/baiduads-sdk-auto/baiduads/kr/model/get_kr_file_id_by_words_response_wrapper.py | 928cdd391cc8303ddb0b07ed7ed6057fb0718aae | [
"Apache-2.0"
]
| permissive | baidu/baiduads-sdk | 24c36b5cf3da9362ec5c8ecd417ff280421198ff | 176363de5e8a4e98aaca039e4300703c3964c1c7 | refs/heads/main | 2023-06-08T15:40:24.787863 | 2023-05-20T03:40:51 | 2023-05-20T03:40:51 | 446,718,177 | 16 | 11 | Apache-2.0 | 2023-06-02T05:19:40 | 2022-01-11T07:23:17 | Python | UTF-8 | Python | false | false | 11,702 | py | """
dev2 api schema
'dev2.baidu.com' api schema # noqa: E501
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from baiduads.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from baiduads.exceptions import ApiAttributeError
def lazy_import():
from baiduads.common.model.api_response_header import ApiResponseHeader
from baiduads.kr.model.get_kr_file_id_by_words_response_wrapper_body import GetKRFileIdByWordsResponseWrapperBody
globals()['ApiResponseHeader'] = ApiResponseHeader
globals()['GetKRFileIdByWordsResponseWrapperBody'] = GetKRFileIdByWordsResponseWrapperBody
class GetKRFileIdByWordsResponseWrapper(ModelNormal):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
}
validations = {
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = False
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'header': (ApiResponseHeader,), # noqa: E501
'body': (GetKRFileIdByWordsResponseWrapperBody,), # noqa: E501
}
@cached_property
def discriminator():
return None
attribute_map = {
'header': 'header', # noqa: E501
'body': 'body', # noqa: E501
}
read_only_vars = {
}
_composed_schemas = {}
@classmethod
@convert_js_args_to_python_args
def _from_openapi_data(cls, *args, **kwargs): # noqa: E501
"""GetKRFileIdByWordsResponseWrapper - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
header (ApiResponseHeader): [optional] # noqa: E501
body (GetKRFileIdByWordsResponseWrapperBody): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
self = super(OpenApiModel, cls).__new__(cls)
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
return self
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""GetKRFileIdByWordsResponseWrapper - a model defined in OpenAPI
Keyword Args:
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
header (ApiResponseHeader): [optional] # noqa: E501
body (GetKRFileIdByWordsResponseWrapperBody): [optional] # noqa: E501
"""
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
for var_name, var_value in kwargs.items():
if var_name not in self.attribute_map and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
self.additional_properties_type is None:
# discard variable.
continue
setattr(self, var_name, var_value)
if var_name in self.read_only_vars:
raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
f"class with read only attributes.")
| [
"[email protected]"
]
| |
8bdab1439c9d2449522735f1c720d674074d966f | a560269290749e10466b1a29584f06a2b8385a47 | /Notebooks/py/trix999/starting-my-journey-here-with-titanic/starting-my-journey-here-with-titanic.py | 15f2cd17c64a621c783a8d96f243d5995897877e | []
| no_license | nischalshrestha/automatic_wat_discovery | c71befad1aa358ae876d5494a67b0f4aa1266f23 | 982e700d8e4698a501afffd6c3a2f35346c34f95 | refs/heads/master | 2022-04-07T12:40:24.376871 | 2020-03-15T22:27:39 | 2020-03-15T22:27:39 | 208,379,586 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,927 | py | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
from sklearn.model_selection import cross_val_score
accuracies = cross_val_score(estimator = classifier, X = X_test,
y = y_test.astype(int), cv = 10, scoring = 'precision')
print("Accuracy mean " + str(accuracies.mean()))
print("Accuracy std " + str(accuracies.std()))
# std seems high
#
# Before changing algorithm, let's try to work on features
#
# *Feature selection* using RFE (recursive feature elimination)
#
#
# In[ ]:
from sklearn.feature_selection import RFE
rfe = RFE(classifier, 6)
rfe = rfe.fit(X_test, y_test.astype(int))
# summarize the selection of the attributes
print(rfe.support_)
print(rfe.ranking_)
# Hello everybody,
#
# this is my first notebook/competition and I hope to have feedbacks about what I'm doing (especially wrong things).
#
# I haven't seen other submissions, as I want to start from scratch and see what I can find
#
# I'm very fascinated by ML and I'm eager to learn as much as possible
#
# Ok, let's start!
#
# Besides the results, what I'll like to do is to establish a correct general workflow helping to work with all datasets
#
# The steps:
#
#
#
# 1) Inspect the data to have a first guess of features, relations, instances quality and draw some graph helping to visualize them
#
# 2) Do some preprocessing (get rid of nan, categorical feature encoding, feature scaling - if necessary)
#
# 3) Further analysis
#
# 4) Build a baseline classifier (Logistic Regression in this case) just to have a starting point
#
# 5) Do features selection and engineering to improve results
#
# 6) Repeat from step 2 with another approach (algorithm, features, etc) until complete satisfaction :)
# In[ ]:
# Importing some libraries
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Importing the train dataset from file
dataset = pd.read_csv('../input/train.csv')
#Some info about it
dataset.info()
dataset.isnull().sum()
dataset.describe()
# Let's see what we have
#
# PassengerId: meta
#
# Survived: target
#
# Pclass: feature (seems important, based on position probably)
#
# Name: meta
#
# Sex: feature (not sure how can impact on surviving an iceberg hit :))
#
# Age: feature (maybe target related)
#
# Sibsp, Parch: (seem important, an event happening to all the people in a group)
#
# Fare: maybe related to class
#
# Ticket, Cabin, Embarked: not related, just meta
#
#
# Rows number seems ok respect the features
#
# Age is missing on 20% data, we'll see how to deal it
# In[ ]:
# Let's explore the data visually against the target
survived_pclass = pd.crosstab([dataset.Pclass], dataset.Survived.astype(bool))
survived_pclass.plot(kind='bar', stacked=False, color=['red','blue'], grid=False)
survived_sex = pd.crosstab([dataset.Sex], dataset.Survived.astype(bool))
survived_sex.plot(kind='bar', stacked=False, color=['red','blue'], grid=False)
survived_sibsp = pd.crosstab([dataset.SibSp], dataset.Survived.astype(bool))
survived_sibsp.plot(kind='bar', stacked=False, color=['red','blue'], grid=False)
survived_parch = pd.crosstab([dataset.Parch], dataset.Survived.astype(bool))
survived_parch.plot(kind='bar', stacked=False, color=['red','blue'], grid=False)
plt.show()
# So male, with 3rd class and alone is the victim type
# High SibSp too seems very deadly :(
#
# Ok, time to preprocess for further analysis
# In[ ]:
#get all relevant columns
workingDataset = dataset.iloc[:, [1,2,4,5,6,7,9]]
# get rid of age nan rows (first approach)
workingDataset = workingDataset[np.isfinite(workingDataset['Age'])]
# feature/target selection
workingData = workingDataset.values
X = workingData[:, 1:]
y = workingData[:, 0]
# encoding feature (sex)
from sklearn.preprocessing import LabelEncoder, OneHotEncoder
labelencoder_X = LabelEncoder()
X[:,1] = labelencoder_X.fit_transform(X[:, 1])
onehotencoder = OneHotEncoder(categorical_features = [1])
X = onehotencoder.fit_transform(X).toarray()
# avoid dummy trap
X = X[:, 1:]
from sklearn.preprocessing import StandardScaler
from pandas import DataFrame
sc = StandardScaler()
preprocessedData = sc.fit_transform(X)
# rebuild feature's dataframe with normalized data for graphs purpose
preprocessedDataset = DataFrame(data=preprocessedData)
preprocessedDataset.columns = ['Sex','Pclass', 'Age', 'SibSp', 'Parch', 'Fare']
preprocessedDataset.describe()
# In[ ]:
def rand_jitter(arr):
    """Return *arr* with a small amount of Gaussian noise added.

    The noise scale is 1% of the value range of *arr*, which visually
    separates overlapping points in a scatter plot without distorting it.
    """
    spread = max(arr) - min(arr)
    noise = np.random.randn(len(arr))
    return arr + noise * (.01 * spread)
colors = np.where(dataset.Survived == 1, 'blue', 'red')
plt.scatter(x=rand_jitter(dataset.Parch), y=rand_jitter(dataset.SibSp), c = colors)
plt.xlabel('Parch')
plt.ylabel('SibSp')
# In[ ]:
plt.scatter(x=rand_jitter(preprocessedDataset.Age), y=rand_jitter(preprocessedDataset.Fare), c = colors)
plt.xlabel('Age')
plt.ylabel('Fare')
# In[ ]:
plt.boxplot(preprocessedData)
plt.xlabel("Attribute Index")
plt.ylabel(("Quartile Ranges - Normalized "))
# In[ ]:
#parallel coordinates
nRows = len(preprocessedDataset.index)
nCols = len(preprocessedDataset.columns)
nDataCol = nCols
for i in range(nRows):
#assign color based on "1" or "0" labels
if y[i] == 1: #survived
pcolor = "blue"
else:
pcolor = "red"
#plot rows of data as if they were series data
dataRow = preprocessedDataset.iloc[i,0:nDataCol]
dataRow.plot(color=pcolor, alpha=0.5)
plt.xlabel("Attribute Index")
plt.ylabel(("Attribute Values"))
plt.show()
#
# Low correlation betwen features
# Fare with some outliers, age should be ok...let's have confirmation with probplots
# In[ ]:
import scipy.stats as stats
import pylab
col = 5
colData = []
for row in X:
colData.append(float(row[col]))
stats.probplot(colData, dist="norm", plot=pylab)
pylab.show()
col = 2
colData = []
for row in X:
colData.append(float(row[col]))
stats.probplot(colData, dist="norm", plot=pylab)
pylab.show()
# In[ ]:
corMat = DataFrame(preprocessedDataset.corr())
#visualize correlations using heatmap
plt.pcolor(corMat)
plt.show()
# Correlation is low
#
# Time to build baseline classifier with Logistic Regression and simple split
# In[ ]:
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(preprocessedData, y,
test_size = 0.25, random_state = 0)
y_test = y_test.astype(int)
y_train = y_train.astype(int)
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state = 0)
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
import seaborn as sn
sn.heatmap(cm, annot=True)
# mmm I'm sure can be better...
#
# Let's check the accuracy doing k-fold cross validation
# In[ ]:
from sklearn.model_selection import cross_val_score
accuracy = cross_val_score(estimator = classifier, X = X_test,
y = y_test, cv = 10, scoring = 'accuracy')
print("Accuracy: %0.2f (+/- %0.2f)" % (accuracy.mean(), accuracy.std() * 2))
# std seems high
#
# Before changing algorithm, let's try to work on features
#
# *Feature selection* using RFE (recursive feature elimination)
#
#
# In[ ]:
from sklearn.feature_selection import RFE
rfe = RFE(classifier, 6)
rfe = rfe.fit(X_test, y_test)
# summarize the selection of the attributes
print(rfe.support_)
print(rfe.ranking_)
# Feature engineering using PCA
#
# (but should not work given the result of RFE)
# In[ ]:
from sklearn.decomposition import PCA
pca = PCA(n_components = 2)
X_train_pca = pca.fit_transform(X_train)
X_test_pca = pca.transform(X_test)
explained_variance = pca.explained_variance_ratio_
# Fitting Logistic Regression to the Training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state = 0)
classifier.fit(X_train_pca, y_train)
# Predicting the Test set results
y_pred = classifier.predict(X_test_pca)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
sn.heatmap(cm, annot=True)
accuracy = cross_val_score(estimator = classifier, X = X_test_pca,
y = y_test, cv = 10, scoring = 'accuracy')
print("Accuracy: %0.2f (+/- %0.2f)" % (accuracy.mean(), accuracy.std() * 2))
# In[ ]:
from matplotlib.colors import ListedColormap
X_set, y_set = X_test_pca, y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'blue')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'blue'))(i), label = j)
plt.title('Logistic Regression (Test set)')
plt.xlabel('PC1')
plt.ylabel('PC2')
plt.legend()
plt.show()
# Let's try LDA
# In[ ]:
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis as LDA
lda = LDA(n_components = 2)
X_train_lda = lda.fit_transform(X_train, y_train)
X_test_lda = lda.transform(X_test)
# Fitting Logistic Regression to the Training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state = 0)
classifier.fit(X_train_lda, y_train)
# Predicting the Test set results
y_pred_lda = classifier.predict(X_test_lda)
# Making the Confusion Matrix
cm = confusion_matrix(y_test, y_pred_lda)
sn.heatmap(cm, annot=True)
accuracy = cross_val_score(estimator = classifier, X = X_test_lda,
y = y_test, cv = 10, scoring = 'accuracy')
print("Accuracy: %0.2f (+/- %0.2f)" % (accuracy.mean(), accuracy.std() * 2))
# ok, let's finish with kernel-pca using not linear approach
# In[ ]:
from sklearn.decomposition import KernelPCA
kpca = KernelPCA(n_components = 5, kernel = 'rbf')
X_train_kpca = kpca.fit_transform(X_train)
X_test_kpca = kpca.transform(X_test)
# Fitting Logistic Regression to the Training set
from sklearn.linear_model import LogisticRegression
classifier = LogisticRegression(random_state = 0)
classifier.fit(X_train_kpca, y_train)
# Predicting the Test set results
y_pred_kpca = classifier.predict(X_test_kpca)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred_kpca)
sn.heatmap(cm, annot=True)
accuracy = cross_val_score(estimator = classifier, X = X_test_kpca,
y = y_test, cv = 10, scoring = 'accuracy')
print("Accuracy: %0.2f (+/- %0.2f)" % (accuracy.mean(), accuracy.std() * 2))
| [
"[email protected]"
]
| |
0ff085f57b4a9657055b933dc0bfe0597fef0fa4 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /QFXMcwaQZ8FTAuEtg_12.py | c3acc86a9fb4cf4cf4d78a239f5630f30554b163 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 548 | py | """
Create a function that takes a single character as an argument and returns the
char code of its lowercased / uppercased counterpart.
### Examples
Given that:
- "A" char code is: 65
- "a" char code is: 97
counterpartCharCode("A") ➞ 97
counterpartCharCode("a") ➞ 65
### Notes
* The argument will always be a single character.
* Not all inputs will have a counterpart (e.g. numbers), in which case return the inputs char code.
"""
def counterpartCharCode(char):
    """Return the char code of the case-swapped counterpart of *char*.

    Characters without an upper/lower counterpart (digits, punctuation)
    are left unchanged by the case swap, so their own code is returned.
    """
    counterpart = char.swapcase()
    return ord(counterpart)
| [
"[email protected]"
]
| |
4d38fc618d7a2428148d7e7d91a47a693b58017c | 7becaa0c899fb97edcf03b90ca019618ba89deca | /test/test_simple.py | 85eedbd799f93203d3915ad3fe4f681e1f7208fe | [
"MIT"
]
| permissive | tgbugs/idlib | eed7f828aeb66ed8d9514303fbb965684908f955 | 369481d059d10a5dd8240c0dc4ce09fc1cfb4301 | refs/heads/master | 2023-08-07T13:45:04.103554 | 2023-07-28T00:56:55 | 2023-07-28T00:56:55 | 218,661,877 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | import unittest
class TestSimple(unittest.TestCase):
def test_import(self):
import idlib
| [
"[email protected]"
]
| |
413532b7ca0867d03a3b8a5fab67927dad30a7fc | 494c191e87ae52470b9eb5d38d4851db168ed7cc | /leetcode/0179_largest_number.py | 82d09525cd00c4f7825de9c78d6378b767fd839d | []
| no_license | Jeetendranani/yaamnotes | db67e5df1e2818cf6761ab56cf2778cf1860f75e | 1f859fb1d26ffeccdb847abebb0f77e9842d2ca9 | refs/heads/master | 2020-03-19T01:12:45.826232 | 2018-05-30T20:14:11 | 2018-05-30T20:14:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,063 | py | """
179. Largest Number
Given a list of non negative integers, arrange them such that they form the largest number.
For example, given [3, 30, 34, 5, 9], the largest formed number is 9534330.
Note the result may be every large, so you need to return a string instead of an integer.
Approach 1: Sorting via custom comparator
Intuition
To construct the largest number, we want to ensure that the most significant digits are occupied by the largest digits.
Algorithm:
First, we convert each integer to a string. Then, we sort the array of strings.
While it might be tempting to simply sort the numbers in descending order, this causes leetcode for set of numbers with
the same leading digits. For example, sorting the problem example in descending order would produce the number 9534303,
while the correct answer can be achieved by transposing the 3 and 30. Therefore, for each pairwise comparison during
the sort, we compare the numbers achieved by concatenating the pair in both orders. We can prove that this sorts into
the proper order as following:
Assume that (without loss of generality), for some pair of integers a and b, our comparator dictates that a should
preceding b in sorted order. This means that a_b > b_a (where _ represents concatenation). For the sort to produce an
incorrect ordering, there must be some c for which b precendes c and c precedes a, this is a contradiction because
a_b > b_a and b_c > c_b implies a_c > c _a. In other words, our custom comparator preserves transitivity, so the sort
is correct.
Once the array is sorted, the most "signficant" number will at the front. There is a minor edge case comes up when the
array comesup when the array consists of only 0, we can simply return 0. Otherwise, we built a string out of the sorted
array and return it.
"""
class LargerNumKey(str):
    """Sort key: x orders before y whenever the concatenation x+y beats y+x.

    Sorting digit strings ascending under this comparison yields the order
    that forms the largest concatenated number.
    """

    def __lt__(self, other):
        return self + other > other + self
class Solution:
    def lagest_number(self, nums):
        """Arrange *nums* (non-negative ints) to form the largest number.

        The result is returned as a string since it can exceed any
        fixed-width integer.

        Fixes over the previous version:
        * ``max(str, nums)`` was a typo for ``map(str, nums)`` -- ``max``
          raised a TypeError instead of stringifying the numbers.
        * an empty input no longer raises IndexError; it yields "0".
        The pairwise ordering rule is inlined via ``cmp_to_key`` so the
        method is self-contained.
        """
        from functools import cmp_to_key

        def precede(a, b):
            # a comes first exactly when concatenating a then b is larger.
            if a + b > b + a:
                return -1
            if a + b < b + a:
                return 1
            return 0

        largest_num = ''.join(sorted(map(str, nums), key=cmp_to_key(precede)))
        # All zeros (or an empty input) collapse to the single digit "0".
        return '0' if not largest_num or largest_num[0] == '0' else largest_num
"[email protected]"
]
| |
29c57beb7192eb32d1352e5ca01ba1687eed5ad9 | c8a04384030c3af88a8e16de4cedc4ef8aebfae5 | /stubs/pandas/tests/indexes/timedeltas/test_timedelta_range.pyi | 2d3cd837b31cc6f1546a327e09061dedc2bb2bb9 | [
"MIT"
]
| permissive | Accern/accern-xyme | f61fce4b426262b4f67c722e563bb4297cfc4235 | 6ed6c52671d02745efabe7e6b8bdf0ad21f8762c | refs/heads/master | 2023-08-17T04:29:00.904122 | 2023-05-23T09:18:09 | 2023-05-23T09:18:09 | 226,960,272 | 3 | 2 | MIT | 2023-07-19T02:13:18 | 2019-12-09T20:21:59 | Python | UTF-8 | Python | false | false | 545 | pyi | # Stubs for pandas.tests.indexes.timedeltas.test_timedelta_range (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
# pylint: disable=unused-argument,redefined-outer-name,no-self-use,invalid-name
# pylint: disable=relative-beyond-top-level,line-too-long,arguments-differ
from typing import Any
class TestTimedeltas:
    # Type-stub mirrors of the test methods in
    # pandas/tests/indexes/timedeltas/test_timedelta_range.py; the `...`
    # bodies are intentional (.pyi stub file, signatures only).
    def test_timedelta_range(self) -> None:
        ...
    def test_linspace_behavior(self, periods: Any, freq: Any) -> None:
        ...
    def test_errors(self) -> None:
        ...
| [
"[email protected]"
]
| |
49c6ca0beb4a387dfc9bada06b432530f567f400 | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/googlecloudsdk/command_lib/dataproc/jobs/trino.py | 303c27738c721cac3724dfc2ee0bd9e9ac9e78be | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 3,677 | py | # -*- coding: utf-8 -*- #
# Copyright 2022 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for the Trino job."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import encoding
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.command_lib.dataproc.jobs import base as job_base
from googlecloudsdk.command_lib.dataproc.jobs import util as job_util
class TrinoBase(job_base.JobBase):
    """Submit a Trino job to a cluster."""

    @staticmethod
    def Args(parser):
        """Parses command line arguments specific to submitting Trino jobs."""
        # Exactly one of --execute / --file selects what drives the job.
        driver = parser.add_mutually_exclusive_group(required=True)
        driver.add_argument(
            '--execute',
            '-e',
            metavar='QUERY',
            dest='queries',
            action='append',   # repeatable: each -e appends one query
            default=[],
            help='A Trino query to execute.')
        driver.add_argument(
            '--file',
            '-f',
            help='HCFS URI of file containing the Trino script to execute.')
        parser.add_argument(
            '--properties',
            type=arg_parsers.ArgDict(),
            metavar='PARAM=VALUE',
            help='A list of key value pairs to set Trino session properties.')
        parser.add_argument(
            '--properties-file',
            help=job_util.PROPERTIES_FILE_HELP_TEXT)
        parser.add_argument(
            '--driver-log-levels',
            type=arg_parsers.ArgDict(),
            metavar='PACKAGE=LEVEL',
            help=('A list of package-to-log4j log level pairs to configure driver '
                  'logging. For example: root=FATAL,com.example=INFO'))
        parser.add_argument(
            '--continue-on-failure',
            action='store_true',
            help='Whether to continue if a query fails.')
        parser.add_argument(
            '--query-output-format',
            help=('The query output display format. See the Trino documentation '
                  'for supported output formats.'))
        parser.add_argument(
            '--client-tags',
            type=arg_parsers.ArgList(),
            metavar='CLIENT_TAG',
            help='A list of Trino client tags to attach to this query.')

    @staticmethod
    def GetFilesByType(args):
        """Returns the {type: path} mapping of files referenced by the args."""
        return {'file': args.file}

    @staticmethod
    def ConfigureJob(messages, job, files_by_type, logging_config, args):
        """Populates the trinoJob member of the given job."""
        trino_job = messages.TrinoJob(
            continueOnFailure=args.continue_on_failure,
            queryFileUri=files_by_type['file'],
            loggingConfig=logging_config)
        # Only set optional fields that the user actually supplied.
        if args.queries:
            trino_job.queryList = messages.QueryList(queries=args.queries)
        if args.query_output_format:
            trino_job.outputFormat = args.query_output_format
        if args.client_tags:
            trino_job.clientTags = args.client_tags
        job_properties = job_util.BuildJobProperties(
            args.properties, args.properties_file)
        if job_properties:
            # Sort properties to ensure tests comparing messages not fail on ordering.
            trino_job.properties = encoding.DictToAdditionalPropertyMessage(
                job_properties, messages.TrinoJob.PropertiesValue, sort_items=True)
        job.trinoJob = trino_job
| [
"[email protected]"
]
| |
1aa77b0cf7ef09c20fc0e64eec1906052fe467e9 | cb14afc9864e370a17f21f4486a17c824fb10294 | /simple questions on loops and list comprehensions/Use a List Comprehension to create a list of all numbers between 1 and 50 that are divisible by 3.py | ec1abc951f14dacd5746142d8179b8e0ee50030d | []
| no_license | sandeepshiven/python-practice | 92130a1d34fe830433c0526b386ee4550a713d55 | 1bfa6145c5662231128a39fdfadf8db06f4b0958 | refs/heads/master | 2020-06-16T12:04:52.983978 | 2020-02-04T18:19:55 | 2020-02-04T18:19:55 | 195,565,480 | 0 | 1 | null | 2019-09-15T18:25:54 | 2019-07-06T17:21:17 | Python | UTF-8 | Python | false | false | 163 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Jul 18 17:43:26 2019
@author: sandeep
"""
lst = [x for x in range(1,50) if x%3 == 0]
print(lst) | [
"[email protected]"
]
| |
826b03c57e962e20dbce7975d779ddf393b8a6c0 | 6f8267e19ad9bf828432d34780e7dde92fed054b | /src/exp/expChooseView.py | 2da4f79ad80ba95a9f34f71807af50e884eeaf23 | []
| no_license | ravika/expresso | 3129b5227cfc664d2adbec8c768bea9751898e0b | 319380d25e2ca4fc6111651d8e1c7cd98ad44a25 | refs/heads/master | 2016-08-03T19:32:15.823161 | 2015-05-02T10:16:37 | 2015-05-02T10:16:37 | 35,533,945 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,198 | py | # -*- coding: utf-8 -*-
##############
# Written by : Jaley Dholakiya
# Video Analytics Lab,IISc
#############
# Form implementation generated from reading ui file 'expChooseView.ui'
#
# Created: Sat Mar 14 01:53:22 2015
# by: PyQt4 UI code generator 4.10.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
import os
root = os.getenv('EXPRESSO_ROOT')
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Form(QtGui.QWidget):
    """Launcher panel with five icon buttons, each labelled with an action.

    Largely pyuic4-generated code (see the file header warning); hand edits
    here will be lost if the .ui file is regenerated.
    """

    def __init__(self, parent=None):
        super(Ui_Form, self).__init__(parent)
        # Build the generated widget tree onto this instance itself.
        self.setupUi(self)

    def setupUi(self, Form):
        """Create the five widget/button/label groups on `Form`."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(611, 591)
        Form.setStyleSheet(_fromUtf8("background-color:rgb(195,195,135);"))
        # Group 1: "Extract Features" (top-left).
        self.widget = QtGui.QWidget(Form)
        self.widget.setGeometry(QtCore.QRect(50, 20, 171, 241))
        self.widget.setStyleSheet(_fromUtf8("background-color:rgb(195,195,135)"))
        self.widget.setObjectName(_fromUtf8("widget"))
        self.label = QtGui.QLabel(self.widget)
        self.label.setGeometry(QtCore.QRect(20, 170, 201, 71))
        self.label.setStyleSheet(_fromUtf8("font: 15pt \"Ubuntu Condensed\";color:rgb(45,60,45)"))
        self.label.setObjectName(_fromUtf8("label"))
        self.pushButton = QtGui.QPushButton(self.widget)
        self.pushButton.setGeometry(QtCore.QRect(10, 20, 141, 141))
        self.pushButton.setObjectName(_fromUtf8("pushButton"))
        # Group 2: "Visualize deep network features" (top-middle).
        self.widget_2 = QtGui.QWidget(Form)
        self.widget_2.setGeometry(QtCore.QRect(230, 20, 171, 241))
        self.widget_2.setStyleSheet(_fromUtf8("background-color:rgb(195,195,135)"))
        self.widget_2.setObjectName(_fromUtf8("widget_2"))
        self.pushButton_2 = QtGui.QPushButton(self.widget_2)
        self.pushButton_2.setGeometry(QtCore.QRect(10, 20, 141, 141))
        self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
        self.label_2 = QtGui.QLabel(self.widget_2)
        self.label_2.setGeometry(QtCore.QRect(10, 170, 151, 71))
        self.label_2.setStyleSheet(_fromUtf8("font: 15pt \"Ubuntu Condensed\";color:rgb(45,60,45)"))
        self.label_2.setObjectName(_fromUtf8("label_2"))
        # Group 3: "Evaluate pre-trained net" (top-right).
        self.widget_3 = QtGui.QWidget(Form)
        self.widget_3.setGeometry(QtCore.QRect(410, 20, 171, 241))
        self.widget_3.setStyleSheet(_fromUtf8("background-color:rgb(195,195,135)"))
        self.widget_3.setObjectName(_fromUtf8("widget_3"))
        self.pushButton_3 = QtGui.QPushButton(self.widget_3)
        self.pushButton_3.setGeometry(QtCore.QRect(10, 20, 141, 141))
        self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
        self.label_3 = QtGui.QLabel(self.widget_3)
        self.label_3.setGeometry(QtCore.QRect(10, 170, 151, 71))
        self.label_3.setStyleSheet(_fromUtf8("font: 15pt \"Ubuntu Condensed\";color:rgb(45,60,45)"))
        self.label_3.setObjectName(_fromUtf8("label_3"))
        # Group 4: "Model weight surgery" (bottom-middle).
        # NOTE(review): widget_4/pushButton_4/label_4 below reuse the
        # objectName strings "widget_3"/"pushButton_3"/"label_3" (and so do
        # the *_5 widgets) -- duplicate objectNames break findChild lookups;
        # verify whether this was intended by the generator.
        self.widget_4 = QtGui.QWidget(Form)
        self.widget_4.setGeometry(QtCore.QRect(230, 270, 171, 241))
        self.widget_4.setStyleSheet(_fromUtf8("background-color:rgb(195,195,135)"))
        self.widget_4.setObjectName(_fromUtf8("widget_3"))
        self.pushButton_4 = QtGui.QPushButton(self.widget_4)
        self.pushButton_4.setGeometry(QtCore.QRect(10, 20, 141, 141))
        self.pushButton_4.setObjectName(_fromUtf8("pushButton_3"))
        self.label_4 = QtGui.QLabel(self.widget_4)
        self.label_4.setGeometry(QtCore.QRect(10, 170, 151, 71))
        self.label_4.setStyleSheet(_fromUtf8("font: 15pt \"Ubuntu Condensed\";color:rgb(45,60,45)"))
        self.label_4.setObjectName(_fromUtf8("label_3"))
        # Group 5: "Evaluate pre-trained SVM" (bottom-left).
        self.widget_5 = QtGui.QWidget(Form)
        self.widget_5.setGeometry(QtCore.QRect(50, 270, 171, 241))
        self.widget_5.setStyleSheet(_fromUtf8("background-color:rgb(195,195,135)"))
        self.widget_5.setObjectName(_fromUtf8("widget_3"))
        self.pushButton_5 = QtGui.QPushButton(self.widget_5)
        self.pushButton_5.setGeometry(QtCore.QRect(10, 20, 141, 141))
        self.pushButton_5.setObjectName(_fromUtf8("pushButton_3"))
        self.label_5 = QtGui.QLabel(self.widget_5)
        self.label_5.setGeometry(QtCore.QRect(10, 170, 151, 71))
        self.label_5.setStyleSheet(_fromUtf8("font: 15pt \"Ubuntu Condensed\";color:rgb(45,60,45)"))
        self.label_5.setObjectName(_fromUtf8("label_3"))
        self.widget_4.hide()  # TODO: decide whether widget_4 should be removed for good
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        """Assign the (translatable) label texts and load the button icons."""
        Form.setWindowTitle(_translate("Form", "Form", None))
        self.label.setText(_translate("Form", "Extract Features\n"
                                      "via pre-trained net", None))
        self.setPushButtonIcons()
        # "Visuallize" below is the string shipped in the UI; left as-is
        # because changing it alters the rendered label text.
        self.label_2.setText(_translate("Form", "Visuallize deep\n"
                                        "network Features", None))
        self.label_3.setText(_translate("Form", "Evaluate \n"
                                        "pre-trained Net", None))
        self.label_4.setText(_translate("Form", "Model Weight \n"
                                        "Surgery", None))
        self.label_5.setText(_translate("Form", "Evaluate \n"
                                        "pre-trained SVM", None))

    def setPushButtonIcons(self):
        """Load each button's icon from $EXPRESSO_ROOT/res/exp/."""
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8(root+"/res/exp/extractFeatures.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton.setIcon(icon)
        self.pushButton.setIconSize(QtCore.QSize(141, 141))
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(_fromUtf8(root+"/res/exp/visuallize.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_2.setIcon(icon1)
        self.pushButton_2.setIconSize(QtCore.QSize(141, 141))
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(_fromUtf8(root+"/res/exp/accuracy.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_3.setIcon(icon2)
        self.pushButton_3.setIconSize(QtCore.QSize(141, 141))
        # Buttons 4 and 5 currently reuse the accuracy icon.
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap(_fromUtf8(root+"/res/exp/accuracy.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_4.setIcon(icon3)
        self.pushButton_4.setIconSize(QtCore.QSize(141, 141))
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap(_fromUtf8(root+"/res/exp/accuracy.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton_5.setIcon(icon4)
        self.pushButton_5.setIconSize(QtCore.QSize(141, 141))

    def clickSlot(self):
        """Swap the first button's icon (click feedback)."""
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(_fromUtf8(root+"/src/train/images/visuallize.jpg")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.pushButton.setIcon(icon1)
        self.pushButton.setIconSize(QtCore.QSize(141, 141))
if __name__ == "__main__":
    # Standalone preview of the form.
    import sys
    app = QtGui.QApplication(sys.argv)
    Form = QtGui.QWidget()
    ui = Ui_Form()
    # NOTE(review): Ui_Form.__init__ already ran setupUi on `ui` itself;
    # here the same UI is built a second time onto a separate bare QWidget,
    # and only that `Form` widget is shown.
    ui.setupUi(Form)
    Form.show()
    sys.exit(app.exec_())
| [
"[email protected]"
]
| |
1e657850c5da26a216c9f3c344f970b5484ce00d | 56cf9276e7e503cf0121151fbcdfc7d299ddd185 | /gbp/scripts/import_srpm.py | 5771d30c236b42104cd07cc5f2a0687e817e8249 | []
| no_license | maxyz/git-buildpackage | f1aeb1da37b0ddf653886bf03a757d4480880b6c | bf46e26ff985802277fa500a8ecc515acc5da093 | refs/heads/master | 2021-01-15T13:42:48.804282 | 2016-08-15T06:45:08 | 2016-08-15T06:46:06 | 66,767,668 | 0 | 0 | null | 2016-08-28T12:45:56 | 2016-08-28T12:45:55 | null | UTF-8 | Python | false | false | 19,145 | py | # vim: set fileencoding=utf-8 :
#
# (C) 2006,2007,2011 Guido Guenther <[email protected]>
# (C) 2012 Intel Corporation <[email protected]>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, please see
# <http://www.gnu.org/licenses/>
"""Import an RPM source package into a Git repository"""
import sys
import re
import os
import glob
import time
import shutil
import errno
from six.moves.urllib.request import urlopen
from six.moves import urllib
import gbp.command_wrappers as gbpc
from gbp.tmpfile import init_tmpdir, del_tmpdir, tempfile
from gbp.rpm import (parse_srpm, guess_spec, SpecFile, NoSpecError,
RpmUpstreamSource, compose_version_str)
from gbp.rpm.git import (RpmGitRepository, GitRepositoryError)
from gbp.git.modifier import GitModifier
from gbp.config import (GbpOptionParserRpm, GbpOptionGroup,
no_upstream_branch_msg)
from gbp.errors import GbpError
from gbp.scripts.common import ExitCodes
import gbp.log
from gbp.pkg import parse_archive_filename
# User-facing error shown when the configured packaging branch is missing;
# '%s' is filled with the branch name.  (Typo fix: "reate" -> "create".)
no_packaging_branch_msg = """
Repository does not have branch '%s' for packaging/distribution sources.
You need to create it or use --packaging-branch to specify it.
"""
class SkipImport(Exception):
    """Raised when there was nothing to import."""
def download_file(target_dir, url):
    """Download `url` into `target_dir` and return the local file path.

    The file is streamed to disk instead of being buffered whole in
    memory, and the URL connection is closed deterministically (the
    previous version leaked the `urlopen` handle).

    Raises:
        GbpError: if the HTTP request fails or the URL is unreachable.
    """
    from contextlib import closing

    gbp.log.info("Downloading '%s'..." % url)
    local_fn = os.path.join(target_dir, os.path.basename(url))
    try:
        # closing() works for both context-manager-capable responses and
        # older file-like responses that only expose close().
        with closing(urlopen(url)) as urlobj:
            with open(local_fn, "wb") as local_file:
                shutil.copyfileobj(urlobj, local_file)
    except urllib.error.HTTPError as err:
        raise GbpError("Download failed: %s" % err)
    except urllib.error.URLError as err:
        raise GbpError("Download failed: %s" % err.reason)
    return local_fn
def download_source(pkg):
    """Download package from a remote location.

    `pkg` is either a URL (fetched directly) or a bare package name
    (resolved through yumdownloader).  Returns the path of the downloaded
    .src.rpm inside a fresh temporary directory.
    """
    # Anything with a scheme prefix (http://, ftp://, ...) is a URL.
    if re.match(r'[a-z]{1,5}://', pkg):
        mode = 'python urllib'
    else:
        mode = 'yumdownloader'
    tmpdir = tempfile.mkdtemp(prefix='download_')
    gbp.log.info("Trying to download '%s' using '%s'..." % (pkg, mode))
    if mode == 'yumdownloader':
        # NOTE(review): '--destdir=' and '.' are passed as two separate
        # arguments; verify yumdownloader accepts this split form (versus a
        # single '--destdir=.').
        gbpc.RunAtCommand('yumdownloader',
                          ['--source', '--destdir=', '.', pkg],
                          shell=False)(dir=tmpdir)
    else:
        download_file(tmpdir, pkg)
    # The freshly created tmpdir holds exactly the downloaded srpm; raises
    # IndexError if nothing was downloaded.
    srpm = glob.glob(os.path.join(tmpdir, '*.src.rpm'))[0]
    return srpm
def committer_from_author(author, options):
    """Build the committer identity, optionally mirroring the author.

    With ``--author-is-committer`` the author's name and email are copied
    onto the committer; otherwise an empty GitModifier is returned.
    """
    committer = GitModifier()
    if not options.author_is_committer:
        return committer
    committer.name = author.name
    committer.email = author.email
    return committer
def move_tag_stamp(repo, tag_format, tag_str_fields):
    """Rename an existing tag out of the way by appending a timestamp."""
    stamped_format = '%s~%d' % (tag_format, int(time.time()))
    repo.move_tag(repo.version_to_tag(tag_format, tag_str_fields),
                  repo.version_to_tag(stamped_format, tag_str_fields))
def set_bare_repo_options(options):
    """Disable options that cannot work when importing into a bare repository.

    pristine-tar is forced off.  (Fix: the previous log message contained a
    stray unfilled '%s' placeholder that was printed literally.)
    """
    if options.pristine_tar:
        gbp.log.info("Bare repository: setting option '--no-pristine-tar'")
        options.pristine_tar = False
def force_to_branch_head(repo, branch):
    """Check out *branch*, hard-resetting HEAD first when it is already current."""
    currently_checked_out = (repo.get_branch() == branch)
    if currently_checked_out:
        # Update HEAD if we modified the checked out branch
        repo.force_head(branch, hard=True)
    # Checkout packaging branch
    repo.set_branch(branch)
def build_parser(name):
    """Construct the command line parser for gbp import-srpm.

    Returns the configured GbpOptionParserRpm, or None when reading the
    gbp configuration failed (the error is logged).
    """
    try:
        parser = GbpOptionParserRpm(command=os.path.basename(name),
                                    prefix='',
                                    usage='%prog [options] /path/to/package'
                                          '.src.rpm')
    except GbpError as err:
        gbp.log.err(err)
        return None
    # Option groups; the registration order below controls --help layout.
    import_group = GbpOptionGroup(parser, "import options",
                                  "pristine-tar and filtering")
    tag_group = GbpOptionGroup(parser, "tag options",
                               "options related to git tag creation")
    branch_group = GbpOptionGroup(parser, "version and branch naming options",
                                  "version number and branch layout options")
    for group in [import_group, branch_group, tag_group]:
        parser.add_option_group(group)
    # Top-level options.
    parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
                      default=False, help="verbose command execution")
    parser.add_config_file_option(option_name="color", dest="color",
                                  type='tristate')
    parser.add_config_file_option(option_name="color-scheme",
                                  dest="color_scheme")
    parser.add_config_file_option(option_name="tmp-dir", dest="tmp_dir")
    parser.add_config_file_option(option_name="vendor", action="store",
                                  dest="vendor")
    parser.add_option("--download", action="store_true", dest="download",
                      default=False, help="download source package")
    # Branch layout options.
    branch_group.add_config_file_option(option_name="packaging-branch",
                                        dest="packaging_branch")
    branch_group.add_config_file_option(option_name="upstream-branch",
                                        dest="upstream_branch")
    branch_group.add_boolean_config_file_option(
        option_name="create-missing-branches",
        dest="create_missing_branches")
    branch_group.add_option("--orphan-packaging", action="store_true",
                            dest="orphan_packaging", default=False,
                            help="The packaging branch doesn't base on upstream")
    branch_group.add_option("--native", action="store_true",
                            dest="native", default=False,
                            help="This is a dist native package, no separate "
                                 "upstream branch")
    # Tag creation options.
    tag_group.add_boolean_config_file_option(option_name="sign-tags",
                                             dest="sign_tags")
    tag_group.add_config_file_option(option_name="keyid",
                                     dest="keyid")
    tag_group.add_config_file_option(option_name="packaging-tag",
                                     dest="packaging_tag")
    tag_group.add_config_file_option(option_name="upstream-tag",
                                     dest="upstream_tag")
    # Import behaviour options.
    import_group.add_config_file_option(option_name="filter",
                                        dest="filters", action="append")
    import_group.add_boolean_config_file_option(option_name="pristine-tar",
                                                dest="pristine_tar")
    import_group.add_option("--allow-same-version", action="store_true",
                            dest="allow_same_version", default=False,
                            help="allow to import already imported version")
    import_group.add_boolean_config_file_option(
        option_name="author-is-committer",
        dest="author_is_committer")
    import_group.add_config_file_option(option_name="packaging-dir",
                                        dest="packaging_dir")
    return parser
def parse_args(argv):
    """Split *argv* into ``(options, args)``.

    Builds the option parser from the program name in ``argv[0]``; returns
    ``(None, None)`` when the parser could not be constructed.  On success
    it also configures gbp's logging (color, verbosity, color scheme) from
    the parsed options before handing them back.
    """
    opt_parser = build_parser(argv[0])
    if not opt_parser:
        return None, None
    options, args = opt_parser.parse_args(argv[1:])
    gbp.log.setup(options.color, options.verbose, options.color_scheme)
    return options, args
def main(argv):
    """Main function of the git-import-srpm script.

    Imports a source rpm (or an already-unpacked srpm directory, or a bare
    spec file) into a git repository: upstream sources go onto the upstream
    branch, packaging files onto the packaging branch, and both get tagged.
    Returns a shell-style exit code (0 on success, non-zero on failure).
    """
    dirs = dict(top=os.path.abspath(os.curdir))
    ret = 0
    skipped = False
    options, args = parse_args(argv)
    if not options:
        return ExitCodes.parse_error
    if len(args) != 1:
        gbp.log.err("Need to give exactly one package to import. Try --help.")
        return 1
    try:
        dirs['tmp_base'] = init_tmpdir(options.tmp_dir, 'import-srpm_')
    except GbpError as err:
        gbp.log.err(err)
        return 1
    try:
        srpm = args[0]
        if options.download:
            srpm = download_source(srpm)
        # Real srpm, we need to unpack, first
        true_srcrpm = False
        if not os.path.isdir(srpm) and not srpm.endswith(".spec"):
            src = parse_srpm(srpm)
            true_srcrpm = True
            dirs['pkgextract'] = tempfile.mkdtemp(prefix='pkgextract_')
            gbp.log.info("Extracting src rpm to '%s'" % dirs['pkgextract'])
            src.unpack(dirs['pkgextract'])
            preferred_spec = src.name + '.spec'
            srpm = dirs['pkgextract']
        elif os.path.isdir(srpm):
            preferred_spec = os.path.basename(srpm.rstrip('/')) + '.spec'
        else:
            preferred_spec = None
        # Find and parse spec file
        if os.path.isdir(srpm):
            gbp.log.debug("Trying to import an unpacked srpm from '%s'" % srpm)
            dirs['src'] = os.path.abspath(srpm)
            spec = guess_spec(srpm, True, preferred_spec)
        else:
            gbp.log.debug("Trying to import an srpm from '%s' with spec "\
                          "file '%s'" % (os.path.dirname(srpm), srpm))
            dirs['src'] = os.path.abspath(os.path.dirname(srpm))
            spec = SpecFile(srpm)
        # Check the repository state
        try:
            repo = RpmGitRepository('.')
            is_empty = repo.is_empty()
            (clean, out) = repo.is_clean()
            if not clean and not is_empty:
                gbp.log.err("Repository has uncommitted changes, commit "
                            "these first: ")
                raise GbpError(out)
        except GitRepositoryError:
            # No repo in the working directory: create a fresh one named
            # after the package.
            gbp.log.info("No git repository found, creating one.")
            is_empty = True
            repo = RpmGitRepository.create(spec.name)
        os.chdir(repo.path)
        if repo.bare:
            set_bare_repo_options(options)
        # Create more tempdirs
        dirs['origsrc'] = tempfile.mkdtemp(prefix='origsrc_')
        dirs['packaging_base'] = tempfile.mkdtemp(prefix='packaging_')
        dirs['packaging'] = os.path.join(dirs['packaging_base'],
                                         options.packaging_dir)
        try:
            os.mkdir(dirs['packaging'])
        except OSError as err:
            # The directory may legitimately exist already; anything else
            # is a real error.
            if err.errno != errno.EEXIST:
                raise
        if true_srcrpm:
            # For true src.rpm we just take everything
            files = os.listdir(dirs['src'])
        else:
            # Need to copy files to the packaging directory given by caller
            files = [os.path.basename(patch.path) \
                     for patch in spec.patchseries(unapplied=True, ignored=True)]
            for filename in spec.sources().values():
                files.append(os.path.basename(filename))
            files.append(os.path.join(spec.specdir, spec.specfile))
        # Don't copy orig source archive, though
        if spec.orig_src and spec.orig_src['filename'] in files:
            files.remove(spec.orig_src['filename'])
        for fname in files:
            fpath = os.path.join(dirs['src'], fname)
            if os.path.exists(fpath):
                shutil.copy2(fpath, dirs['packaging'])
            else:
                gbp.log.err("File '%s' listed in spec not found" % fname)
                raise GbpError
        # Unpack orig source archive
        if spec.orig_src:
            orig_tarball = os.path.join(dirs['src'], spec.orig_src['filename'])
            sources = RpmUpstreamSource(orig_tarball)
            sources.unpack(dirs['origsrc'], options.filters)
        else:
            sources = None
        # Native packages keep the sources on the packaging branch, so the
        # source tag follows the packaging tag format in that case.
        src_tag_format = options.packaging_tag if options.native \
            else options.upstream_tag
        tag_str_fields = dict(spec.version, vendor=options.vendor.lower())
        src_tag = repo.version_to_tag(src_tag_format, tag_str_fields)
        ver_str = compose_version_str(spec.version)
        if repo.find_version(options.packaging_tag, tag_str_fields):
            gbp.log.warn("Version %s already imported." % ver_str)
            if options.allow_same_version:
                gbp.log.info("Moving tag of version '%s' since import forced" %
                             ver_str)
                move_tag_stamp(repo, options.packaging_tag, tag_str_fields)
            else:
                raise SkipImport
        # A freshly created repo has no branches yet, so they must be
        # created regardless of the command-line option.
        if is_empty:
            options.create_missing_branches = True
        # Determine author and committer info, currently same info is used
        # for both sources and packaging files
        author = None
        if spec.packager:
            match = re.match(r'(?P<name>.*[^ ])\s*<(?P<email>\S*)>',
                             spec.packager.strip())
            if match:
                author = GitModifier(match.group('name'), match.group('email'))
        if not author:
            author = GitModifier()
            gbp.log.debug("Couldn't determine packager info")
        committer = committer_from_author(author, options)
        # Import sources
        if sources:
            src_commit = repo.find_version(src_tag_format, tag_str_fields)
            if not src_commit:
                gbp.log.info("Tag %s not found, importing sources" % src_tag)
                # options.native indexes the pair: False -> upstream branch,
                # True -> packaging branch.
                branch = [options.upstream_branch,
                          options.packaging_branch][options.native]
                if not repo.has_branch(branch):
                    if options.create_missing_branches:
                        gbp.log.info("Will create missing branch '%s'" %
                                     branch)
                    else:
                        gbp.log.err(no_upstream_branch_msg % branch + "\n"
                                    "Also check the --create-missing-branches option.")
                        raise GbpError
                src_vendor = "Native" if options.native else "Upstream"
                msg = "%s version %s" % (src_vendor, spec.upstreamversion)
                src_commit = repo.commit_dir(sources.unpacked,
                                             "Import %s" % msg,
                                             branch,
                                             author=author,
                                             committer=committer,
                                             create_missing_branch=options.create_missing_branches)
                repo.create_tag(name=src_tag,
                                msg=msg,
                                commit=src_commit,
                                sign=options.sign_tags,
                                keyid=options.keyid)
                if not options.native:
                    if options.pristine_tar:
                        # pristine-tar can only store plain tar archives.
                        archive_fmt = parse_archive_filename(orig_tarball)[1]
                        if archive_fmt == 'tar':
                            repo.pristine_tar.commit(orig_tarball,
                                                     'refs/heads/%s' %
                                                     options.upstream_branch)
                        else:
                            gbp.log.warn('Ignoring pristine-tar, %s archives '
                                         'not supported' % archive_fmt)
        else:
            gbp.log.info("No orig source archive imported")
        # Import packaging files. For native packages we assume that also
        # packaging files are found in the source tarball
        if not options.native or not sources:
            gbp.log.info("Importing packaging files")
            branch = options.packaging_branch
            if not repo.has_branch(branch):
                if options.create_missing_branches:
                    gbp.log.info("Will create missing branch '%s'" % branch)
                else:
                    gbp.log.err(no_packaging_branch_msg % branch + "\n"
                                "Also check the --create-missing-branches "
                                "option.")
                    raise GbpError
            tag = repo.version_to_tag(options.packaging_tag, tag_str_fields)
            msg = "%s release %s" % (options.vendor, ver_str)
            if options.orphan_packaging or not sources:
                # Orphan packaging: commit the packaging dir on its own,
                # without an upstream parent commit.
                commit = repo.commit_dir(dirs['packaging_base'],
                                         "Import %s" % msg,
                                         branch,
                                         author=author,
                                         committer=committer,
                                         create_missing_branch=options.create_missing_branches)
            else:
                # Copy packaging files to the unpacked sources dir
                try:
                    pkgsubdir = os.path.join(sources.unpacked,
                                             options.packaging_dir)
                    os.mkdir(pkgsubdir)
                except OSError as err:
                    if err.errno != errno.EEXIST:
                        raise
                for fname in os.listdir(dirs['packaging']):
                    shutil.copy2(os.path.join(dirs['packaging'], fname),
                                 pkgsubdir)
                commit = repo.commit_dir(sources.unpacked,
                                         "Import %s" % msg,
                                         branch,
                                         other_parents=[src_commit],
                                         author=author,
                                         committer=committer,
                                         create_missing_branch=options.create_missing_branches)
                # Import patches on top of the source tree
                # (only for non-native packages with non-orphan packaging)
                force_to_branch_head(repo, options.packaging_branch)
            # Create packaging tag
            repo.create_tag(name=tag,
                            msg=msg,
                            commit=commit,
                            sign=options.sign_tags,
                            keyid=options.keyid)
            force_to_branch_head(repo, options.packaging_branch)
    # Map every failure mode to exit status 1; SkipImport is not an error.
    except KeyboardInterrupt:
        ret = 1
        gbp.log.err("Interrupted. Aborting.")
    except gbpc.CommandExecFailed:
        ret = 1
    except GitRepositoryError as err:
        gbp.log.err("Git command failed: %s" % err)
        ret = 1
    except GbpError as err:
        if str(err):
            gbp.log.err(err)
        ret = 1
    except NoSpecError as err:
        gbp.log.err("Failed determine spec file: %s" % err)
        ret = 1
    except SkipImport:
        skipped = True
    finally:
        # Always return to the original cwd and clean up temp dirs.
        os.chdir(dirs['top'])
        del_tmpdir()
    if not ret and not skipped:
        gbp.log.info("Version '%s' imported under '%s'" % (ver_str, spec.name))
    return ret
# Script entry point: the process exit status mirrors main()'s return code.
if __name__ == '__main__':
    sys.exit(main(sys.argv))
# vim:et:ts=4:sw=4:et:sts=4:ai:set list listchars=tab\:»·,trail\:·:
| [
"[email protected]"
]
| |
a18d2854e9b097c3be8c7134d21f2cde9d04db3a | 7aa33a8a8d5360523bf2f6a2ce73f93fd5e63d23 | /robotics/Controll.py | e51768b510a2c08a7bac2113c5a90f9ab486318c | []
| no_license | iamMHZ/image-processing-with-opencv | 33b6fac0d50649c99fe35f078af8a38d53358447 | 7412f182ad564905bf24c8fa30f0492b7eb01bd1 | refs/heads/master | 2021-03-17T16:31:23.640213 | 2020-05-09T10:22:04 | 2020-05-09T10:22:04 | 247,002,943 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 172 | py | import inputs
# List the gamepads the `inputs` library detected on this machine.
print(inputs.devices.gamepads)
# Poll forever, dumping every gamepad event (type, code, value) to stdout.
# NOTE(review): presumably inputs.get_gamepad() blocks until events arrive,
# so the loop only spins as fast as the device produces events - confirm.
while True:
    events = inputs.get_gamepad()
    for event in events:
        print(event.ev_type, event.code, event.state)
| [
"[email protected]"
]
| |
40647bde765a91a69ab9bf788cf3b28a4ec6715a | e811662c890217c77b60aa2e1295dd0f5b2d4591 | /src/problem_763.py | 33eb4a1f687002f6082644d2dd08682d2f076cda | []
| no_license | rewonderful/MLC | 95357f892f8cf76453178875bac99316c7583f84 | 7012572eb192c29327ede821c271ca082316ff2b | refs/heads/master | 2022-05-08T05:24:06.929245 | 2019-09-24T10:35:22 | 2019-09-24T10:35:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 968 | py | #!/usr/bin/env python
# _*_ coding:utf-8 _*_
def partitionLabels(S):
    """Greedily split S into as many parts as possible so that each letter
    appears in at most one part; return the list of part sizes.

    Two-pointer sweep: record every character's last occurrence, then
    extend the current window's right edge to the furthest last-occurrence
    seen so far.  When the scan position reaches that edge, no character in
    the window occurs later, so the window is closed and its size recorded.
    Runs in O(n) time.

    (Original docstring was in Chinese and the final line was corrupted by
    dataset metadata; both are repaired here.)
    """
    last = {char: position for position, char in enumerate(S)}
    start = end = 0
    ans = []
    for position, char in enumerate(S):
        end = max(end, last[char])
        if position == end:
            ans.append(end - start + 1)
            start = end + 1
    return ans
"[email protected]"
]
| |
6aecf7de4273913f02af82ef752225319d622d37 | ddf002d1084d5c63842a6f42471f890a449966ee | /basics/Python/PYTHON --------/Loops/for_perfect_number.py | 12c1710e98af7953b5053badaf4ec9ed6496e5f7 | []
| no_license | RaghavJindal2000/Python | 0ab3f198cbc5559bdf46ac259c7136356f7f09aa | 8e5c646585cff28ba3ad9bd6c384bcb5537d671a | refs/heads/master | 2023-01-01T23:56:02.073029 | 2020-10-18T19:30:01 | 2020-10-18T19:30:01 | 263,262,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | num=int(input("Enter the Number : "))
# Sum every proper divisor of num (num was read from stdin just above).
# Proper divisors of n never exceed n/2, hence the loop bound.
# NOTE(review): the name `sum` shadows the builtin of the same name.
sum=0
for i in range(1,int(num/2)+1):
    if(num%i==0):
        sum=sum+i
# A perfect number equals the sum of its proper divisors (e.g. 6, 28).
if(sum==num):
    print("Perfect Number")
else:
    print("Not Perfect Number")
input() | [
"[email protected]"
]
| |
a77a33ec7d947da341e4206109d82d8d7f44e697 | 11aaeaeb55d587a950456fd1480063e1aed1d9e5 | /.history/test_20190626133340.py | 12865a307e437ef3704eed2ac3124c68bd758365 | []
| no_license | Gr4cchus/Learn-Python-3-The-Hard-Way | 8ce9e68f6a91ea33ea45fe64bfff82d65422c4a8 | f5fa34db16cdd6377faa7fcf45c70f94bb4aec0d | refs/heads/master | 2020-05-17T23:18:29.483160 | 2019-06-26T18:42:52 | 2019-06-26T18:42:52 | 184,023,439 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,413 | py | import random
# # class Foo:
# # answer = 42
# # f1 = Foo()
# # f2 = Foo()
# # print(f1.answer)
# # print(f2.answer)
# # # both will print 42
# # f1.answer = 84
# # Foo.answer = 21
# # print(f1.answer) # 84
# # print(f2.answer) # 21
# class Foo:
# def __init__(self):
# self.answer = 42
# f1 = Foo()
# f2 = Foo()
# # f2.answer = 4000
# Foo.answer = 21
# # f1.answer = 2000
# print(f1.answer)
# print(f2.answer)
# # both will print 42 still
class Scenes(object):
    """Tiny text-adventure world: a handful of rooms, a command table and a
    number-guessing mini-game.

    Fixes vs. the original: ``guessing_game`` had a syntax error
    (``while answer =! n``), compared the raw ``input()`` string against an
    int (the loop could never end), and printed "wrong guess again!" even
    on a correct guess.
    """

    # All location names the player may reference, in presentation order.
    map_list = [
        'room1',
        'room2',
        'finish'
    ]

    def start(self):
        """Describe the starting position and list the destinations."""
        print("You are at the start")
        print("Where would you like to go")
        self.locations()

    def room1(self):
        """Enter room 1 and list the destinations."""
        print("You enter room 1")
        print("Where would you like to go")
        self.locations()

    def room2(self):
        """Enter room 2 and list the destinations."""
        print("You enter room 2")
        print("Where would you like to go")
        self.locations()

    def finish(self):
        """End the game and terminate the interpreter."""
        print("You have finished")
        exit(0)

    def locations(self):
        """Print every known location, one per line."""
        print("def locations:", self.map_list)
        for i in self.map_list:
            print(i)

    # Dispatch table from command string to (unbound) room handler.  It is
    # defined after the methods so the names above are already bound.
    cmd = {
        'room1': room1,
        'room2': room2,
    }

    def guessing_game(self):
        """Force the player to guess a random number in 1-4 before passing."""
        n = random.randint(1, 4)
        print("Oh no a mini-game.")
        print("Guess the number between 1-4. To pass")
        answer = 0
        while answer != n:
            # Convert to int so the comparison with n can ever succeed.
            answer = int(input("> "))
            if answer != n:
                print("wrong guess again!")
        print("Success")
# class Map(Scenes):
# a = Scenes()
# map_dict = {
# 'room1': a.room1(),
# 'room2': a.room2(),
# }
# class Engine():
# def __init__(self, map):
# self.map = map
# def play(self):
# while True:
# # a = self.map.dict_locations
# print('yes')
# Game driver: create the world, show the starting room, then run a simple
# read-eval loop over player commands.
thescenes = Scenes()
thescenes.start()
while True:
    action = input("> ")
    if action in thescenes.map_list:
        print("success")
        # Dispatch to the matching bound method.  The original indexed
        # map_list (a list) with a string, which raised TypeError on every
        # valid command; each map_list entry names a Scenes method, so
        # getattr resolves it ('finish' included).
        getattr(thescenes, action)()
| [
"[email protected]"
]
| |
1f79efdb1f12760d507a1294acfc682189e2cc4f | 200abee8ebb5fa255e594c8d901c8c68eb9c1a9c | /venv/01_Stepik/Python_Osnovi_i_primenenie/2.3_2.py | 50544368335316c290b184d30ded2008229713e4 | []
| no_license | Vestenar/PythonProjects | f083cbc07df57ea7a560c6b18efed2bb0dc42efb | f8fdf9faff013165f8d835b0ccb807f8bef6dac4 | refs/heads/master | 2021-07-20T14:14:15.739074 | 2019-03-12T18:05:38 | 2019-03-12T18:05:38 | 163,770,129 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 372 | py | import itertools
def primes():
    """Yield the prime numbers 2, 3, 5, 7, ... indefinitely."""
    num = 2
    while True:
        if is_prime(num):
            yield num
        num += 1


def is_prime(num):
    """Return True if num is a prime number.

    Fixes the original, which reported every number below 2 (e.g. 1) as
    prime, and trial-divided all the way up to num // 2 instead of only
    up to sqrt(num).
    """
    if num < 2:
        return False
    if num == 2:
        return True
    if num % 2 == 0:
        return False
    # Even divisors are already excluded, so test odd divisors only, up to
    # and including floor(sqrt(num)).
    for divisor in range(3, int(num ** 0.5) + 1, 2):
        if num % divisor == 0:
            return False
    return True
print(list(itertools.takewhile(lambda x : x <= 31, primes()))) | [
"[email protected]"
]
| |
e3740376355a7ad6d32d7fb3097ea9e1f04a6db2 | 4df3712caff818c0554e7fbe4b97dee5fcfd8675 | /common/sendMail.py | e8175fd9a7b1c03e70da7d866819a40cdff5ba85 | []
| no_license | Qingyaya/interface | 456057a740bd77ba6c38eda27dd1aef658e0add9 | 3ae37816f52ad8c45e192596a854848d8e546b14 | refs/heads/master | 2020-03-22T07:16:04.171904 | 2018-12-05T05:20:25 | 2018-12-05T05:20:25 | 139,690,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,523 | py |
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import smtplib
from common.ReadConfig import ReadConfig
from common.Log import Log
# Module-level singletons shared by send_mail(): project logger and
# config-file reader (SMTP settings come from the [email] section).
log=Log()
rc=ReadConfig()
def send_mail(report_file):
    """Email the HTML test report at *report_file* as both body and attachment.

    SMTP settings (sender, password, receivers, server, port) come from the
    project config via the module-level ReadConfig instance.  Falls back
    from SMTP-over-SSL to a plain SMTP connection; success or failure is
    logged rather than raised.

    NOTE(review): `receiver` is eval()'d - the config value is expected to
    be a Python list literal; eval on an untrusted config file is risky.
    """
    sender = rc.get_email('sender')
    psw = rc.get_email('psw')
    receiver = rc.get_email('receiver')
    smtpserver = rc.get_email('smtp_server')
    port = rc.get_email('port')
    # Read the report once and reuse it for both the body and the
    # attachment (the original re-opened the file and leaked the handle).
    with open(report_file, 'rb') as f:
        mailbody = f.read()
    # Build the mail content
    msg = MIMEMultipart()
    body = MIMEText(mailbody, _subtype='html', _charset='utf-8')
    msg['Subject'] = u'自动化测试报告'
    msg['from'] = sender
    msg['To'] = ', '.join(eval(receiver))
    msg.attach(body)
    # Attach the report file
    att = MIMEText(mailbody, 'base64', 'utf-8')
    att['Content-Type'] = 'application/octet-stream'
    att['Content-Disposition'] = 'attachment; filename = "TestReport.html"'
    msg.attach(att)
    try:
        smtp = smtplib.SMTP_SSL(smtpserver, port)
    except Exception:
        # SSL connection failed - retry with a plain SMTP connection.
        smtp = smtplib.SMTP()
        smtp.connect(smtpserver, port)
    # Log in and send
    try:
        smtp.login(sender, psw)
        smtp.sendmail(sender, eval(receiver), msg.as_string())
        log.info('Send mail Success!!! test report email has send out!')
    except Exception as e:
        log.error('Send Mail Failed !!! error: %s' %e)
    smtp.quit()
# Manual smoke test: send a previously generated report when the module is
# run directly (the hard-coded path is developer-machine specific).
if __name__ == '__main__':
    report_file='E:\\IDScloud_ui_demo\\report\\20180517\\20180517100220.html'
    send_mail(report_file)
| [
"[email protected]"
]
| |
3035e52b9cc917ae6870cd17760f97e41ca9995c | b5a9d42f7ea5e26cd82b3be2b26c324d5da79ba1 | /tensorflow/python/keras/applications/mobilenet.py | 97c6b85882e6ea224b0201820317c92823c32ddd | [
"Apache-2.0"
]
| permissive | uve/tensorflow | e48cb29f39ed24ee27e81afd1687960682e1fbef | e08079463bf43e5963acc41da1f57e95603f8080 | refs/heads/master | 2020-11-29T11:30:40.391232 | 2020-01-11T13:43:10 | 2020-01-11T13:43:10 | 230,088,347 | 0 | 0 | Apache-2.0 | 2019-12-25T10:49:15 | 2019-12-25T10:49:14 | null | UTF-8 | Python | false | false | 1,662 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=invalid-name
"""MobileNet v1 models for Keras.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from keras_applications import mobilenet
from tensorflow.python.keras.applications import keras_modules_injection
from tensorflow.python.util.tf_export import keras_export
@keras_export('keras.applications.mobilenet.MobileNet',
              'keras.applications.MobileNet')
@keras_modules_injection
def MobileNet(*args, **kwargs):
  """Thin tf.keras wrapper: delegates to keras_applications' MobileNet,
  with the keras_modules_injection decorator supplying the TF backend."""
  return mobilenet.MobileNet(*args, **kwargs)
@keras_export('keras.applications.mobilenet.decode_predictions')
@keras_modules_injection
def decode_predictions(*args, **kwargs):
  """Thin tf.keras wrapper around keras_applications'
  mobilenet.decode_predictions."""
  return mobilenet.decode_predictions(*args, **kwargs)
@keras_export('keras.applications.mobilenet.preprocess_input')
@keras_modules_injection
def preprocess_input(*args, **kwargs):
  """Thin tf.keras wrapper around keras_applications'
  mobilenet.preprocess_input."""
  return mobilenet.preprocess_input(*args, **kwargs)
| [
"[email protected]"
]
| |
b516fc14e72cd98ba60397e18718e0b2b396a2e6 | b43cee0973a455a58b74233d4e02d522587f93ae | /skillbox/basic/module22/war_peace.py | 1871dfaea75c70bd1b4c14d260a3e1c153729316 | []
| no_license | ivadimn/py-input | 5861cc92758378f44433bd6b1af7ba78da04d1c0 | bbfdd74c4dffe66440490d79082de2c0318e5027 | refs/heads/master | 2023-08-15T03:34:01.916026 | 2023-07-24T14:48:08 | 2023-07-24T14:48:08 | 202,401,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,134 | py | import zipfile
import string
def frequency(text: str) -> dict:
    """Map each distinct character of *text* to its relative frequency.

    The returned dict is ordered by ascending frequency; characters with
    equal frequency stay in alphabetical order because sorted() is stable
    and the intermediate dict is built over sorted characters.  An empty
    string yields an empty dict instead of raising ZeroDivisionError as
    the original did.
    """
    if not text:
        return {}
    len_text = len(text)
    freq = {ch: text.count(ch) / len_text for ch in sorted(set(text))}
    return {key: freq[key] for key in sorted(freq, key=freq.get)}
# Characters stripped from the text before the frequency analysis:
# punctuation, digits, assorted Unicode quotes/dashes and whitespace.
exclude = string.punctuation + " 0123456789" + \
          "\u3002\u201e\u201c\u201f\u201d\u301e\u301f\u00ab\u00bb\u00b7\u00a8" + \
          "\u2116\u00a0\u00b0\u00ac\u0227\u2007\u2026\u2012\u2013\u2014\n\r\t"
table = "".maketrans("", "", exclude)
zip_file = zipfile.ZipFile("voyna-i-mir.zip")
print(zip_file.namelist())
text = ""
for fileName in zip_file.namelist():
    # `data` (renamed from `bytes`, which shadowed the builtin).
    data = zip_file.read(fileName)
    content = data.decode("UTF-8")
    content = content.translate(table)
    # Append the cleaned text of each archive member.  The original did
    # `text = text.join(content)`, which interleaves the previous text
    # between the characters of `content` instead of concatenating.
    text += content
zip_file.close()
freq_table = frequency(text)
freq_file = open("wp_analysis.txt", "w")
print("\nСдержимое файла wp_analysis.txt: \n")
for k, v in freq_table.items():
    if k.isalpha():
        line = "{0} {1}\n".format(k, v)
    else:
        # Non-letter survivors are reported by code point for readability.
        line = "{0} {1}\n".format(ord(k), v)
    print(line, end = "")
    freq_file.write(line)
print()
freq_file.close() | [
"[email protected]"
]
| |
d674d9782d314530754af4814fa59a5ad03c66f8 | 630681b5a80acdad9b5597449559ecf89e917aa0 | /env/bin/cftp | e46f526ce10262450281bfedc3754cf60aefe6d6 | []
| no_license | stuartses/trivia | ed5cd090fe7143159c8ed669edd5540de5f9f0f4 | 203b9ff4b3834d4f4a58c23f573187d0f960a64c | refs/heads/master | 2022-12-17T15:57:09.735439 | 2020-09-20T16:32:24 | 2020-09-20T16:32:24 | 296,960,114 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | #!/home/stuartes/repositories/chat/env/bin/python3
# EASY-INSTALL-ENTRY-SCRIPT: 'Twisted==20.3.0','console_scripts','cftp'
__requires__ = 'Twisted==20.3.0'
import re
import sys
from pkg_resources import load_entry_point
# setuptools-generated console entry point: strip any "-script.py"/".exe"
# suffix from argv[0], then hand off to Twisted's registered 'cftp'
# console_scripts entry point and exit with its return code.
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('Twisted==20.3.0', 'console_scripts', 'cftp')()
    )
| [
"[email protected]"
]
| ||
b6357591f310a910f5139ea6c1aafba52ff7d277 | d9d0d3a82d5ba4016097491c276409b9b1ea778a | /Kattis/relocation.py | 99a38f2674f48771aab4f6fdf4bd413803ff3370 | [
"MIT"
]
| permissive | ruidazeng/online-judge | 311b9f2c3120b47da91da2d404e2ea1d9a2a24dd | 6bdf8bbf1af885637dab474d0ccb58aff22a0933 | refs/heads/master | 2022-02-16T00:35:11.852600 | 2022-01-26T02:28:53 | 2022-01-26T02:28:53 | 191,827,952 | 0 | 1 | MIT | 2019-07-31T10:25:36 | 2019-06-13T20:21:18 | Python | UTF-8 | Python | false | false | 273 | py | _, Q = map(int, input().split())
companies = [int(x) for x in input().split()]
for _ in range(Q):
indicator, x, y = map(int, input().split())
if indicator == 1:
companies[x-1] = y
elif indicator == 2:
print(abs(companies[x-1] - companies[y-1])) | [
"[email protected]"
]
| |
9909a4bf17bd4ab682eeda9005e2ed143f162e54 | c913c952cf4019d67f02bf1971917116da375c81 | /Data/OMIMresults/omimResults3600to3620.py | 2a5d59462d0283887c78b87aa40ecad57e1a2ac5 | []
| no_license | jiangchb/OMIMscraping | 57afa5b2f8b7ca975e7459814e0410a872f71990 | 27d4ac8faea526b1c70937317caec064bed00a0a | refs/heads/master | 2022-03-14T21:35:56.102665 | 2019-11-22T15:48:48 | 2019-11-22T15:48:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116,639 | py | omim = {'omim': {
'version': '1.0',
'searchResponse': {
'search': '*',
'expandedSearch': '*:*',
'parsedSearch': '+*:* ()',
'searchSuggestion': None,
'searchSpelling': None,
'filter': '',
'expandedFilter': None,
'fields': '',
'searchReport': None,
'totalResults': 7368,
'startIndex': 3600,
'endIndex': 3619,
'sort': '',
'operator': '',
'searchTime': 2.0,
'clinicalSynopsisList': [
{'clinicalSynopsis': {
'mimNumber': 255990,
'preferredTitle': 'NATHALIE SYNDROME',
'oldFormat': {
'HEENT': 'Deafness {SNOMEDCT:343087000} {ICD10CM:H91.9} {UMLS C0018772,C0011053 HP:0000365} {HPO HP:0000365 C0011053,C0018772,C0339789,C1384666}; Cataract {SNOMEDCT:193570009,420123008,247053007,128306009} {ICD10CM:H26.9} {ICD9CM:366,366.9} {UMLS C4555209,C1690964,C0086543,C1962983 HP:0000518} {HPO HP:0000518 C0086543,C1510497};',
'Muscle': 'Muscular atrophy {SNOMEDCT:88092000} {UMLS C0541794,C0026846 HP:0003202} {HPO HP:0003202 C0234958,C0270948,C0541794,C1843479};',
'Growth': 'Growth retardation {SNOMEDCT:59576002,444896005} {UMLS C0151686 HP:0001510} {HPO HP:0001510 C0151686,C0456070,C0878787,C1837385,C3552463};',
'GU': 'Sexual development retarded {SNOMEDCT:400003000,123526007} {ICD10CM:E30.0} {UMLS C0034012 HP:0000823};',
'Cardiac': 'Abnormal EKG {SNOMEDCT:102594003} {UMLS C0522055 HP:0003115} {HPO HP:0003115 C0522055};',
'Inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899};'
} ,
'oldFormatExists': True,
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': False,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 255995,
'prefix': '#',
'preferredTitle': 'MYOPATHY, CONGENITAL, BAILEY-BLOCH; MYPBB',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthHeight': 'Short stature {SNOMEDCT:422065006,237837007,237836003} {ICD10CM:R62.52,E34.3} {ICD9CM:783.43} {UMLS C0013336,C0349588,C2237041,C2919142 HP:0004322,HP:0003510} {HPO HP:0004322 C0349588}',
'growthOther': 'Poor overall growth {UMLS C3552219}',
'headAndNeckHead': '''Small head circumference {SNOMEDCT:271611007} {UMLS C4551563,C0424688 HP:0000252,HP:0040195} {HPO HP:0000252 C0424688};\nBrachycephaly {SNOMEDCT:13649004} {UMLS C0221356 HP:0000248} {HPO HP:0000248 C0221356,C4072823,C4072824} {EOM ID:babe3e4648eac88a IMG:Brachycephaly-Large-small.jpg}''',
'headAndNeckFace': '''Myopathic facies {SNOMEDCT:26432009} {UMLS C0332615 HP:0002058} {HPO HP:0002058 C0332615};\nMicrognathia {SNOMEDCT:32958008} {UMLS C0025990 HP:0000347} {HPO HP:0000347 C0025990,C0240295,C1857130} {EOM ID:8bbf61b4ad7ca2ef IMG:Micrognathia-small.jpg};\nOral hypotonia {UMLS C3550597};\nMidface hypoplasia {UMLS C1853242 HP:0011800} {HPO HP:0011800 C1853242,C2673410,C4280320,C4280321} {EOM ID:5b7ad34ab35682b5 IMG:Midface_Retrusion-small.jpg}''',
'headAndNeckEars': '''Low-set ears {SNOMEDCT:95515009} {ICD10CM:Q17.4} {UMLS C0239234 HP:0000369} {HPO HP:0000369 C0239234};\nHearing loss, conductive {SNOMEDCT:44057004} {ICD10CM:H90.2} {ICD9CM:389.0,389.00} {UMLS C0018777 HP:0000405} {HPO HP:0000405 C0018777}''',
'headAndNeckEyes': '''Ptosis {SNOMEDCT:11934000,29696001} {ICD10CM:H02.4,H02.40,H02.409} {ICD9CM:374.3,374.30} {UMLS C0005745,C0033377 HP:0000508} {HPO HP:0000508 C0005745} {EOM ID:1bd157b764ec7aea IMG:Ptosis-small.jpg};\nDownslanting palpebral fissures {SNOMEDCT:246800008} {UMLS C0423110 HP:0000494} {HPO HP:0000494 C0423110};\nShort palpebral fissures {SNOMEDCT:246802000} {UMLS C0423112 HP:0012745} {HPO HP:0012745 C0423112};\nTelecanthus {SNOMEDCT:246803005} {UMLS C0423113 HP:0000506} {HPO HP:0000506 C0423113} {EOM ID:55fb0667392bab43 IMG:Telecanthus-small.jpg}''',
'headAndNeckMouth': '''Downturned mouth {UMLS C1866195 HP:0002714} {HPO HP:0002714 C1866195};\nHigh-arched palate {SNOMEDCT:27272007} {ICD10CM:Q38.5} {UMLS C0240635 HP:0000218} {HPO HP:0000218 C0240635};\nCleft palate {SNOMEDCT:87979003,63567004} {ICD10CM:Q35.5,Q35,Q35.9} {ICD9CM:749.0,749.00} {UMLS C2981150,C0008925,C2240378 HP:0000175} {HPO HP:0000175 C0008925,C2981150}''',
'respiratory': 'Restrictive respiratory insufficiency {UMLS C1609528 HP:0002111}',
'abdomenGastrointestinal': 'Poor feeding {SNOMEDCT:78164000,299698007} {ICD10CM:R63.3} {UMLS C0576456,C0232466 HP:0011968} {HPO HP:0011968 C0232466}',
'genitourinaryExternalGenitaliaMale': 'Cryptorchidism {SNOMEDCT:204878001} {ICD10CM:Q53.9} {ICD9CM:752.51} {UMLS C0010417 HP:0000028} {HPO HP:0000028 C0010417}',
'skeletal': 'Joint contractures {SNOMEDCT:7890003} {ICD10CM:M24.5} {ICD9CM:718.40,718.4} {UMLS C0009918 HP:0001371} {HPO HP:0001371 C0009917,C0009918,C0333068,C1850530}',
'skeletalSpine': 'Kyphoscoliosis {SNOMEDCT:405773007,405771009,405772002} {ICD10CM:M41} {UMLS C0575158,C0345392,C0600033 HP:0008453,HP:0002751} {HPO HP:0002751 C0575158}',
'skeletalFeet': 'Talipes deformities {UMLS C2673319}',
'muscleSoftTissue': '''Muscle weakness, congenital {UMLS C2673318};\nMuscle wasting {SNOMEDCT:88092000} {UMLS C0541794,C0026846 HP:0003202} {HPO HP:0003202 C0234958,C0270948,C0541794,C1843479}''',
'neurologicCentralNervousSystem': '''Delayed motor development {UMLS C1854301 HP:0001270} {HPO HP:0001270 C1854301,C4020874};\nMental retardation (rare) {UMLS C1850387} {HPO HP:0001249 C0025362,C0423903,C0917816,C1843367,C3714756,C4020876};\nEnlarged ventricles (in some) {UMLS C3550596} {HPO HP:0002119 C3278923}''',
'neurologicPeripheralNervousSystem': 'Hyporeflexia {SNOMEDCT:22994000,405946002} {UMLS C0151888,C0700078 HP:0001315,HP:0001265} {HPO HP:0001265 C0700078}',
'metabolicFeatures': 'Malignant hyperthermia {SNOMEDCT:405501007,213026003} {ICD10CM:T88.3} {ICD9CM:995.86} {UMLS C0024591 HP:0002047} {HPO HP:0002047 C0024591}',
'laboratoryAbnormalities': '''Serum creatine kinase may be increased {UMLS C1969489};\nIncreased prevalence among the Native American Lumbee Indians {UMLS C3550598}''',
'molecularBasis': 'Caused by mutation in the SH3 and cysteine-rich domains 3 gene (STAC3, {615521.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': True,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': True,
'headAndNeckHeadExists': True,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': True,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': True,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': True,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': True,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': True,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': True,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': False,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256000,
'prefix': '#',
'preferredTitle': 'LEIGH SYNDROME; LS',
'inheritance': '''Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899};\nMitochondrial {SNOMEDCT:312239001,75056005} {UMLS C0887941,C0026237 HP:0001427} {HPO HP:0001427 C0887941}''',
'growthOther': 'Failure to thrive {SNOMEDCT:54840006,433476000,432788009} {ICD10CM:R62.51} {ICD9CM:783.41} {UMLS C2315100,C0015544,C3887638 HP:0001508} {HPO HP:0001508 C0231246,C2315100}',
'headAndNeckEyes': '''Ophthalmoplegia {SNOMEDCT:16110005} {UMLS C0029089 HP:0000602} {HPO HP:0000602 C0029089};\nOptic atrophy {SNOMEDCT:76976005} {ICD10CM:H47.2,H47.20} {ICD9CM:377.10,377.1} {UMLS C0029124 HP:0000648} {HPO HP:0000648 C0029124};\nNystagmus {SNOMEDCT:563001} {ICD10CM:H55.0,H55.00} {ICD9CM:379.50} {UMLS C1963184,C4554036,C0028738 HP:0000639} {HPO HP:0000639 C0028738};\nStrabismus {SNOMEDCT:22066006,128602000} {ICD10CM:H50.40,H50.9} {ICD9CM:378.30} {UMLS C2020541,C1423541,C0038379 HP:0032012,HP:0000486} {HPO HP:0000486 C0038379};\nPtosis {SNOMEDCT:11934000,29696001} {ICD10CM:H02.4,H02.40,H02.409} {ICD9CM:374.3,374.30} {UMLS C0005745,C0033377 HP:0000508} {HPO HP:0000508 C0005745} {EOM ID:1bd157b764ec7aea IMG:Ptosis-small.jpg};\nPigmentary retinopathy {SNOMEDCT:28835009} {ICD10CM:H35.52} {UMLS C4551715,C0035334 HP:0000580,HP:0000510,HP:0000547} {HPO HP:0000580 C0035334}''',
'respiratory': '''Abnormal respiratory patterns {UMLS C1837388 HP:0002793} {HPO HP:0002793 C1837388};\nRespiratory failure {SNOMEDCT:409622000} {ICD10CM:J96.9} {UMLS C4552651,C1145670 HP:0002878} {HPO HP:0002878 C1145670}''',
'skinNailsHairHair': 'Hypertrichosis {SNOMEDCT:29966009,271607001} {ICD10CM:L68.9,L68.3,L68} {UMLS C0020555,C4553009 HP:0000998} {HPO HP:0000998 C0020555}',
'muscleSoftTissue': 'Hypotonia {SNOMEDCT:398152000,398151007} {UMLS C0026827,C1858120 HP:0001290,HP:0001252} {HPO HP:0001290 C1858120}',
'neurologicCentralNervousSystem': '''Psychomotor retardation {SNOMEDCT:398991009} {UMLS C0424230 HP:0025356} {HPO HP:0025356};\nHypotonia {SNOMEDCT:398152000,398151007} {UMLS C0026827,C1858120 HP:0001290,HP:0001252} {HPO HP:0001290 C1858120};\nAtaxia {SNOMEDCT:39384006,85102008,20262006} {ICD10CM:R27.0} {ICD9CM:438.84} {UMLS C0004134,C1135207,C0007758,C4554639 HP:0010867,HP:0001251} {HPO HP:0001251 C0007758};\nDystonia {SNOMEDCT:15802004} {ICD10CM:G24,G24.9} {UMLS C0393593,C0013421 HP:0001332} {HPO HP:0001332 C0013421,C4020871};\nDysarthria {SNOMEDCT:8011004} {ICD9CM:438.13,784.51} {UMLS C0013362,C4553903 HP:0001260} {HPO HP:0001260 C0013362};\nSpasticity {SNOMEDCT:221360009,397790002} {UMLS C0026838,C4553743 HP:0001257} {HPO HP:0001257 C0026838};\nHyperreflexia {SNOMEDCT:86854008} {UMLS C0151889 HP:0001347} {HPO HP:0001347 C0151889};\nSeizures {SNOMEDCT:91175000} {UMLS C0036572 HP:0001250} {HPO HP:0001250 C0014544,C0036572};\nBrainstem abnormalities {UMLS C1850601 HP:0002363};\nMental retardation {SNOMEDCT:110359009,228156007} {ICD9CM:317-319.99} {UMLS C0025362,C3714756 HP:0001249} {HPO HP:0001249 C0025362,C0423903,C0917816,C1843367,C3714756,C4020876};\nLesions in basal ganglia, brainstem, cerebellum, thalamus, spinal cord characterized by demyelination, necrosis, gliosis, spongiosis, and capillary proliferation {UMLS C2673315}''',
'neurologicBehavioralPsychiatricManifestations': 'Emotional lability {SNOMEDCT:18963009} {ICD10CM:R45.86} {ICD9CM:799.24} {UMLS C0085633 HP:0000720,HP:0000712,HP:0001575} {HPO HP:0000712 C0085633}',
'metabolicFeatures': 'Lactic acidosis {SNOMEDCT:91273001} {ICD10CM:E87.2} {UMLS C0001125 HP:0003128} {HPO HP:0003128 C0001125,C0347959}',
'laboratoryAbnormalities': '''Increased serum lactate {UMLS C1836440 HP:0002151} {HPO HP:0002151 C1836440};\nIncreased CSF lactate {UMLS C1167918 HP:0002490} {HPO HP:0002490 C1167918}''',
'miscellaneous': '''Onset usually in infancy or early childhood {UMLS C1846410};\nProgressive disorder, usually with rapid, relentless course {UMLS C1850619};\nClinical heterogeneity {UMLS C1837514 HP:0003812} {HPO HP:0003812 C1837514,C1839039,C1850667,C1866210};\nGenetic heterogeneity (may be caused by mutation in nuclear-encoded or mitochondrial-encoded genes) {UMLS C1850621} {HPO HP:0001425 C0242960};\nSubset of patients have cytochrome c oxidase deficiency (see {220110});\nSee also X-linked Leigh syndrome ({312170});\nSee also French-Canadian type of Leigh syndrome ({220111})''',
'molecularBasis': '''Caused by mutation in the NADH dehydrogenase, subunit 2 gene (MTND2, {516001.0006});\nCaused by mutation in the NADH dehydrogenase, subunit 3 gene (MTND3, {516002.0003});\nCaused by mutation in the NADH dehydrogenase, subunit 5 gene (MTND5, {516005.0003});\nCaused by mutation in the NADH dehydrogenase, subunit 6 gene (MTND6, {516006.0002});\nCaused by mutation in the ATP synthase 6 gene (MTATP6, {516060.0001});\nCaused by mutation in the cytochrome c oxidase III gene (MTCO3, {516050.0005});\nCaused by mutation in the mitochondrial tRNA (valine) gene (MTTV, {590105.0002});\nCaused by mutation in the mitochondrial tRNA (lysine) gene (MTTK, {590060.0001});\nCaused by mutation in the NADH-ubiquinone oxidoreductase 1 alpha subcomplex, 2 gene (NDUFA2, {602137.0001});\nCaused by mutation in the NADH-ubiquinone oxidoreductase 1 alpha subcomplex, 10 gene (NDUFA10, {603835.0001});\nCaused by mutation in the NADH-ubiquinone oxidoreductase 1 alpha subcomplex, 12 gene (NDUFA12, {614530.0001});\nCaused by mutation in the NADH dehydrogenase (ubiquinone) complex I, assembly factor 5 gene (NDUFAF5, {612360.0002});\nCaused by mutation in the NADH dehydrogenase (ubiquinone) Fe-S protein 3 gene (NDUFV3, {603846.0001});\nCaused by mutation in the NADH dehydrogenase (ubiquinone) Fe-S protein 1 gene (NDUFS1, {157655.0001});\nCaused by mutation in the NADH dehydrogenase (ubiquinone) Fe-S protein 4 gene (NDUFS4, {602694.0004});\nCaused by mutation in the NADH dehydrogenase (ubiquinone) Fe-S protein 7 gene (NDUFS7, {601825.0001});\nCaused by mutation in the NADH dehydrogenase (ubiquinone) Fe-S protein 8 gene (NDUFS8, {602141.0001});\nCaused by mutation in the NADH-ubiquinone oxidoreductase 1 alpha subcomplex 9 gene (NDUFA9, {603834.0001});\nCaused by mutation in the succinate dehydrogenase complex, subunit A, flavoprotein gene (SDHA, {600857.0001});\nCaused by mutation in the FAD-dependent oxidoreductase domain-containing protein 1 gene (FOXRED1, {613622.0001});\nCaused 
by mutation in the bcs1, S. cerevisiae, homolog-like gene (BCS1L, {603647.0002});\nCaused by mutation in the surfeit-1 gene (SURF1, {185620.0001});\nCaused by mutation in the cytochrome c oxidase, subunit 15 gene (COX15, {603646.0001});\nCaused by mutation in the C8ORF38 gene (C8ORF38, {612392.0001});\nCaused by mutation in the translational activator of mitochondrially encoded cytochrome c oxidase subunit 1 gene (TACO1, {612958.0001});\nCaused by mutation in the mitochondrial methionyl-tRNA formyltransferase gene (MTFMT, {611766.0001});\nCaused by mutation in the homolog of the S. cerevisiae PET100 gene (PET100, {614770.0001}).''',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': True,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': True,
'voiceExists': False,
'metabolicFeaturesExists': True,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256020,
'preferredTitle': 'NAIL-PATELLA-LIKE RENAL DISEASE',
'oldFormat': {
'GU': 'Nephropathy {SNOMEDCT:90708001} {ICD10CM:N08,N28.9} {UMLS C0022658 HP:0000112} {HPO HP:0000112 C0022658,C1408258}; Glomerulodysplasia; Renal failure {SNOMEDCT:723188008,42399005} {ICD10CM:N19} {ICD9CM:586} {UMLS C1963154,C0035078 HP:0000083} {HPO HP:0000083 C0035078,C1565489,C1839604};',
'Lab': 'Proteinuria {SNOMEDCT:29738008,231860006} {ICD10CM:R80,R80.9} {ICD9CM:791.0} {UMLS C4554346,C1279888,C0033687,C1962972 HP:0000093} {HPO HP:0000093 C0033687}; Renal biopsy EM shows glomerular basement membrane changes like nail-patella syndrome;',
'Nails': 'Normal nails {SNOMEDCT:297988008} {UMLS C0574760};',
'Skel': 'No bone disorder;',
'Inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899};'
} ,
'oldFormatExists': True,
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': True,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': False,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256030,
'prefix': '#',
'preferredTitle': 'NEMALINE MYOPATHY 2; NEM2',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'headAndNeckFace': '''Myopathic facies {SNOMEDCT:26432009} {UMLS C0332615 HP:0002058} {HPO HP:0002058 C0332615};\nFacial muscle weakness {SNOMEDCT:95666008} {ICD10CM:R29.810} {ICD9CM:438.83,781.94} {UMLS C0427055,C4553723 HP:0030319,HP:0007209} {HPO HP:0030319 C4022514};\nLong philtrum (severe form) {UMLS C4692605} {HPO HP:0000343 C1865014} {EOM ID:e1d74175c310388d IMG:Philtrum,Long-small.jpg}''',
'headAndNeckEars': 'Low-set ears (severe form) {UMLS C1850576} {HPO HP:0000369 C0239234}',
'headAndNeckEyes': 'Hypertelorism (severe form) {UMLS C4692606} {HPO HP:0000316 C0020534} {EOM ID:71d9f1be67c7f8b6 IMG:Eyes,Widely_Spaced-small.jpg}',
'headAndNeckMouth': '''High-arched palate {SNOMEDCT:27272007} {ICD10CM:Q38.5} {UMLS C0240635 HP:0000218} {HPO HP:0000218 C0240635};\nCleft palate (severe form) {UMLS C1850578} {HPO HP:0000175 C0008925,C2981150}''',
'headAndNeckNeck': 'Neck flexor muscle weakness {UMLS C1843637 HP:0003722} {HPO HP:0003722 C1843637}',
'respiratory': '''Respiratory insufficiency due to muscle weakness {UMLS C3806467 HP:0002747} {HPO HP:0002747 C3806467};\nAbsence of spontaneous respiration (severe form) {UMLS C3806473} {HPO HP:0002104 C0003578}''',
'chestExternalFeatures': 'Chest deformities {UMLS C0238983}',
'abdomenGastrointestinal': '''Poor feeding {SNOMEDCT:78164000,299698007} {ICD10CM:R63.3} {UMLS C0576456,C0232466 HP:0011968} {HPO HP:0011968 C0232466};\nDysphagia {SNOMEDCT:40739000,288939007} {ICD10CM:R13.1,R13.10} {ICD9CM:787.2,787.20} {UMLS C0011168,C1560331 HP:0002015,HP:0200136} {HPO HP:0002015 C0011168}''',
'skeletal': '''Joint contractures {SNOMEDCT:7890003} {ICD10CM:M24.5} {ICD9CM:718.40,718.4} {UMLS C0009918 HP:0001371} {HPO HP:0001371 C0009917,C0009918,C0333068,C1850530};\nJoint deformities (may develop over time) {UMLS C1850588};\nArthrogryposis (severe form) {UMLS C1850589} {HPO HP:0002804 C0003886}''',
'skeletalSpine': '''Hyperlordosis {SNOMEDCT:249710008,61960001} {ICD10CM:M40.5} {UMLS C0024003 HP:0003307} {HPO HP:0003307 C0024003};\nScoliosis (onset around puberty) {UMLS C1850590} {HPO HP:0002650 C0037932,C0700208};\nRigid spine {UMLS C1858025 HP:0003306} {HPO HP:0003306 C1858025}''',
'skeletalHands': 'Clenched hands (severe form) {UMLS C4692607} {HPO HP:0001188 C0239815}',
'skeletalFeet': 'Talipes (severe form) {UMLS C4692608} {HPO HP:0001883 C1301937,C3552713}',
'muscleSoftTissue': '''Hypotonia, neonatal {SNOMEDCT:205294008} {UMLS C2267233 HP:0001319} {HPO HP:0001319 C2267233};\nMuscle weakness, generalized {ICD10CM:M62.81} {ICD9CM:728.87} {UMLS C0746674 HP:0003324} {HPO HP:0003324 C0746674};\nBulbar muscle weakness {SNOMEDCT:398432008} {UMLS C1301959 HP:0001283} {HPO HP:0001283 C1301959,C4082299};\nFacial muscle weakness {SNOMEDCT:95666008} {ICD10CM:R29.810} {ICD9CM:438.83,781.94} {UMLS C0427055,C4553723 HP:0030319,HP:0007209} {HPO HP:0030319 C4022514};\nNeck muscle weakness {UMLS C0240479 HP:0000467} {HPO HP:0000467 C0240479};\nProximal limb muscle weakness initially {UMLS C1850580};\nDistal limb muscle weakness occurs later {UMLS C1864972};\nDistal limb muscle weakness initially (in some patients) {UMLS C4692603};\n\'Waddling\' gait {SNOMEDCT:271706000} {UMLS C0231712 HP:0002515} {HPO HP:0002515 C0231712};\nInability to run {UMLS C1854490};\nInability to walk on heels {UMLS C3809657};\nFrequent falls {UMLS C0850703 HP:0002359} {HPO HP:0002359 C0850703};\nMyopathic changes early in disease seen on EMG {UMLS C3278556};\nNeurogenic changes later in disease seen on EMG {UMLS C3278557};\nNemaline bodies (rods) on Gomori trichrome staining {UMLS C3278558};\nNemaline bodies are usually subsarcolemmal or sarcoplasmic {UMLS C1850583};\nNemaline bodies are rarely intranuclear {UMLS C1850584};\nNonspecific myopathic changes without dystrophic or inflammatory changes seen on muscle biopsy {UMLS C3805774};\nCores with lack of oxidative activity and mitochondrial depletion may also be found and extend along length of fiber {UMLS C3278559};\nType 1 muscle fiber predominance {UMLS C1854387 HP:0003803} {HPO HP:0003803 C1854387};\nDecreased muscle density on imaging {UMLS C1850585};\nIncreased fatty infiltration {UMLS C1850586};\nAbsence of spontaneous activity at birth (severe form) {UMLS C1850587}''',
'neurologicCentralNervousSystem': '''Delayed motor development {UMLS C1854301 HP:0001270} {HPO HP:0001270 C1854301,C4020874};\nFailure to achieve sitting or walking (severe form) {UMLS C1850570};\nAbsent gag reflex {SNOMEDCT:5258001} {UMLS C0234784};\nHyporeflexia {SNOMEDCT:22994000,405946002} {UMLS C0151888,C0700078 HP:0001315,HP:0001265} {HPO HP:0001265 C0700078};\nAreflexia {SNOMEDCT:37280007} {UMLS C0234146 HP:0001284} {HPO HP:0001284 C0234146,C0241772,C0278124};\nSlow gross motor activity {UMLS C1850571};\nNormal fine motor activity {UMLS C1850572}''',
'prenatalManifestationsMovement': 'Decreased fetal movement (severe form) {UMLS C1850574} {HPO HP:0001558 C0235659}',
'prenatalManifestationsAmnioticFluid': '''Polyhydramnios (severe form) {UMLS C1850575} {HPO HP:0001561 C0020224};\nFetal hydrops (severe form) {UMLS C4692604}''',
'laboratoryAbnormalities': 'Normal or mildly increased serum creatine kinase {UMLS C1864981}',
'miscellaneous': '''Extraocular muscles are not involved {UMLS C1850577};\nOnset in infancy {UMLS C1848924 HP:0003593} {HPO HP:0003593 C1848924};\nHighly variable severity, ranging from \'typical\' to \'severe\' disease {UMLS C4692610};\nSlowly progressive or nonprogressive course {UMLS C1850594};\nMany adults with typical form remain ambulatory {UMLS C1850595};\nDeath at birth or within first 2 years of life (severe form) {UMLS C1850596}''',
'molecularBasis': 'Caused by mutation in the nebulin gene (NEB, {161650.0001})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': True,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': True,
'chestExternalFeaturesExists': True,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': True,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': True,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': True,
'skeletalFeetExists': True,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': True,
'prenatalManifestationsMovementExists': True,
'prenatalManifestationsAmnioticFluidExists': True,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256040,
'prefix': '#',
'preferredTitle': 'PROTEASOME-ASSOCIATED AUTOINFLAMMATORY SYNDROME 1; PRAAS1',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthHeight': 'Short stature (less common) {UMLS C1843420} {HPO HP:0004322 C0349588}',
'growthOther': '''Failure to thrive {SNOMEDCT:54840006,433476000,432788009} {ICD10CM:R62.51} {ICD9CM:783.41} {UMLS C2315100,C0015544,C3887638 HP:0001508} {HPO HP:0001508 C0231246,C2315100};\nPoor growth {UMLS C1837385 HP:0001510} {HPO HP:0001510 C0151686,C0456070,C0878787,C1837385,C3552463}''',
'headAndNeckFace': '''Loss of facial subcutaneous fat {UMLS C3278566};\nPeriorbital swelling due to violaceous plaques on the eyelids {UMLS C3278567}''',
'headAndNeckEyes': '''Conjunctivitis {SNOMEDCT:9826008} {ICD10CM:H10,H10.9} {ICD9CM:372.30} {UMLS C4553305,C0009763 HP:0000509} {HPO HP:0000509 C0009763,C1864156};\nEpiscleritis {SNOMEDCT:815008} {ICD10CM:H15.1} {UMLS C0014583 HP:0100534} {HPO HP:0100534 C0014583}''',
'headAndNeckMouth': '''Macroglossia {SNOMEDCT:270516002,25273001} {ICD10CM:K14.8,Q38.2} {ICD9CM:750.15} {UMLS C0024421,C0009677 HP:0000158} {HPO HP:0000158 C0024421} {EOM ID:9638e4ba815e5908 IMG:Tongue,Large-small.jpg};\nThick lips {UMLS C1836543 HP:0012471} {HPO HP:0012471 C1836543}''',
'cardiovascularHeart': '''Cardiac insufficiency (in some) {UMLS C3278586} {HPO HP:0001635 C0018801,C0018802};\nArrhythmias (in some) {UMLS C3278587} {HPO HP:0011675 C0003811,C0264886,C0522055,C0855329,C1832603,C1842820}''',
'abdomen': 'Prominent abdomen {UMLS C1850290}',
'abdomenLiver': 'Hepatomegaly {SNOMEDCT:80515008} {ICD10CM:R16.0} {ICD9CM:789.1} {UMLS C0019209 HP:0002240} {HPO HP:0002240 C0019209}',
'abdomenSpleen': 'Splenomegaly (variable) {UMLS C3278565} {HPO HP:0001744 C0038002}',
'skeletal': '''Joint contractures {SNOMEDCT:7890003} {ICD10CM:M24.5} {ICD9CM:718.40,718.4} {UMLS C0009918 HP:0001371} {HPO HP:0001371 C0009917,C0009918,C0333068,C1850530};\nNarrowing of the joint spaces {UMLS C1859695};\nPeriarticular osteopenia {UMLS C0877138};\nBone pain {SNOMEDCT:12584003} {UMLS C4554063,C0151825,C1963077 HP:0002653} {HPO HP:0002653 C0151825};\nJoint pain {SNOMEDCT:57676002} {ICD10CM:M25.5} {ICD9CM:719.4,719.40} {UMLS C0003862,C1963066,C4085641 HP:0002829} {HPO HP:0002829 C0003862}''',
'skeletalLimbs': 'Elbow contractures {SNOMEDCT:239734000,202271004} {ICD10CM:M24.52,M21.22} {UMLS C0409338,C1833142 HP:0002987} {HPO HP:0002987 C0409338}',
'skeletalHands': '''Finger contractures, severe {UMLS C3278569};\nHand contractures, severe {UMLS C3278570};\nClubbed fingers {SNOMEDCT:30760008} {ICD10CM:R68.3} {ICD9CM:781.5} {UMLS C0009080 HP:0100759} {HPO HP:0100759 C0009080};\nLong fingers {UMLS C1858091 HP:0100807} {HPO HP:0100807 C1858091} {EOM ID:41535e8ed3dc9076 IMG:Fingers,Long-small.jpg};\nFinger deformities {SNOMEDCT:20948006} {UMLS C0265605};\nFinger swelling {SNOMEDCT:299060006} {UMLS C0239598 HP:0025131} {HPO HP:0025131}''',
'skeletalFeet': '''Toe contractures, severe {UMLS C3278571} {HPO HP:0005830 C1406835};\nFoot contractures, severe {UMLS C3278572}''',
'skinNailsHairSkin': '''Erythematous nodular skin lesions {UMLS C3278577};\nAnnular erythematous edematous plaques {UMLS C3278578};\nLesions become purpuric {UMLS C3278579};\nResidual hyperpigmentation {UMLS C3278580};\nLesions predominantly on face and limbs {UMLS C3278581};\nPanniculitis {SNOMEDCT:22125009} {ICD10CM:M79.3} {ICD9CM:729.3,729.30} {UMLS C0030326 HP:0012490} {HPO HP:0012490 C0030326};\nDry, stiff skin {UMLS C3278582};\nFrostbitten hands {UMLS C3278583}''',
'skinNailsHairSkinHistology': '''Mononuclear cell infiltrates {UMLS C3278584};\nAtypical mononuclear cells with many mitoses {UMLS C3278585}''',
'muscleSoftTissue': '''Lipodystrophy, partial {SNOMEDCT:75659004} {UMLS C0220989};\nLipodystrophy, generalized, panniculitis-induced (in some) {UMLS C3278562};\nMarked loss of subcutaneous fat in the limbs, face, and sometimes chest {UMLS C3278563};\nMuscle atrophy (variable) {UMLS C3278564} {HPO HP:0003202 C0234958,C0270948,C0541794,C1843479};\nMuscle weakness {SNOMEDCT:26544005} {UMLS C0151786,C0030552 HP:0001324} {HPO HP:0001324 C0151786}''',
'neurologicCentralNervousSystem': '''Mental retardation, mild (2 families) {UMLS C3278561} {HPO HP:0001256 C0026106};\nSeizures (uncommon) {UMLS C3278456} {HPO HP:0001250 C0014544,C0036572};\nBasal ganglia calcification {ICD10CM:G23.8} {UMLS C1389280 HP:0002135} {HPO HP:0002135 C1389280}''',
'metabolicFeatures': 'Fever, intermittent, recurrent (in some) {UMLS C3278593}',
'hematology': '''Microcytic anemia {SNOMEDCT:722005000,234349007} {UMLS C0085576 HP:0001935} {HPO HP:0001935 C0085576};\nThrombocytopenia {SNOMEDCT:302215000,415116008} {ICD10CM:D69.6} {ICD9CM:287.5} {UMLS C0040034,C0392386 HP:0001873} {HPO HP:0001873 C0040034,C0392386}''',
'immunology': '''Antinuclear autoantibodies (in some) {UMLS C3278568};\nLymphadenopathy {SNOMEDCT:30746006} {ICD10CM:R59,R59.9,R59.1} {ICD9CM:785.6} {UMLS C0497156,C4282165 HP:0002716} {HPO HP:0002716 C0497156}''',
'laboratoryAbnormalities': '''Increased erythrocyte sedimentation rate {SNOMEDCT:165468009} {ICD10CM:R70.0} {ICD9CM:790.1} {UMLS C0151632 HP:0003565} {HPO HP:0003565 C0151632};\nHypergammaglobulinemia {SNOMEDCT:127388009} {UMLS C0020455 HP:0010702} {HPO HP:0010702 C0020455,C0151669,C0541985,C1306857,C2048011};\nIncreased gamma-interferon {UMLS C3278573};\nIncreased IgG {UMLS C0239988};\nIncreased IgA {UMLS C0239984 HP:0003261};\nIncreased IL-6 {UMLS C3278574};\nIncreased IL-8 {UMLS C3278575};\nIncreased C-reactive protein {SNOMEDCT:119971000119104} {UMLS C0742906 HP:0011227};\nAbnormal liver enzymes, intermittent {UMLS C3278576};\nIncreased serum triglycerides {SNOMEDCT:166848004} {UMLS C0813230 HP:0002155} {HPO HP:0002155 C1522137}''',
'miscellaneous': '''Onset of autoinflammation in infancy or first few years of life {UMLS C3278589};\nOnset of lipodystrophy later in childhood {UMLS C3278590};\nOnset of joint contractures later in life {UMLS C3278591};\nSome features are variable {UMLS C3278592}''',
'molecularBasis': 'Caused by mutation in the proteasome subunit, beta-type, 8 gene (PSMB8, {177046.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': True,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': True,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': True,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': True,
'skeletalHandsExists': True,
'skeletalFeetExists': True,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': True,
'skinNailsHairSkinHistologyExists': True,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': True,
'endocrineFeaturesExists': False,
'hematologyExists': True,
'immunologyExists': True,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256050,
'prefix': '#',
'preferredTitle': 'ATELOSTEOGENESIS, TYPE II; AO2',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'headAndNeckFace': '''Midface hypoplasia {UMLS C1853242 HP:0011800} {HPO HP:0011800 C1853242,C2673410,C4280320,C4280321} {EOM ID:5b7ad34ab35682b5 IMG:Midface_Retrusion-small.jpg};\nMicrognathia {SNOMEDCT:32958008} {UMLS C0025990 HP:0000347} {HPO HP:0000347 C0025990,C0240295,C1857130} {EOM ID:8bbf61b4ad7ca2ef IMG:Micrognathia-small.jpg}''',
'headAndNeckNose': 'Flattened nasal bridge {UMLS C1836542 HP:0005280} {HPO HP:0005280 C1836542,C3550546,C4280495}',
'headAndNeckMouth': 'Cleft palate {SNOMEDCT:87979003,63567004} {ICD10CM:Q35.5,Q35,Q35.9} {ICD9CM:749.0,749.00} {UMLS C2981150,C0008925,C2240378 HP:0000175} {HPO HP:0000175 C0008925,C2981150}',
'headAndNeckNeck': 'Short neck {SNOMEDCT:95427009} {UMLS C0521525 HP:0000470} {HPO HP:0000470 C0521525} {EOM ID:c75e63fd749ec7a8 IMG:Neck,Short-small.jpg}',
'respiratoryLung': '''Respiratory insufficiency {SNOMEDCT:91434003,409623005,409622000} {ICD10CM:J96.9} {UMLS C0035229,C0034088,C1145670 HP:0010444,HP:0002093,HP:0002878} {HPO HP:0002093 C0035229,C4020855};\nPulmonary hypoplasia {SNOMEDCT:80825009} {UMLS C0265783 HP:0002089} {HPO HP:0002089 C0265783}''',
'chestRibsSternumClaviclesAndScapulae': 'Small thorax {SNOMEDCT:298709006} {UMLS C0575483,C1837482 HP:0005257} {HPO HP:0005257 C1837482}',
'skeletalSpine': '''Cervical kyphosis {SNOMEDCT:298393001} {UMLS C0575170 HP:0002947} {HPO HP:0002947 C0575170};\nPlatyspondyly {UMLS C1844704 HP:0000926} {HPO HP:0000926 C1844704};\nVertebral body coronal clefts {UMLS C1850557};\nScoliosis {SNOMEDCT:298382003,20944008,111266001} {ICD10CM:Q67.5,M41,M41.9} {UMLS C0559260,C0036439,C4552773,C0700208 HP:0002650} {HPO HP:0002650 C0037932,C0700208};\nLumbar hyperlordosis {UMLS C1184923 HP:0002938} {HPO HP:0002938 C1184923};\nHorizontal sacrum {UMLS C1850558 HP:0003440} {HPO HP:0003440 C1850558}''',
'skeletalPelvis': '''Round-shaped iliac bones {UMLS C3550600};\nFlat acetabulae {UMLS C1865196};\nShortened sacroiliac notches {UMLS C1866689 HP:0003185} {HPO HP:0003185 C1866689}''',
'skeletalLimbs': '''Severe micromelia {UMLS C1832986};\nBifid distal humerus {UMLS C1850560};\nShort, dumbbell femur {UMLS C1850561};\nAbducted thumbs and great toes {UMLS C1850562};\nGap between first and second toe {UMLS C1840069 HP:0001852} {HPO HP:0001852 C1840069};\nHypoplastic, rounded middle phalanges {UMLS C1850564};\nTalipes equinovarus {SNOMEDCT:397932003} {ICD10CM:Q66.89,Q66.0} {ICD9CM:754.51} {UMLS C0009081 HP:0001762} {HPO HP:0001762 C0009081}''',
'prenatalManifestationsDelivery': 'Stillborn or death shortly after birth {UMLS C1850556}',
'laboratoryAbnormalities': 'Lacunar halos around chondrocytes in skeletal cartilage {UMLS C1850565}',
'miscellaneous': 'Allelic to diastrophic dysplasia ({222600}), achondrogenesis, type 1b ({600972}), and multiple epiphyseal dysplasia, type 4 ({226900})',
'molecularBasis': 'Caused by mutation in the solute carrier family 26 (sulfate transporter), member 2 gene (SLC26A2, {606718.0002})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': True,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': True,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': True,
'chestExists': True,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': True,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': True,
'skeletalPelvisExists': True,
'skeletalLimbsExists': True,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': True,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': True,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256100,
'prefix': '#',
'preferredTitle': 'NEPHRONOPHTHISIS 1; NPHP1',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthOther': 'Growth retardation {SNOMEDCT:59576002,444896005} {UMLS C0151686 HP:0001510} {HPO HP:0001510 C0151686,C0456070,C0878787,C1837385,C3552463}',
'genitourinaryKidneys': '''Nephronophthisis {SNOMEDCT:204958008} {ICD10CM:Q61.5} {UMLS C0687120 HP:0000090,HP:0008659} {HPO HP:0000090 C0687120};\nEnd stage renal disease {SNOMEDCT:90688005,433146000,46177005} {ICD10CM:N18.9,N18.6,N18.5} {ICD9CM:585.6} {UMLS C0022661,C2316810 HP:0003774} {HPO HP:0003774 C2316810};\nTubular atrophy {UMLS C1858395,C4521759 HP:0000092} {HPO HP:0000092 C1858395};\nTubular basement membrane disintegration {UMLS C1968618 HP:0005583} {HPO HP:0005583 C1968618};\nInterstitial fibrosis {SNOMEDCT:125565008} {UMLS C3887486,C0240035};\nCorticomedullary renal cysts {UMLS C1968619 HP:0000108} {HPO HP:0000108 C1968619}''',
'metabolicFeatures': '''Polyuria {SNOMEDCT:28442001,718402002,56574000} {ICD10CM:R35,R35.8} {ICD9CM:788.42} {UMLS C0032617 HP:0000103} {HPO HP:0000103 C0032617};\nPolydipsia {SNOMEDCT:17173007,267026004} {ICD10CM:R63.1} {ICD9CM:783.5} {UMLS C0085602 HP:0001959} {HPO HP:0001959 C0085602};\nAbsence of hypertension {UMLS C1968624}''',
'hematology': 'Anemia {SNOMEDCT:271737000} {ICD10CM:D64.9} {ICD9CM:285.9} {UMLS C0002871,C4554633,C1000483 HP:0001903} {HPO HP:0001903 C0002871,C0162119}',
'laboratoryAbnormalities': 'Hyposthenuria (inability to concentrate urine normally) {UMLS C1968620} {HPO HP:0003158 C0232831}',
'miscellaneous': '''Medial onset of end stage renal disease 13 years {UMLS C1968622};\nAllelic to Senior-Loken syndrome 1 ({266900}) and Joubert syndrome 4 ({609583})''',
'molecularBasis': 'Caused by mutation in the nephrocystin 1 gene (NPHP1, {607100.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': True,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': True,
'endocrineFeaturesExists': False,
'hematologyExists': True,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256120,
'preferredTitle': 'NEPHROPATHY, DEAFNESS, AND HYPERPARATHYROIDISM',
'oldFormat': {
'GU': 'Renal failure {SNOMEDCT:723188008,42399005} {ICD10CM:N19} {ICD9CM:586} {UMLS C1963154,C0035078 HP:0000083} {HPO HP:0000083 C0035078,C1565489,C1839604}; No hematuria {UMLS C1858397};',
'Metabolic': 'Hyperparathyroidism {SNOMEDCT:66999008} {ICD10CM:E21.3} {ICD9CM:252.0,252.00} {UMLS C4553963,C0020502 HP:0000843} {HPO HP:0000843 C0020502};',
'HEENT': 'Sensorineural deafness {SNOMEDCT:60700002} {ICD10CM:H90.5} {ICD9CM:389.1,389.10} {UMLS C0018784 HP:0000407} {HPO HP:0000407 C0018784};',
'Lab': 'Parathyroid hyperplasia {SNOMEDCT:9092004} {ICD10CM:E21.0} {UMLS C0271844 HP:0008208} {HPO HP:0008208 C0271844};',
'Inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899};'
} ,
'oldFormatExists': True,
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': True,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': False,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256150,
'preferredTitle': 'NEPHROSIALIDOSIS',
'oldFormat': {
'GU': 'Childhood glomerular nephropathy; Nephrosis {SNOMEDCT:90708001,52254009} {ICD10CM:N04} {ICD9CM:581} {UMLS C0027726,C0027720 HP:0000100} {HPO HP:0000100 C0027726}; Renal failure {SNOMEDCT:723188008,42399005} {ICD10CM:N19} {ICD9CM:586} {UMLS C1963154,C0035078 HP:0000083} {HPO HP:0000083 C0035078,C1565489,C1839604};',
'HEENT': 'Dysmorphic facies {SNOMEDCT:248200007} {UMLS C0424503 HP:0001999} {HPO HP:0001999 C0266617,C0424503,C1385263,C4072832,C4072833}; Fundus cherry red spot;',
'GI': 'Visceral storage disease;; Congenital ascites;',
'Neuro': 'Early and severe mental retardation {SNOMEDCT:40700009} {ICD10CM:F72} {ICD9CM:318.1} {UMLS C0036857 HP:0010864} {HPO HP:0010864 C0036857};',
'Skel': 'Skeletal abnormalities {UMLS C4021790 HP:0000924} {HPO HP:0000924 C4021790};',
'Heme': 'Bone marrow foam cells {UMLS C1856560 HP:0004333} {HPO HP:0004333 C1856560};',
'Cardiac': 'Early-onset pericardial effusion;',
'Misc': 'Early death {UMLS C1836407};',
'Lab': 'Leukocyte alpha-(2-6) neuraminidase defect; Renal epithelial cell damage worst in glomeruli and proximal tubules, with high molecular weight sialyloligosaccharide storage;',
'Inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}; ? neuraminidase deficiency (256550);'
} ,
'oldFormatExists': True,
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': True,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256200,
'preferredTitle': 'NEPHROSIS WITH DEAFNESS AND URINARY TRACT AND DIGITAL MALFORMATIONS',
'oldFormat': {
'Limbs': 'Digital malformations; Short bifid distal phalanges of thumbs and big toes;',
'Ears': 'Hearing loss {SNOMEDCT:15188001,343087000,103276001} {ICD10CM:H91.9} {ICD9CM:389,389.9} {UMLS C3887873,C2029884,C1384666,C0018772,C0011053 HP:0000365} {HPO HP:0000365 C0011053,C0018772,C0339789,C1384666};',
'GU': 'Nephrosis {SNOMEDCT:90708001,52254009} {ICD10CM:N04} {ICD9CM:581} {UMLS C0027726,C0027720 HP:0000100} {HPO HP:0000100 C0027726}; Urinary tract anomalies {UMLS C4021821 HP:0000079} {HPO HP:0000079 C4021821};',
'HEENT': 'Bifid uvula {SNOMEDCT:18910001} {UMLS C4551488 HP:0000193} {HPO HP:0000193 C0266122} {EOM ID:89bbddff1c94bf45 IMG:Uvula,Cleft-small.jpg};',
'Inheritance': 'Autosomal recessive vs. X-linked dominant;'
} ,
'oldFormatExists': True,
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': True,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': False,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256300,
'prefix': '#',
'preferredTitle': 'NEPHROTIC SYNDROME, TYPE 1; NPHS1',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthOther': 'Growth retardation {SNOMEDCT:59576002,444896005} {UMLS C0151686 HP:0001510} {HPO HP:0001510 C0151686,C0456070,C0878787,C1837385,C3552463}',
'genitourinaryKidneys': '''Nephrotic syndrome {SNOMEDCT:52254009} {ICD10CM:N04} {ICD9CM:581} {UMLS C0027726,C4553910 HP:0000100} {HPO HP:0000100 C0027726};\nProteinuria, severe {UMLS C0240817} {HPO HP:0000093 C0033687};\nBiopsy shows dilated proximal tubules {UMLS C3278594};\nTubular atrophy {UMLS C1858395,C4521759 HP:0000092} {HPO HP:0000092 C1858395};\nInterstitial fibrosis {SNOMEDCT:125565008} {UMLS C3887486,C0240035};\nMesangial cell proliferation {UMLS C3278595};\nDiffuse mesangial sclerosis {SNOMEDCT:111406002} {UMLS C0268747 HP:0001967} {HPO HP:0001967 C0268747};\nGlomerulosclerosis and fibrosis {UMLS C3278596};\nLoss of podocyte foot processes {UMLS C3278597}''',
'muscleSoftTissue': 'Edema {SNOMEDCT:267038008,20741006,79654002} {ICD10CM:R60.9} {ICD9CM:782.3} {UMLS C1717255,C0013604 HP:0000969} {HPO HP:0000969 C0013604}',
'prenatalManifestationsAmnioticFluid': '''Proteinuria {SNOMEDCT:29738008,231860006} {ICD10CM:R80,R80.9} {ICD9CM:791.0} {UMLS C4554346,C1279888,C0033687,C1962972 HP:0000093} {HPO HP:0000093 C0033687};\nIncreased alpha-fetoprotein {UMLS C0235971 HP:0006254}''',
'prenatalManifestationsPlacentaAndUmbilicalCord': 'Enlarged placenta {UMLS C3278598}',
'prenatalManifestationsDelivery': 'Prematurity {SNOMEDCT:282020008,49550006,395507008,771299009} {ICD10CM:P07.3} {ICD9CM:644.2} {UMLS C0021294,C0151526 HP:0001622}',
'laboratoryAbnormalities': '''Hyperlipidemia {SNOMEDCT:166816003,55822004,3744001} {ICD10CM:E78.5} {UMLS C0020476,C0428465,C4555212,C0020473 HP:0010980,HP:0003077} {HPO HP:0003077 C0020473};\nHypoalbuminemia {SNOMEDCT:119247004} {UMLS C3665623,C0239981,C4554345 HP:0003073} {HPO HP:0003073 C0239981}''',
'miscellaneous': '''Onset in utero {UMLS C1836142 HP:0003577} {HPO HP:0003577 C1836142,C2752013};\nRapidly progressive {UMLS C1838681 HP:0003678} {HPO HP:0003678 C1838681,C1850776};\nEnd-stage renal failure in first decade {UMLS C3278600};\nEarly death without kidney transplant {UMLS C3278601};\nNot responsive to steroid treatment {UMLS C3278602};\nSome patients may have a milder phenotype {UMLS C3278603};\nIncidence of 12.2 per 100,000 in Finland {UMLS C3278604}''',
'molecularBasis': 'Caused by mutation in the nephrin gene (NPHS1, {602716.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': True,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': True,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': True,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': True,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': True,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256370,
'prefix': '#',
'preferredTitle': 'NEPHROTIC SYNDROME, TYPE 4; NPHS4',
'inheritance': 'Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147}',
'genitourinaryKidneys': '''Nephrotic syndrome {SNOMEDCT:52254009} {ICD10CM:N04} {ICD9CM:581} {UMLS C0027726,C4553910 HP:0000100} {HPO HP:0000100 C0027726};\nRenal failure {SNOMEDCT:723188008,42399005} {ICD10CM:N19} {ICD9CM:586} {UMLS C1963154,C0035078 HP:0000083} {HPO HP:0000083 C0035078,C1565489,C1839604};\nDiffuse mesangial sclerosis {SNOMEDCT:111406002} {UMLS C0268747 HP:0001967} {HPO HP:0001967 C0268747};\nFocal segmental glomerulosclerosis (less common) {UMLS C3151569} {HPO HP:0000097 C0017668}''',
'miscellaneous': '''Onset in early childhood {UMLS C1851422};\nProgressive disorder {UMLS C1864985 HP:0003676} {HPO HP:0003676 C0205329,C1864985}''',
'molecularBasis': 'Caused by mutation in the Wilms tumor 1 gene (WT1, {607102.0021})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': True,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': False,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256450,
'prefix': '#',
'preferredTitle': 'HYPERINSULINEMIC HYPOGLYCEMIA, FAMILIAL, 1; HHF1',
'inheritance': '''Autosomal dominant {SNOMEDCT:263681008} {UMLS C0443147 HP:0000006} {HPO HP:0000006 C0443147};\nAutosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}''',
'growthOther': 'Large for gestational age {UMLS C1848395 HP:0001520} {HPO HP:0001520 C1848395}',
'abdomenPancreas': '''Islet cell hyperplasia, diffuse {UMLS C1864907};\nFocal adenomatous hyperplasia of beta cells (uncommon) {UMLS C1850548}''',
'neurologicCentralNervousSystem': '''Loss of consciousness due to hypoglycemia {SNOMEDCT:267384006} {ICD10CM:E15} {ICD9CM:251.0} {UMLS C0020617 HP:0001325} {HPO HP:0001325 C0020617};\nSeizures, hypoglycemic {UMLS C0877056 HP:0002173};\nMental retardation due to repeated episodes of hypoglycemia {UMLS C1864949}''',
'endocrineFeatures': '''Hyperinsulinemic hypoglycemia {SNOMEDCT:360339005} {UMLS C1864903,C3888018 HP:0000825} {HPO HP:0000825 C1864903};\nInsulin deficiency (may develop later in life) {UMLS C1850546};\nDiabetes (may develop later in life) {UMLS C1850547}''',
'laboratoryAbnormalities': '''Hypoglycemia {SNOMEDCT:271327008,302866003,237630007} {ICD10CM:E16.2} {ICD9CM:251.2} {UMLS C4553659,C0020615 HP:0001943} {HPO HP:0001943 C0020615};\nHyperinsulinemia {SNOMEDCT:83469008,131103005} {ICD10CM:E16.1} {UMLS C0020459,C0852795 HP:0000842} {HPO HP:0000842 C0020459}''',
'miscellaneous': 'Genetic heterogeneity {UMLS C0242960 HP:0001425} {HPO HP:0001425 C0242960}',
'molecularBasis': 'Caused by mutation in the ATP-binding cassette, subfamily C, member 8 gene (ABCC8, {600509.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': True,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': True,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256500,
'prefix': '#',
'preferredTitle': 'NETHERTON SYNDROME; NETH',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthOther': 'Failure to thrive {SNOMEDCT:54840006,433476000,432788009} {ICD10CM:R62.51} {ICD9CM:783.41} {UMLS C2315100,C0015544,C3887638 HP:0001508} {HPO HP:0001508 C0231246,C2315100}',
'headAndNeckEyes': 'Sparse eyebrows {SNOMEDCT:422441003} {UMLS C0578682,C1832446 HP:0045075,HP:0002223} {HPO HP:0045075}',
'respiratoryAirways': 'Asthma {SNOMEDCT:195967001} {ICD10CM:J45,J45.909,J45.90} {ICD9CM:493.9,493} {UMLS C2984299,C0004096 HP:0002099} {HPO HP:0002099 C0004096,C3714497}',
'abdomenGastrointestinal': '''Enteropathy with villous atrophy {UMLS C1850538};\nIntestinal atresia (rare) {HPO HP:0011100 C0021828}''',
'skinNailsHairSkin': '''Generalized erythroderma {SNOMEDCT:200948000,399992009,400005007} {ICD10CM:L53.9,L26} {UMLS C0011606 HP:0001019} {HPO HP:0001019 C0011606};\nIchthyosis linearis circumflexa {SNOMEDCT:312514006,54336006,34638006} {UMLS C0265962};\nCongenital lamellar ichthyosis {SNOMEDCT:268245001,205550003,267372009} {ICD10CM:Q80.2} {UMLS C0079154 HP:0007479} {HPO HP:0007479 C0079154};\nUrticaria {SNOMEDCT:247472004,126485001,64305001} {ICD10CM:L50,L50.9} {ICD9CM:708,708.9} {UMLS C2186740,C1559188,C0042109 HP:0001025} {HPO HP:0001025 C0042109}''',
'skinNailsHairSkinHistology': '''Psoriasiform epidermal hyperplasia;\nParakeratosis {SNOMEDCT:65068000,200766001} {UMLS C0030436 HP:0001036} {HPO HP:0001036 C0030436}''',
'skinNailsHairHair': '''Sparse, brittle scalp hair {UMLS C1850541};\nTrichorrhexis invaginata ("bamboo hair") {UMLS C1850542};\nSparse eyebrows {SNOMEDCT:422441003} {UMLS C0578682,C1832446 HP:0045075,HP:0002223} {HPO HP:0045075}''',
'muscleSoftTissue': 'Angioedema {SNOMEDCT:400075008,41291007} {ICD10CM:T78.3} {UMLS C0002994 HP:0100665} {HPO HP:0100665 C0002994}',
'neurologicCentralNervousSystem': 'Developmental delay {SNOMEDCT:248290002,224958001} {ICD10CM:F88} {ICD9CM:315.9} {UMLS C0424605,C0557874 HP:0001263} {HPO HP:0001263 C0557874,C1864897,C4020875}',
'metabolicFeatures': 'Hypernatremic dehydration {SNOMEDCT:427784006} {UMLS C1850544 HP:0004906} {HPO HP:0004906 C1850544}',
'hematology': 'Hypereosinophilia {UMLS C0745091 HP:0032061} {HPO HP:0032061}',
'immunology': '''Elevated immunoglobulin E (IgE) {UMLS C1850539} {HPO HP:0003212 C0236175};\nHay fever {SNOMEDCT:300910009,444316004,21719001,367498001} {ICD10CM:J30,J30.1} {ICD9CM:477.0} {UMLS C0018621 HP:0003193,HP:0012395} {HPO HP:0003193 C0847614,C2607914};\nAsthma {SNOMEDCT:195967001} {ICD10CM:J45,J45.909,J45.90} {ICD9CM:493.9,493} {UMLS C2984299,C0004096 HP:0002099} {HPO HP:0002099 C0004096,C3714497};\nAngioedema {SNOMEDCT:400075008,41291007} {ICD10CM:T78.3} {UMLS C0002994 HP:0100665} {HPO HP:0100665 C0002994};\nFood allergy {SNOMEDCT:414285001} {UMLS C4554344,C1548335,C0016470 HP:0500093} {HPO HP:0500093};\nRecurrent infections {SNOMEDCT:451991000124106} {UMLS C0239998 HP:0002719} {HPO HP:0002719 C0239998};\nInnate immunodeficiency;\nCognate immunodeficiency;\nDefective responses to vaccination;\nReduced IgG levels {SNOMEDCT:123785006} {UMLS C4520847 HP:0004315} {HPO HP:0004315 C0162539};\nReduced C3 components;\nSkewed Th1 phenotype;\nElevated proinflammatory cytokine levels;\nReduced chemokine CC-motif ligand-5 levels;\nReduced natural killer cell cytotoxicity {UMLS C1849419}''',
'miscellaneous': 'Some severely affected infants die in the neonatal period',
'molecularBasis': 'Caused by mutation in the serine protease inhibitor, Kazal type, 5 gene (SPINK5, {605010.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': True,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': True,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': True,
'skinNailsHairSkinHistologyExists': True,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': True,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': True,
'endocrineFeaturesExists': False,
'hematologyExists': True,
'immunologyExists': True,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256520,
'prefix': '#',
'preferredTitle': 'NEU-LAXOVA SYNDROME 1; NLS1',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthOther': 'Prenatal onset growth retardation {SNOMEDCT:22033007} {ICD9CM:764.90,764.9} {UMLS C0015934 HP:0001511} {HPO HP:0001511 C0015934,C0021296,C1386048}',
'headAndNeckHead': 'Microcephaly {SNOMEDCT:1829003} {ICD10CM:Q02} {ICD9CM:742.1} {UMLS C4551563,C0025958 HP:0000252} {HPO HP:0000252 C0424688} {EOM ID:8ae2118220c1308f IMG:Microcephaly-small.jpg}',
'headAndNeckFace': '''Sloping forehead {UMLS C1857679 HP:0000340} {HPO HP:0000340 C1857679} {EOM ID:913a7d5a25c24e6a IMG:Forehead,Sloping-small.jpg};\nMicrognathia {SNOMEDCT:32958008} {UMLS C0025990 HP:0000347} {HPO HP:0000347 C0025990,C0240295,C1857130} {EOM ID:8bbf61b4ad7ca2ef IMG:Micrognathia-small.jpg}''',
'headAndNeckEars': 'Large ears {SNOMEDCT:275480001} {UMLS C0554972 HP:0000400} {HPO HP:0000400 C0152421,C0554972,C1835581,C1848570,C1850189,C1855062,C1860838}',
'headAndNeckEyes': '''Hypertelorism {SNOMEDCT:22006008} {ICD10CM:Q75.2} {ICD9CM:376.41} {UMLS C0020534 HP:0000316} {HPO HP:0000316 C0020534} {EOM ID:71d9f1be67c7f8b6 IMG:Eyes,Widely_Spaced-small.jpg};\nProtruding eyes {UMLS C1848490 HP:0000520} {HPO HP:0000520 C0015300,C1837760,C1848490,C1862425};\nAbsent eyelids {SNOMEDCT:13401001,708541009} {ICD10CM:Q10.3} {UMLS C0266574 HP:0011224} {HPO HP:0011224 C0266574,C4020757};\nCataract {SNOMEDCT:193570009,420123008,247053007,128306009} {ICD10CM:H26.9} {ICD9CM:366,366.9} {UMLS C4555209,C1690964,C0086543,C1962983 HP:0000518} {HPO HP:0000518 C0086543,C1510497};\nMicrophthalmia {SNOMEDCT:61142002,204108000} {ICD10CM:Q11.2} {ICD9CM:743.1,743.11,743.10} {UMLS C0026010 HP:0000568} {HPO HP:0000568 C0026010,C4280625,C4280808};\nAbsent eyelashes {UMLS C1843005 HP:0000561} {HPO HP:0000561 C1843005,C4280626,C4280627} {EOM ID:9d425f5e9624c299 IMG:Eyelashes,Absent-small.jpg}''',
'headAndNeckNose': 'Flattened nose {SNOMEDCT:249329004} {UMLS C0426431}',
'headAndNeckMouth': '''Round, gaping mouth {UMLS C1850528};\nThick lips {UMLS C1836543 HP:0012471} {HPO HP:0012471 C1836543};\nCleft lip {SNOMEDCT:80281008} {ICD10CM:Q36.9,Q36} {ICD9CM:749.1,749.10} {UMLS C0008924,C4321245 HP:0000204,HP:0410030} {HPO HP:0410030};\nCleft palate {SNOMEDCT:87979003,63567004} {ICD10CM:Q35.5,Q35,Q35.9} {ICD9CM:749.0,749.00} {UMLS C2981150,C0008925,C2240378 HP:0000175} {HPO HP:0000175 C0008925,C2981150}''',
'headAndNeckNeck': 'Short neck {SNOMEDCT:95427009} {UMLS C0521525 HP:0000470} {HPO HP:0000470 C0521525} {EOM ID:c75e63fd749ec7a8 IMG:Neck,Short-small.jpg}',
'cardiovascularHeart': '''Patent foramen ovale {SNOMEDCT:204315000,204317008} {ICD10CM:Q21.1} {ICD9CM:745.5} {UMLS C0344724,C0016522 HP:0001684,HP:0001655} {HPO HP:0001655 C0016522};\nAtrial septal defect {SNOMEDCT:70142008,253366007,405752007} {ICD10CM:Q21.1} {UMLS C0018817 HP:0001631} {HPO HP:0001631 C0018817};\nVentricular septal defect {SNOMEDCT:30288003,768552007,253549006} {ICD10CM:Q21.0} {ICD9CM:745.4} {UMLS C0018818 HP:0001629} {HPO HP:0001629 C0018818}''',
'cardiovascularVascular': '''Transposition of great vessels {SNOMEDCT:26146002,204296002} {ICD10CM:Q20.3} {ICD9CM:745.1,745.10} {UMLS C0040761,C3536741 HP:0001669} {HPO HP:0001669 C3536741};\nPatent ductus arteriosus {SNOMEDCT:83330001} {ICD10CM:Q25.0} {ICD9CM:747.0} {UMLS C3495549,C0013274 HP:0001643} {HPO HP:0001643 C0013274}''',
'respiratoryLung': 'Pulmonary hypoplasia {SNOMEDCT:80825009} {UMLS C0265783 HP:0002089} {HPO HP:0002089 C0265783}',
'genitourinaryInternalGenitaliaMale': 'Cryptorchidism {SNOMEDCT:204878001} {ICD10CM:Q53.9} {ICD9CM:752.51} {UMLS C0010417 HP:0000028} {HPO HP:0000028 C0010417}',
'genitourinaryInternalGenitaliaFemale': 'Bifid uterus {UMLS C1850327 HP:0000136} {HPO HP:0000136 C1850327}',
'genitourinaryKidneys': 'Renal agenesis {SNOMEDCT:41962002,204942005,204938007} {ICD10CM:Q60,Q60.1,Q60.2} {ICD9CM:753.0} {UMLS C1609433,C0158699,C0542519 HP:0000104,HP:0010958} {HPO HP:0000104 C0542519}',
'skeletal': 'Poorly mineralized bones {UMLS C1850529}',
'skeletalLimbs': '''Short limbs {UMLS C0239399 HP:0009826} {HPO HP:0009826 C0239399};\nFlexion contractures of joints {UMLS C1850530 HP:0001371} {HPO HP:0001371 C0009917,C0009918,C0333068,C1850530};\nPterygia {UMLS C0033999,C4084831 HP:0001059} {HPO HP:0001059 C0033999}''',
'skeletalHands': '''Finger syndactyly {SNOMEDCT:249769001,34048007,268251006} {ICD10CM:Q70.1,Q70.10} {ICD9CM:755.11} {UMLS C0221352 HP:0006101} {HPO HP:0006101 C0221352};\nPuffiness of hands {UMLS C1850531};\nCamptodactyly {SNOMEDCT:29271008} {UMLS C0221369,C0685409 HP:0012385} {HPO HP:0012385 C0685409} {EOM ID:e2dc697e402380a8 IMG:Camptodactyly-large-small.jpg};\nClinodactyly {SNOMEDCT:17268007} {UMLS C4551485,C0265610 HP:0030084,HP:0040019} {HPO HP:0030084 C0265610,C4280304} {EOM ID:483af428f909c76c IMG:Clinodactyly-small.jpg}''',
'skeletalFeet': '''Toe syndactyly {SNOMEDCT:32113001} {ICD10CM:Q70.3} {UMLS C0265660 HP:0001770} {HPO HP:0001770 C0265660};\nPuffiness of feet {UMLS C1850532};\nCalcaneovalgus {UMLS C1860450 HP:0001848} {HPO HP:0001848 C1860450};\nVertical talus {SNOMEDCT:205082007} {UMLS C0240912 HP:0001838}''',
'skinNailsHairSkin': '''Yellow subcutaneous tissue covered by thin, scaly skin {UMLS C1850533 HP:0007525} {HPO HP:0007525 C1850533};\nGeneralized tissue edema {SNOMEDCT:271808008} {ICD10CM:R60.1} {UMLS C1850534 HP:0007430} {HPO HP:0007430 C1850534}''',
'skinNailsHairHair': '''Absent scalp hair {UMLS C1850535 HP:0002293};\nAbsent eyelashes {UMLS C1843005 HP:0000561} {HPO HP:0000561 C1843005,C4280626,C4280627} {EOM ID:9d425f5e9624c299 IMG:Eyelashes,Absent-small.jpg}''',
'neurologicCentralNervousSystem': '''Lissencephaly {SNOMEDCT:204036008} {ICD10CM:Q04.3} {UMLS C0266463 HP:0001339} {HPO HP:0001339 C0266463,C1879312};\nAbsence of corpus callosum {SNOMEDCT:5102002} {ICD10CM:Q04.0} {UMLS C0175754 HP:0001274} {HPO HP:0001274 C0175754};\nCerebellar hypoplasia {SNOMEDCT:16026008} {UMLS C0266470 HP:0001321} {HPO HP:0001321 C0266470};\nAbsence of olfactory bulbs {UMLS C1850527};\nHydranencephaly {SNOMEDCT:30023002} {ICD10CM:Q04.3} {UMLS C0020225 HP:0002324} {HPO HP:0002324 C0020225};\nSpina bifida {SNOMEDCT:67531005} {ICD10CM:Q05.9,Q05} {ICD9CM:741,741.9,741.90} {UMLS C0080178,C0158534 HP:0002414,HP:0010301} {HPO HP:0002414 C0080178};\nDandy-Walker malformation {SNOMEDCT:14447001} {ICD10CM:Q03.1} {UMLS C0010964 HP:0001305} {HPO HP:0001305 C0010964};\nChoroid plexus cyst {SNOMEDCT:230790004} {UMLS C0338597 HP:0002190} {HPO HP:0002190 C0338597}''',
'miscellaneous': '''Decreased fetal activity {SNOMEDCT:276369006} {ICD10CM:O36.8190,O36.81} {UMLS C0235659 HP:0001558} {HPO HP:0001558 C0235659};\nStillborn {SNOMEDCT:237364002} {ICD10CM:P95} {UMLS C0595939 HP:0003826} {HPO HP:0003826 C0015927,C0595939};\nPolyhydramnios {SNOMEDCT:86203003} {ICD10CM:O40} {ICD9CM:657,657.0} {UMLS C0020224 HP:0001561} {HPO HP:0001561 C0020224};\nShort umbilical cord {SNOMEDCT:59795007} {UMLS C0266786 HP:0001196} {HPO HP:0001196 C0266786};\nSmall placenta {SNOMEDCT:289264006} {UMLS C0566694 HP:0006266} {HPO HP:0006266 C0566694};\nMost patients are stillborn or die in immediate neonatal period {UMLS C1850537}''',
'molecularBasis': 'Caused by mutation in the phosphoglycerate dehydrogenase gene (PHGDH, {606879.0007})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': True,
'headAndNeckExists': True,
'headAndNeckHeadExists': True,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': True,
'headAndNeckMouthExists': True,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': True,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': True,
'respiratoryExists': True,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': True,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': True,
'genitourinaryInternalGenitaliaFemaleExists': True,
'genitourinaryKidneysExists': True,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': True,
'skeletalHandsExists': True,
'skeletalFeetExists': True,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': True,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': True,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': False,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256540,
'prefix': '#',
'preferredTitle': 'GALACTOSIALIDOSIS; GSL',
'oldFormat': {
'Growth': 'Dwarfism {SNOMEDCT:422065006,237837007,237836003} {ICD10CM:E34.3} {UMLS C0013336 HP:0003510} {HPO HP:0003510 C0013336};',
'HEENT': 'Coarse facies {UMLS C1845847 HP:0000280} {HPO HP:0000280 C1845847,C4072825}; Conjunctival telangiectases {SNOMEDCT:231870008} {UMLS C0239105 HP:0000524} {HPO HP:0000524 C0239105}; Corneal clouding {SNOMEDCT:413921009,64634000,95735008} {ICD9CM:371.00} {UMLS C0010038,C0521719 HP:0007957} {HPO HP:0007957 C0010038,C0521719}; Macular cherry red spot {UMLS C2216370 HP:0010729} {HPO HP:0010729 C2216370}; Hearing loss {SNOMEDCT:15188001,343087000,103276001} {ICD10CM:H91.9} {ICD9CM:389,389.9} {UMLS C3887873,C2029884,C1384666,C0018772,C0011053 HP:0000365} {HPO HP:0000365 C0011053,C0018772,C0339789,C1384666};',
'Neuro': 'Mental retardation {SNOMEDCT:110359009,228156007} {ICD9CM:317-319.99} {UMLS C0025362,C3714756 HP:0001249} {HPO HP:0001249 C0025362,C0423903,C0917816,C1843367,C3714756,C4020876}; Seizures {SNOMEDCT:91175000} {UMLS C0036572 HP:0001250} {HPO HP:0001250 C0014544,C0036572};',
'Skel': 'Dysostosis multiplex {SNOMEDCT:254069004,65327002} {ICD10CM:E76.01} {UMLS C0086795 HP:0000943} {HPO HP:0000943 C0086795};',
'GI': 'Usually no organomegaly; Occasionally hepatosplenomegaly; Vacuolated Kupffer cells;',
'Skin': 'Widespread hemangiomas;',
'Cardiac': 'Mitral valvular disease {SNOMEDCT:11851006} {ICD9CM:424.0} {UMLS C0026265}; Aortic valvular disease {SNOMEDCT:8722008} {ICD9CM:424.1,395} {UMLS C1260873};',
'Lab': 'EM of skin biopsy and peripheral blood lymphocytes shows membrane-bound fibrillogranular inclusions; Elevated urine sialyloligosaccharides but no free sialic acid; Neuraminidase deficiency {SNOMEDCT:38795005,124461006} {ICD10CM:E77.1} {UMLS C4282398,C0268226}; Beta-galactosidase deficiency {SNOMEDCT:124465002} {UMLS C2718068}; Decreased carboxypeptidase-L/protective protein activity;',
'Inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899};'
} ,
'oldFormatExists': True,
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': True,
'skinNailsHairSkinExists': True,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': False,
'molecularBasisExists': False,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256550,
'prefix': '#',
'preferredTitle': 'NEURAMINIDASE DEFICIENCY',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'growthHeight': 'Short stature (type II, infantile and juvenile) {UMLS C2673284} {HPO HP:0004322 C0349588}',
'headAndNeckFace': '''Coarse facies (type II, all types) {UMLS C2673289} {HPO HP:0000280 C1845847,C4072825};\nFacial edema (type II, congenital) {UMLS C2673290} {HPO HP:0000282 C0542571}''',
'headAndNeckEars': 'Hearing loss, sensorineural (type II) {UMLS C2673291} {HPO HP:0000407 C0018784}',
'headAndNeckEyes': '''Vision loss, progressive (type I) {UMLS C3278605} {HPO HP:0000529 C1839364,C3277697};\nNystagmus (type I) {UMLS C2673293} {HPO HP:0000639 C0028738};\nCherry-red spot (type II, infantile and juvenile and type I) {UMLS C2673294};\nLens opacities (type II, infantile and juvenile) {UMLS C2673295} {HPO HP:0000518 C0086543,C1510497}''',
'cardiovascularHeart': '''Cardiomegaly (type II, infantile) {UMLS C2673307} {HPO HP:0001640 C0018800};\nCardiomyopathy (type II, congenital) {UMLS C2673308} {HPO HP:0001638 C0878544}''',
'abdomenExternalFeatures': 'Neonatal ascites (type II, congenital) {UMLS C2673286}',
'abdomenLiver': 'Hepatomegaly (type II, all subtypes) {UMLS C2673287} {HPO HP:0002240 C0019209}',
'abdomenSpleen': 'Splenomegaly (type II, all subtypes) {UMLS C2673288} {HPO HP:0001744 C0038002}',
'genitourinaryExternalGenitaliaMale': 'Inguinal hernia (type II, congenital) {UMLS C2673283} {HPO HP:0000023 C0019294}',
'skeletal': 'Dysostosis multiplex (type II, all types) {UMLS C2673298} {HPO HP:0000943 C0086795}',
'skeletalLimbs': '''Epiphyseal stippling (type II, congenital) {UMLS C2673299} {HPO HP:0010655 C1859126};\nPeriosteal cloaking (type II, congenital) {UMLS C2673300}''',
'muscleSoftTissue': '''Muscle weakness (type I) {UMLS C2673296} {HPO HP:0001324 C0151786};\nMuscle atrophy (type I) {UMLS C2673297} {HPO HP:0003202 C0234958,C0270948,C0541794,C1843479}''',
'neurologicCentralNervousSystem': '''Ataxia (type I and type II, infantile and juvenile) {UMLS C2673276} {HPO HP:0001251 C0007758};\nSeizures (type I and type II, juvenile) {UMLS C2673277} {HPO HP:0001250 C0014544,C0036572};\nMental retardation, moderate to severe (type II, infantile and juvenile) {UMLS C2673278};\nMyoclonus (type I and type II, infantile and juvenile) {UMLS C2673279} {HPO HP:0001336 C0027066,C1854302};\nDysmetria (type I) {UMLS C2673280} {HPO HP:0001310 C0234162};\nHypotonia (type I and type II, infantile) {UMLS C2673281} {HPO HP:0001290 C1858120};\nHyperreflexia (type I) {UMLS C2673282} {HPO HP:0001347 C0151889}''',
'voice': 'Slurred speech (type I) {UMLS C2673309} {HPO HP:0001350 C0234518}',
'hematology': '''Vacuolated lymphocytes (type II) {UMLS C2673305} {HPO HP:0001922 C1836855};\nBone marrow foam cells (type II) {UMLS C2673306} {HPO HP:0004333 C1856560}''',
'prenatalManifestations': 'Hydrops fetalis (type II, congenital) {UMLS C2673285} {HPO HP:0001789 C0020305}',
'prenatalManifestationsDelivery': 'Still birth {SNOMEDCT:237364002} {ICD10CM:P95} {UMLS C0595939 HP:0003826}',
'laboratoryAbnormalities': '''Proteinuria (type II, congenital) {UMLS C2673301} {HPO HP:0000093 C0033687};\nIncreased urinary sialyloligosaccharides {UMLS C2673302 HP:0012061} {HPO HP:0012061 C2673302};\nIncreased urinary sialylglycopeptides {UMLS C2673303};\nNeuraminidase deficiency (white blood cells, fibroblasts, cultured amniotic cells) {UMLS C2673304}''',
'miscellaneous': '''Type I sialidosis (cherry-red spot/myoclonus syndrome ) - mild disease, no dysmorphic features, onset in second decade {UMLS C2673311};\nType II sialidosis - severe disease, dysmorphic features, variable onset (congenital or hydropic (in utero), infantile (1-12 months), juvenile (2-20 years)) {UMLS C2673312}''',
'molecularBasis': 'Caused by mutation in the neuraminidase 1 gene (NEU1, {608272.0001})',
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': True,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': True,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': True,
'headAndNeckEarsExists': True,
'headAndNeckEyesExists': True,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': True,
'cardiovascularHeartExists': True,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': True,
'abdomenExternalFeaturesExists': True,
'abdomenLiverExists': True,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': True,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': True,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': True,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': True,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': True,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': True,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': True,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': True,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256600,
'prefix': '#',
'preferredTitle': 'NEURODEGENERATION WITH BRAIN IRON ACCUMULATION 2A; NBIA2A',
'inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899}',
'neurologicCentralNervousSystem': '''Developmental delay {SNOMEDCT:248290002,224958001} {ICD10CM:F88} {ICD9CM:315.9} {UMLS C0424605,C0557874 HP:0001263} {HPO HP:0001263 C0557874,C1864897,C4020875};\nPsychomotor regression, progressive {UMLS C1850493 HP:0002376} {HPO HP:0002376 C1836550,C1836830,C1850493,C1855009,C1855019,C1855996,C1857121,C1859678};\nHypotonia {SNOMEDCT:398152000,398151007} {UMLS C0026827,C1858120 HP:0001290,HP:0001252} {HPO HP:0001290 C1858120};\nGeneralized weakness {SNOMEDCT:13791008} {ICD10CM:M62.81,R53.1,R53.81} {ICD9CM:728.87,799.3} {UMLS C0746674,C3714552 HP:0003324} {HPO HP:0003324 C0746674};\nGait instability {SNOMEDCT:394616008,22631008} {UMLS C0231686 HP:0002317} {HPO HP:0002317 C0231686};\nAtaxia {SNOMEDCT:39384006,85102008,20262006} {ICD10CM:R27.0} {ICD9CM:438.84} {UMLS C0004134,C1135207,C0007758,C4554639 HP:0010867,HP:0001251} {HPO HP:0001251 C0007758};\nPyramidal tract signs {SNOMEDCT:14648003} {UMLS C0234132 HP:0007256} {HPO HP:0007256 C0234132};\nSpastic tetraplegia {SNOMEDCT:192965001} {UMLS C0426970 HP:0002510} {HPO HP:0002510 C0426970};\nHyperreflexia (70%) {UMLS C2749487} {HPO HP:0001347 C0151889};\nAreflexia (30%) {UMLS C2749490} {HPO HP:0001284 C0234146,C0241772,C0278124};\nMental retardation {SNOMEDCT:110359009,228156007} {ICD9CM:317-319.99} {UMLS C0025362,C3714756 HP:0001249} {HPO HP:0001249 C0025362,C0423903,C0917816,C1843367,C3714756,C4020876};\nSeizures {SNOMEDCT:91175000} {UMLS C0036572 HP:0001250} {HPO HP:0001250 C0014544,C0036572};\nAutonomic involvement may occur {UMLS C1850495};\nCerebral atrophy {SNOMEDCT:278849000} {UMLS C0235946 HP:0002059} {HPO HP:0002059 C0154671,C0235946,C4020860};\nCerebellar atrophy {UMLS C0740279 HP:0001272} {HPO HP:0001272 C0262404,C0740279,C4020873};\nNeuronal loss {UMLS C1850496 HP:0002529} {HPO HP:0002529 C1850496};\nGliosis {SNOMEDCT:81415000,359580009} {UMLS C0017639 HP:0002171} {HPO HP:0002171 C0017639};\nAxonal dystrophy {UMLS C1850497};\nAxonal swelling or thickening {UMLS 
C1850498};\nAxonal \'spheroid\' inclusions {UMLS C1850499};\nHigh voltage, fast rhythms seen on EEG {UMLS C3806475};\nCerebellar atrophy with signal hyperintensity in the cerebellar cortex seen on T2-weighted MRI {UMLS C3806476};\nIncreased iron deposition in the basal ganglia (40%) {UMLS C2749488};\nThin optic chiasm {UMLS C1850503}''',
'neurologicPeripheralNervousSystem': '''Decreased nerve conduction velocities (NCV) (30%) {UMLS C2749489} {HPO HP:0000762 C1857640};\nChronic denervation seen on EMG {UMLS C3550584};\nAxonal dystrophy {UMLS C1850497};\nAxonal swelling or thickening {UMLS C1850498};\nAxonal \'spheroid\' inclusions {UMLS C1850499}''',
'laboratoryAbnormalities': 'Characteristic spheroids can be found in peripheral tissue, such as skin and conjunctiva {UMLS C1850505}',
'miscellaneous': '''Onset usually in infancy or up to 2 years of age although later onset has been reported (\'late-infantile\') {UMLS C3806477};\nDeath usually by age 10 years {UMLS C1850508};\nAllelic disorder to neurodegeneration with brain iron accumulation 2B (NBIA2B, {610217});\nPhenotypic overlap with PKAN neuroaxonal dystrophy (NBIA1, {234200})''',
'molecularBasis': 'Caused by mutation in the phospholipase A2, group VI gene (PLA2G6, {603604.0001})',
'inheritanceExists': True,
'growthExists': False,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': False,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': False,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': False,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': True,
'neurologicPeripheralNervousSystemExists': True,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': True,
'molecularBasisExists': True,
'matches': ''
} },
{'clinicalSynopsis': {
'mimNumber': 256690,
'preferredTitle': 'NEUROFACIODIGITORENAL SYNDROME',
'oldFormat': {
'Neuro': 'Mental retardation {SNOMEDCT:110359009,228156007} {ICD9CM:317-319.99} {UMLS C0025362,C3714756 HP:0001249} {HPO HP:0001249 C0025362,C0423903,C0917816,C1843367,C3714756,C4020876}; No seizures {UMLS C1856553}; HEENT Vertical groove in tip of nose; Bifid nose {SNOMEDCT:204521002} {UMLS C0221363 HP:0011803} {HPO HP:0011803 C0221363,C4280318,C4280319} {EOM ID:7124d8708f2360b3 IMG:Nose,Bifid-small.jpg}; Prominent forehead {UMLS C1837260 HP:0011220} {HPO HP:0011220 C1837260,C1867446} {EOM ID:510a51e4083c1d6f IMG:Forehead,Prominent-small.jpg}; Abnormal ear shape;',
'Growth': 'Short stature {SNOMEDCT:422065006,237837007,237836003} {ICD10CM:R62.52,E34.3} {ICD9CM:783.43} {UMLS C0013336,C0349588,C2237041,C2919142 HP:0004322,HP:0003510} {HPO HP:0004322 C0349588};',
'Limbs': 'Triphalangeal thumbs {SNOMEDCT:205308004} {ICD10CM:Q74.0} {UMLS C0241397 HP:0001199} {HPO HP:0001199 C0241397};',
'GU': 'Unilateral renal agenesis {ICD10CM:Q60.0} {UMLS C0266294 HP:0000122} {HPO HP:0000122 C0266294};',
'Lab': 'Highly abnormal EEG;',
'Inheritance': 'Autosomal recessive {SNOMEDCT:258211005} {UMLS C0441748 HP:0000007} {HPO HP:0000007 C0441748,C4020899};'
} ,
'oldFormatExists': True,
'inheritanceExists': True,
'growthExists': True,
'growthHeightExists': False,
'growthWeightExists': False,
'growthOtherExists': False,
'headAndNeckExists': False,
'headAndNeckHeadExists': False,
'headAndNeckFaceExists': False,
'headAndNeckEarsExists': False,
'headAndNeckEyesExists': False,
'headAndNeckNoseExists': False,
'headAndNeckMouthExists': False,
'headAndNeckTeethExists': False,
'headAndNeckNeckExists': False,
'cardiovascularExists': False,
'cardiovascularHeartExists': False,
'cardiovascularVascularExists': False,
'respiratoryExists': False,
'respiratoryNasopharynxExists': False,
'respiratoryLarynxExists': False,
'respiratoryAirwaysExists': False,
'respiratoryLungExists': False,
'chestExists': False,
'chestExternalFeaturesExists': False,
'chestRibsSternumClaviclesAndScapulaeExists': False,
'chestBreastsExists': False,
'chestDiaphragmExists': False,
'abdomenExists': False,
'abdomenExternalFeaturesExists': False,
'abdomenLiverExists': False,
'abdomenPancreasExists': False,
'abdomenBiliaryTractExists': False,
'abdomenSpleenExists': False,
'abdomenGastrointestinalExists': False,
'genitourinaryExists': True,
'genitourinaryExternalGenitaliaMaleExists': False,
'genitourinaryExternalGenitaliaFemaleExists': False,
'genitourinaryInternalGenitaliaMaleExists': False,
'genitourinaryInternalGenitaliaFemaleExists': False,
'genitourinaryKidneysExists': False,
'genitourinaryUretersExists': False,
'genitourinaryBladderExists': False,
'skeletalExists': True,
'skeletalSkullExists': False,
'skeletalSpineExists': False,
'skeletalPelvisExists': False,
'skeletalLimbsExists': True,
'skeletalHandsExists': False,
'skeletalFeetExists': False,
'skinNailsHairExists': False,
'skinNailsHairSkinExists': False,
'skinNailsHairSkinHistologyExists': False,
'skinNailsHairSkinElectronMicroscopyExists': False,
'skinNailsHairNailsExists': False,
'skinNailsHairHairExists': False,
'muscleSoftTissueExists': False,
'neurologicExists': True,
'neurologicCentralNervousSystemExists': False,
'neurologicPeripheralNervousSystemExists': False,
'neurologicBehavioralPsychiatricManifestationsExists': False,
'voiceExists': False,
'metabolicFeaturesExists': False,
'endocrineFeaturesExists': False,
'hematologyExists': False,
'immunologyExists': False,
'neoplasiaExists': False,
'prenatalManifestationsExists': False,
'prenatalManifestationsMovementExists': False,
'prenatalManifestationsAmnioticFluidExists': False,
'prenatalManifestationsPlacentaAndUmbilicalCordExists': False,
'prenatalManifestationsMaternalExists': False,
'prenatalManifestationsDeliveryExists': False,
'laboratoryAbnormalitiesExists': True,
'miscellaneousExists': False,
'molecularBasisExists': False,
'matches': ''
}
} ]
}
} } | [
"[email protected]"
]
| |
417abc4b221a811744744bb40cacbdebce913473 | 77ce93cca6427101f18a06baba5f2ff04f0f77fb | /isce_geocode_tools.py | df5bb16edf0972fb91141d3d3e2808c01271ebd9 | []
| no_license | cherishing99/S1_batches | 6cd4c511d4bc63d8c25a9beb1f773a23b2c15deb | e4238203cd0898a6ef960607bff2e44166db300a | refs/heads/master | 2023-01-02T03:38:44.407786 | 2020-10-27T14:08:15 | 2020-10-27T14:08:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20,489 | py | # June 2020
# A series of functions to geocode isce images and los.rdr.geo in various formats
# Including the UAVSAR stacks
# And the UAVSAR ground range igram format from the JPL website
import numpy as np
import matplotlib.pyplot as plt
import sys, glob
from subprocess import call
import isce_read_write
import jpl_uav_read_write
import mask_and_interpolate
import netcdf_read_write as rwr
import haversine
from isce_read_write import get_xmin_xmax_xinc_from_xml
from lkv_trig_math import bearing_to_cartesian, complement_angle, cartesian_to_ccw_from_north, rotate_vector_by_angle, \
normalize_look_vector, calc_rdr_azimuth_incidence_from_lkv_plane_down
# ------------ UTILITY FUNCTIONS -------------- #
def cut_resampled_grid(outdir, filename, variable, config_params):
    """Cut a resampled metadata grid (lon, lat, or lookvector) to the configured bounds.

    Reads the ISCE scalar grid outdir/filename, crops it to the comma-separated
    xbounds/ybounds in config_params, and writes the cropped grid out twice:
    as an ISCE .gdal file and as a GMTSAR-style netcdf, both named
    cut_<variable> in outdir.
    """
    source_grid = isce_read_write.read_scalar_data(outdir + "/" + filename);
    print("Shape of the " + variable + " file: ", np.shape(source_grid));
    xbounds = [float(tok) for tok in config_params.xbounds.split(',')[0:2]];
    ybounds = [float(tok) for tok in config_params.ybounds.split(',')[0:2]];
    cut_grid = mask_and_interpolate.cut_grid(source_grid, xbounds, ybounds, fractional=True, buffer_rows=3);
    print("Shape of the cut lon file: ", np.shape(cut_grid));
    (ny, nx) = np.shape(cut_grid);
    isce_read_write.write_isce_data(cut_grid, nx, ny, "FLOAT", outdir + '/cut_' + variable + '.gdal');
    rwr.produce_output_netcdf(np.array(range(nx)), np.array(range(ny)), cut_grid,
                              "degrees", outdir + '/cut_' + variable + '.nc');
    return;
# ------------ GEOCODING FUNCTIONS FOR UAVSAR STACKS -------------- #
# Based on stacks of 3D netcdf's from the time series processing
def gmtsar_nc_stack_2_isce_stack(ts_file, output_dir, bands=2):
    """Explode a 3D GMTSAR time-series netcdf into per-scene ISCE .unw files.

    Each time slice i is written to output_dir/scene_i/ts_slice_i.unw in ISCE
    unwrapped format, together with a quick-look PNG of band `bands`.
    """
    call(["mkdir", "-p", output_dir], shell=False);
    tdata, xdata, ydata, zdata = rwr.read_3D_netcdf(ts_file);
    num_slices = np.shape(zdata)[0];
    for slice_idx in range(num_slices):
        scene_dir = output_dir + "/scene_" + str(slice_idx);
        call(["mkdir", "-p", scene_dir]);
        single_slice = zdata[slice_idx, :, :];
        # Write the slice out in isce format (two identical float32 bands).
        (ny, nx) = np.shape(single_slice);
        filename = scene_dir + "/ts_slice_" + str(slice_idx) + ".unw";
        single_slice = np.float32(single_slice);
        isce_read_write.write_isce_unw(single_slice, single_slice, nx, ny, "FLOAT", filename);
        isce_read_write.plot_scalar_data(filename, band=bands, colormap='rainbow', datamin=-50, datamax=200,
                                         aspect=1 / 5, outname=scene_dir + "/isce_unw_band.png");
    return;
def geocode_UAVSAR_stack(config_params, geocoded_folder):
    """Build pixel-wise geocoding metadata (lon/lat/azimuth/incidence) for a UAVSAR stack.

    Loads the flat binary lat/lon/height (.llh) and look-vector (.lkv) files
    that ship with the UAVSAR product, reshapes them to the radar grid,
    derives per-pixel azimuth and incidence angles from the look vectors,
    resamples everything to the interferogram sampling with gdalwarp, and
    cuts the grids to the bounds in config_params.  All grids are single-band.

    Args:
        config_params: config object providing llh_file, lkv_file,
            xbounds/ybounds (used by cut_resampled_grid), and ts_output_dir.
        geocoded_folder: output directory for the geocoded metadata grids.

    Returns:
        (W, E, S, N): lon/lat extent of the cut grids.
    """
    call(["mkdir", "-p", geocoded_folder], shell=False);
    # The .llh/.lkv files are flat float32 vectors with 3 interleaved channels.
    llh_array = np.fromfile(config_params.llh_file, dtype=np.float32);  # lat, lon, hgt repeating
    lkv_array = np.fromfile(config_params.lkv_file, dtype=np.float32);  # east, north, up repeating
    lat = llh_array[0::3];  # ordered arrays as provided in the UAVSAR files
    lon = llh_array[1::3];
    # (heights, llh_array[2::3], are not needed for geocoding and are skipped)
    lkv_e = lkv_array[0::3]
    lkv_n = lkv_array[1::3]
    lkv_u = lkv_array[2::3]
    example_igram = glob.glob("../Igrams/????????_????????/*.int")[0];
    phase_array = isce_read_write.read_phase_data(example_igram);
    print("Shape of the interferogram: ", np.shape(phase_array));
    # Determine the shape of the llh array:
    # assuming there's a giant gap somewhere in the lat array
    # that can tell us how many elements are in each row of the gridded array.
    typical_gap = abs(lat[1] - lat[0]);
    for i in range(1, len(lat)):
        if abs(lat[i] - lat[i - 1]) > 100 * typical_gap:
            print(lat[i] - lat[i - 1]);
            print("There are %d columns in the lon/lat arrays" % i);
            llh_pixels_range = i;
            break;
    llh_pixels_azimuth = int(len(lon) / llh_pixels_range);
    print("llh_pixels_azimuth: ", llh_pixels_azimuth);
    print("llh_pixels_range: ", llh_pixels_range);
    # Turn the flat llh/lkv vectors into 2D arrays.  The raw look vector is in
    # meters from the aircraft to the ground, so normalize before decomposing
    # it into azimuth/incidence angles.
    grid_shape = (llh_pixels_azimuth, llh_pixels_range);
    lat_array = np.reshape(lat, grid_shape);
    lon_array = np.reshape(lon, grid_shape);
    lkve_array = np.reshape(lkv_e, grid_shape);
    lkvn_array = np.reshape(lkv_n, grid_shape);
    lkvu_array = np.reshape(lkv_u, grid_shape);
    lkve_array, lkvn_array, lkvu_array = normalize_look_vector(lkve_array, lkvn_array, lkvu_array);
    azimuth, incidence = calc_rdr_azimuth_incidence_from_lkv_plane_down(lkve_array, lkvn_array, lkvu_array);
    # Write each full-resolution grid in GDAL format.
    full_res_grids = {"lon": lon_array, "lat": lat_array,
                      "azimuth": azimuth, "incidence": incidence};
    for variable, grid in full_res_grids.items():
        isce_read_write.write_isce_data(grid, llh_pixels_range, llh_pixels_azimuth, "FLOAT",
                                        geocoded_folder + "/" + variable + "_total.gdal");
    # Resample each grid in GDAL to match the interferogram sampling, then
    # cut it to the configured bounds (writes cut_<variable>.gdal / .nc).
    for variable in ["lon", "lat", "incidence", "azimuth"]:
        call(['gdalwarp', '-ts', str(np.shape(phase_array)[1]), str(np.shape(phase_array)[0]),
              '-r', 'bilinear', '-to', 'SRC_METHOD=NO_GEOTRANSFORM',
              '-to', 'DST_METHOD=NO_GEOTRANSFORM', geocoded_folder + '/' + variable + '_total.gdal',
              geocoded_folder + '/' + variable + '_igram_res.tif'], shell=False);
        cut_resampled_grid(geocoded_folder, variable + "_igram_res.tif", variable, config_params);
    # Quality check the cut grids and extract the lon/lat extent.
    isce_read_write.plot_scalar_data(geocoded_folder + '/cut_lat.gdal',
                                     colormap='rainbow', aspect=1 / 4,
                                     outname=geocoded_folder + '/cut_lat_geocoded.png');
    cut_lon = isce_read_write.read_scalar_data(geocoded_folder + '/cut_lon.gdal');
    cut_lat = isce_read_write.read_scalar_data(geocoded_folder + '/cut_lat.gdal');
    W, E = np.min(cut_lon), np.max(cut_lon);
    S, N = np.min(cut_lat), np.max(cut_lat);
    # Sanity check: the cut metadata should match the shape of the cut igram
    # data (only meaningful once the time-series outputs exist).
    signalspread = isce_read_write.read_scalar_data(config_params.ts_output_dir + '/signalspread_cut.nc');
    print("For comparison, shape of cut data is: ", np.shape(signalspread));
    return W, E, S, N;
def create_isce_stack_unw_geo(geocoded_dir, W, E, S, N):
    """Geocode each scene's .unw into .unw.geo via geocodeGdal.py.

    Uses the pixel-wise cut_lat/cut_lon grids as lookup tables and the
    W/E/S/N lon-lat bounds for the output frame.
    NOTE(review): after geocodeGdal the raster comes back BSQ instead of
    BIL, which still needs a real fix; the sys.exit(0) below deliberately
    terminates the whole process after the first scene so only one
    quick-look image is produced until that is resolved.
    """
    folders = glob.glob(geocoded_dir + "/scene*");
    i = 0;
    for folder_i in folders:
        # Run the geocode command:
        #   geocodeGdal.py -l cut_lat.gdal -L cut_lon.gdal -f <scene>.unw -b "S N W E"
        # This places the geocoded .unw.geo into each sub-directory.
        datafile = glob.glob(folder_i + "/*.unw");
        datafile = datafile[0]
        command = "geocodeGdal.py -l " + geocoded_dir + "/cut_lat.gdal -L " + geocoded_dir + "/cut_lon.gdal " + "-f " + datafile + " -b \"" + str(
            S) + " " + str(N) + " " + str(W) + " " + str(E) + "\" -x 0.00025 -y 0.00025"
        print(command);
        print("\n");
        call(command, shell=True);
        # Unfortunately, after geocodeGdal, the files end up BSQ instead of BIL. This is necessary to reshape them.
        # For making this more streamlined, I should definitely use a regular isce_write function in the future.
        filename = datafile + ".geo"
        isce_read_write.plot_scalar_data(filename, colormap='rainbow', datamin=-50, datamax=200,
                                         outname='test_after_geocode.png', band=2);
        print("DANGER! PLEASE FIGURE OUT A SIMPLE WRITE FUNCTION FOR THIS");
        i = i + 1;
        sys.exit(0);  # intentional hard stop: BSQ/BIL handling beyond the first scene is unresolved
    return;
def create_isce_stack_rdr_geo(geocoded_dir, W, E, S, N):
    """Produce a geocoded two-band los.rdr.geo (incidence, azimuth) for the stack.

    Runs geocodeGdal.py on the cut azimuth and incidence grids (using the cut
    lat/lon grids as lookup tables), then packs the two geocoded rasters into
    a single ISCE-style two-band file named los.rdr.geo.
    """
    print("Creating los.rdr.geo")
    geocode_template = ("geocodeGdal.py -l {d}/cut_lat.gdal -L {d}/cut_lon.gdal "
                        "-f {d}/cut_{v}.gdal -b \"{S} {N} {W} {E}\" -x 0.00025 -y 0.00025")
    az_command = geocode_template.format(d=geocoded_dir, v="azimuth", S=S, N=N, W=W, E=E)
    print(az_command + "\n");
    call(az_command, shell=True);
    inc_command = geocode_template.format(d=geocoded_dir, v="incidence", S=S, N=N, W=W, E=E)
    call(inc_command, shell=True);
    # Combine the two geocoded rasters into one two-band file.
    grid_inc = isce_read_write.read_scalar_data(geocoded_dir + "/cut_incidence.gdal.geo", flush_zeros=False);
    grid_az = isce_read_write.read_scalar_data(geocoded_dir + "/cut_azimuth.gdal.geo", flush_zeros=False);
    (ny, nx) = np.shape(grid_inc);
    isce_read_write.write_isce_unw(grid_inc, grid_az, nx, ny, "FLOAT", geocoded_dir + "/los.rdr.geo");
    return;
def inspect_isce(geocoded_dir):
    """Quality-check the geocoded scenes: print basic statistics and render a PNG per scene."""
    for scene_dir in glob.glob(geocoded_dir + "/scene*"):
        datafile = glob.glob(scene_dir + "/*.unw.geo")[0];
        grid = isce_read_write.read_scalar_data(datafile, flush_zeros=False);
        print("Statistics:")
        print("shape: ", np.shape(grid))
        print("max: ", np.nanmax(grid))
        print("min: ", np.nanmin(grid))
        isce_read_write.plot_scalar_data(datafile, colormap="rainbow", datamin=-50, datamax=200,
                                         outname=scene_dir + "/geocoded_data.png");
    return;
def fix_hacky_BSQ_BIL_problem(geocoded_directory, mynum):
    """Repair a .unw.geo that was written BSQ (with duplicated rows) into proper BIL.

    August 2020.  This un-does something that happened before on NoMachine:
    the .unw.geo files ended up BSQ instead of BIL, with each row duplicated.
    Reads the two mislabeled bands, stacks them, collapses each accidental row
    pair down to a single row (the second row of each pair wins), and rewrites
    the result with correct geocoding metadata into the BIL_correct/
    subdirectory.  If the end of the isce_geocode script for nomachine is
    fixed, this should never be necessary.

    mynum: string index of the time-series slice (ts_slice_<mynum>.unw.geo).
    """
    # Find the files
    unw_file = geocoded_directory + 'ts_slice_' + mynum + '.unw.geo';
    unw_xml = unw_file + '.xml';
    unw_file_final = geocoded_directory + 'BIL_correct/ts_slice_' + mynum + '.unw.geo';
    # Read the problematic bands and get ready to package them into a real geocoded file.
    data_top = isce_read_write.read_scalar_data(unw_file,
                                                band=1);  # I'm not even sure how I'm allowed to read band 2 (xml says 1 band).
    data_bottom = isce_read_write.read_scalar_data(unw_file, band=2);  # xml is clearly wrong.
    data = np.vstack((data_top, data_bottom));  # each of these has a duplicate row by accident.
    data_surviving = np.zeros(np.shape(data_top));
    # Rows 2k and 2k+1 both map to output row k; the later write wins,
    # so each surviving row is the second row of the accidental pair.
    for i in range(np.shape(data)[0]):
        counter = int(np.floor(i / 2.0));
        data_surviving[counter, :] = data[i, :];
    (ny, nx) = np.shape(data_surviving);
    # Carry the geocoding metadata over from the existing xml.
    firstLon, firstLat, dE, dN, xmin, xmax = get_xmin_xmax_xinc_from_xml(unw_xml);
    isce_read_write.write_isce_unw(data_surviving, data_surviving, nx, ny, "FLOAT", unw_file_final, firstLat=firstLat,
                                   firstLon=firstLon, deltaLon=dE, deltaLat=dN, Xmin=xmin, Xmax=xmax);
    return;
# ------------ JPL UAVSAR IGRAM FORMATS -------------- #
# A set of tools designed for handling of ground-range igrams
# from the JPL website for UAVSAR individual igram products
def cross_track_pos(target_lon, target_lat, nearrange_lon, nearrange_lat, heading_cartesian):
    """Cross-track position of a target point relative to a near-range reference point.

    Sets up a coordinate system centered on (nearrange_lon, nearrange_lat)
    with the x-axis along the flight direction (heading_cartesian, degrees CCW
    from east) and returns the y component (the cross-track coordinate) of the
    target point in that system.
    """
    distance = haversine.distance((target_lat, target_lon), (nearrange_lat, nearrange_lon));
    compass_bearing = haversine.calculate_initial_compass_bearing((nearrange_lat, nearrange_lon),
                                                                  (target_lat, target_lon));  # CW from north
    theta = bearing_to_cartesian(compass_bearing);  # position-vector angle, CCW from east
    # East/north components of the position vector, then rotate into the
    # along-track/cross-track frame defined by the flight heading.
    east_component = distance * np.cos(np.deg2rad(theta));
    north_component = distance * np.sin(np.deg2rad(theta));
    _, y_prime = rotate_vector_by_angle(east_component, north_component, heading_cartesian);
    return y_prime;
def incidence_angle_trig(xtp, cross_track_max, near_inc_angle, far_inc_angle):
    """Interpolate the radar incidence angle at cross-track position xtp.

    Given the incidence angles (measured from vertical) at the near-range
    edge (xtp = 0) and far-range edge (xtp = cross_track_max) of the swath,
    solves the flat-geometry viewing triangles for the effective platform
    height h and returns the incidence angle at the intermediate position.
    Behaves like linear interpolation with a slight curvature.
    """
    # Complementary angles are measured from the ground up to the platform.
    tan_near = np.tan(np.deg2rad(complement_angle(near_inc_angle)))
    tan_far = np.tan(np.deg2rad(complement_angle(far_inc_angle)))
    h = (tan_near * tan_far * cross_track_max) / (tan_near - tan_far)
    angle_to_horizontal = np.rad2deg(np.arctan(h / (xtp + h / tan_near)))
    return complement_angle(angle_to_horizontal)
def get_geocoded_axes_from_ann(ann_file, cut_rowcol, looks_x, looks_y):
    """Compute geocoded x (lon) and y (lat) axes for a cut and multilooked grid.

    Args:
        ann_file: JPL UAVSAR .ann file describing the ground-range product.
        cut_rowcol: [row_start, row_stop, col_start, col_stop] of the cut window.
        looks_x, looks_y: multilooking factors that were used in filtering.

    Returns:
        (x_filt, y_filt): lists of coordinates; each value is the mean of one
        multilook window of the cut axis (the last window may be partial).
    """
    num_rows, num_cols = jpl_uav_read_write.get_rows_cols(ann_file, 'ground');
    start_lon, start_lat, lon_inc, lat_inc = jpl_uav_read_write.get_ground_range_corner_increment(ann_file);
    x_orig = [start_lon + i * lon_inc for i in range(0, num_cols)];
    y_orig = [start_lat + i * lat_inc for i in range(0, num_rows)];
    # Implement the grid cut (rows indexed by cut_rowcol[0:2], cols by [2:4]).
    x_cut = x_orig[cut_rowcol[2]: cut_rowcol[3]];
    y_cut = y_orig[cut_rowcol[0]: cut_rowcol[1]];
    # Implement the multilooking on each axis.
    x_filt = _multilook_axis(x_cut, looks_x);
    y_filt = _multilook_axis(y_cut, looks_y);
    return x_filt, y_filt;


def _multilook_axis(axis_vals, looks):
    # Average consecutive windows of `looks` samples (last window may be partial).
    return [np.mean(axis_vals[start:start + looks]) for start in range(0, len(axis_vals), looks)];
def write_unwrapped_ground_range_displacements(ground_range_phase_file, output_file, x_axis, y_axis, wavelength):
    """Convert unwrapped ground-range phase into displacement and write a .unw.geo file.

    Reads the unwrapped phase grid, saves a quick-look plot
    (unwrapped_geocoded_phase.png), scales phase by wavelength / (4*pi), and
    writes a two-band ISCE .unw.geo with geocoding metadata derived from
    x_axis / y_axis.
    """
    lon_inc = x_axis[1] - x_axis[0]
    lat_inc = y_axis[1] - y_axis[0]
    _, _, unw_grid = rwr.read_netcdf4_xyz(ground_range_phase_file)
    # Quick-look plot of the raw unwrapped phase.
    grid_x, grid_y = np.meshgrid(x_axis, y_axis)
    plt.figure(figsize=(11, 7), dpi=300)
    plt.pcolormesh(grid_x, grid_y, unw_grid, cmap='jet', vmin=0, vmax=20)
    plt.colorbar()
    plt.savefig('unwrapped_geocoded_phase.png')
    # Convert phase to displacement (mm) using the UAVSAR wavelength.
    unw_grid = unw_grid * (wavelength / (4 * np.pi))
    (ny, nx) = np.shape(unw_grid)
    # ISCE .unw.geo output (in mm): two float bands with geocoding metadata.
    isce_read_write.write_isce_unw(unw_grid, unw_grid, nx, ny, "FLOAT", output_file,
                                   firstLat=max(y_axis), firstLon=min(x_axis), deltaLon=lon_inc,
                                   deltaLat=lat_inc, Xmin=min(x_axis), Xmax=max(x_axis))
    return
def create_los_rdr_geo_from_ground_ann_file(ann_file, x_axis, y_axis):
    """Make los.rdr.geo from a JPL-website UAVSAR .ann file and ground-range sample points.

    x_axis and y_axis are the lon/lat arrays at which the line-of-sight
    vectors will be evaluated on a corresponding grid.  Azimuth is constant
    over the scene; incidence is computed per pixel from its cross-track
    position.  Writes a two-band 'los.rdr.geo' in the working directory.
    """
    near_angle, far_angle, heading = jpl_uav_read_write.get_nearrange_farrange_heading_angles(ann_file);
    heading_cartesian = bearing_to_cartesian(heading);  # CCW from east
    print("Heading is %f degrees CW from north" % heading);
    print("Cartesian Heading is %f" % heading_cartesian)
    # Get the upper and lower left corners, so we can compute the length of the across-track extent in km
    ul_lon, ul_lat, ll_lon, ll_lat = jpl_uav_read_write.get_ground_range_left_corners(ann_file);
    cross_track_max = haversine.distance((ll_lat, ll_lon), (ul_lat, ul_lon));  # in km
    # Get the azimuth angle for the pixels looking up to the airplane
    # My own documentation says CCW from north, even though that's really strange.
    azimuth = heading_cartesian - 90;  # 90 degrees to the right of the airplane heading (for the look vector from ground to plane)
    azimuth = cartesian_to_ccw_from_north(azimuth);  # degrees CCW from North
    print("azimuth from ground to plane is:", azimuth)
    [X, Y] = np.meshgrid(x_axis, y_axis);
    (ny, nx) = np.shape(X);
    grid_az = azimuth * np.ones(np.shape(X));  # single azimuth value everywhere
    grid_inc = np.zeros(np.shape(X));
    print("Computing incidence angles for all pixels")
    # Per-pixel: project onto the cross-track axis, then interpolate the
    # incidence angle between the near- and far-range values.
    for i in range(ny):
        for j in range(nx):
            xtp = cross_track_pos(X[i, j], Y[i, j], ll_lon, ll_lat,
                                  heading_cartesian);  # THIS WILL HAVE TO CHANGE FOR ASCENDING AND DESCENDING
            inc = incidence_angle_trig(xtp, cross_track_max, near_angle, far_angle);
            grid_inc[i, j] = inc;
    # Finally, write the 2 bands for los.rdr.geo
    isce_read_write.write_isce_unw(grid_inc, grid_az, nx, ny, "FLOAT", 'los.rdr.geo');
    return;
| [
"[email protected]"
]
| |
94a882b3ad4cf2c8ce3f7d515284b7b95e0bbeda | 06ba98f4e71e2e6e04e9e381987333a743511818 | /history/migrations/0002_auto_20180803_0007.py | 80603c86d738101b7f32f908e7b49fa21ff1e7da | []
| no_license | AnEvilHerbivore/Django-Music | e99c6f7936088a3baa42abeaea4b46361fb415cb | 8f0b45d22053ca674f4dc8f963cb0da949469213 | refs/heads/master | 2022-12-10T10:08:35.831550 | 2018-08-03T19:12:42 | 2018-08-03T19:12:42 | 141,728,372 | 0 | 0 | null | 2021-06-10T20:43:27 | 2018-07-20T15:24:59 | Python | UTF-8 | Python | false | false | 1,053 | py | # Generated by Django 2.0.1 on 2018-08-03 00:07
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated Django schema migration for the history app (2018-08-03).

    Re-declares several CharFields on the Artist and Song models so that each
    has max_length=100 and a default of '' (empty string).
    """
    # Must be applied after the app's initial schema migration.
    dependencies = [
        ('history', '0001_initial'),
    ]
    # Each AlterField rewrites one existing column definition; no data change.
    operations = [
        migrations.AlterField(
            model_name='artist',
            name='biggest_hit',
            field=models.CharField(default='', max_length=100),
        ),
        migrations.AlterField(
            model_name='artist',
            name='birth_date',
            field=models.CharField(default='', max_length=100),
        ),
        migrations.AlterField(
            model_name='artist',
            name='name',
            field=models.CharField(default='', max_length=100),
        ),
        migrations.AlterField(
            model_name='song',
            name='album',
            field=models.CharField(default='', max_length=100),
        ),
        migrations.AlterField(
            model_name='song',
            name='title',
            field=models.CharField(default='', max_length=100),
        ),
    ]
| [
"[email protected]"
]
| |
098396a34b3be24ef43d0b1428dcb079fb5c911a | c31ee8136a57a96649196081e1cfde0676c2a481 | /larcv/app/tests/test_matrixmult.py | 63a26af9a4138e0c87251de2c5104d965db970cd | [
"MIT"
]
| permissive | DeepLearnPhysics/larcv2 | b12b46168e5c6795c70461c9495e29b427cd88b5 | 31863c9b094a09db2a0286cfbb63ccd2f161e14d | refs/heads/develop | 2023-06-11T03:15:51.679864 | 2023-05-30T17:51:19 | 2023-05-30T17:51:19 | 107,551,725 | 16 | 19 | MIT | 2023-04-10T10:15:13 | 2017-10-19T13:42:39 | C++ | UTF-8 | Python | false | false | 1,073 | py | import os,sys
import ROOT
import numpy as np
from larcv import larcv
# Python 2 script: exercises larcv.Image2D matrix multiplication against numpy.
print larcv.Image2D
# TESTS MATRIX MULTIPLICATION FEATURE
# Random operands: a is 6x5, b is 5x8, so the product C is 6x8.
a = np.random.rand(6,5)
b = np.random.rand(5,8)
# Mirror the numpy arrays into larcv Image2D objects of the same shape.
aI = larcv.Image2D( a.shape[0], a.shape[1] )
bI = larcv.Image2D( b.shape[0], b.shape[1] )
arows = a.shape[0]
acols = a.shape[1]
brows = b.shape[0]
bcols = b.shape[1]
# Copy every element into the Image2D pixel buffers.
for r in range(0,arows):
    for c in range(0,acols):
        aI.set_pixel( r, c, a[r,c] )
for r in range(0,brows):
    for c in range(0,bcols):
        bI.set_pixel( r, c, b[r,c] )
# Reference product with numpy, and the Image2D operator* under test.
C = np.dot(a,b)
CI = aI*bI
crows = CI.meta().rows()
ccols = CI.meta().cols()
# Element-wise differences: all three matrices printed below should be
# ~zero (up to any float rounding in the Image2D pixel storage).
print "A diff"
Adiff = np.zeros( a.shape )
for r in range(0,arows):
    for c in range(0,acols):
        Adiff[r,c] = aI.pixel(r,c)-a[r,c]
print Adiff
print "B diff"
Bdiff = np.zeros( b.shape )
for r in range(0,brows):
    for c in range(0,bcols):
        Bdiff[r,c] = bI.pixel(r,c)-b[r,c]
print Bdiff
print "CDiff"
Cdiff = np.zeros( C.shape )
for r in range(0,crows):
    for c in range(0,ccols):
        Cdiff[r,c] = CI.pixel(r,c)-C[r,c]
print Cdiff
| [
"[email protected]"
]
| |
078153fca42249d9d1fb37d3cd7526a82fef59bc | fa2ab3d980aeff387edc556121b124fd68078789 | /ConditionalPrograms/ShippingAccount.py | af9ed57cdbf6a844dd86373f191d63a1bd4db288 | [
"MIT"
]
| permissive | MiguelCF06/PythonProjects | 6e0a3323d3a44a893ec0afafcba7ec3882e62aa3 | dfa49203c3ed1081728c7f4e565f847629662d75 | refs/heads/master | 2022-10-17T23:22:04.357296 | 2020-06-10T18:03:38 | 2020-06-10T18:03:38 | 265,905,262 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,571 | py | print("Welcome to the Shipping Accounts Program\n")
username = ["mikeL", "Omar293", "JJlk", "JoelW"]
user = input("Hello, what is your username: ")
if user not in username:
print("Sorry, you do not have an account with us. Goodbye.")
else:
print("Hello {}. Welcome back to your account.".format(user))
print("Current shipping prices are as follows:\n")
print("Shipping orders 0 to 100:\t\t$5.10 each")
print("Shipping orders 100 to 500:\t\t$5.00 each")
print("Shipping orders 500 to 1000:\t$4.95 each")
print("Shipping orders over 1000:\t\t$4.80 each\n")
amount = int(input("How many items would you like to ship: "))
if amount <= 0:
print("Nothing to do.")
if amount > 0 and amount <= 100:
items = 5.10
price = items * amount
print("To ship {} items it will cost you ${} at $5.10 per item.".format(amount, price))
elif amount > 100 and amount <= 500:
items = 5.00
price = items * amount
print("To ship {} items it will cost you ${} at $5.00 per item.".format(amount, price))
elif amount > 500 and amount <= 1000:
items = 4.95
price = items * amount
print("To ship {} items it will cost you ${} at $4.95 per item.".format(amount, price))
else:
items = 4.80
price = items * amount
print("To ship {} items it will cost you ${} at $4.80 per item.".format(amount, price))
print()
answer = input("Would you like to place this order (y/n): ")
if answer == "n" or answer == "N":
print("Okay, no order is being placed at this time.")
elif answer == "y" or answer == "Y":
print("Okay. Shipping your {} items.".format(amount)) | [
"[email protected]"
]
| |
247f1bfd7c171ceabc5b83ac3633a773b93d0adf | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2895/60825/236016.py | 944fe88513a26187ad8223f032dfba9d3196d51e | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | a=input()
# `a` holds a range written like "[m,n]"; compute the bitwise AND of every
# integer in [m, n].  The AND of the whole range equals the common binary
# prefix of m and n, so shift both ends down until they match instead of
# looping over the (possibly huge) range — O(bits) instead of O(n - m).
a = a[1:len(a) - 1]
low, high = map(int, a.split(","))
shift = 0
while low < high:
    low >>= 1
    high >>= 1
    shift += 1
res = low << shift
print(res) | [
"[email protected]"
]
| |
a3739687fd238c1cd2484eca5cf46e5c9c27e987 | de15d27440ceb922a8d12f8db5881ae1982592ec | /sampledb/models/migrations/publications_add_object_name.py | 3b07967f3ffd3718788e6af1f4c7eb96f5ccb804 | [
"MIT"
]
| permissive | maltedeckers/sampledb | 24f39f1adbe9bcc341309a4b6620768a8dc3857c | 30ad29f8df01290d4ff84a9b347f15a10856ac22 | refs/heads/master | 2023-08-22T04:25:47.826698 | 2021-05-07T09:07:02 | 2021-05-07T09:07:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | # coding: utf-8
"""
Add object_name column to object_publications table.
"""
import os
MIGRATION_INDEX = 27
MIGRATION_NAME, _ = os.path.splitext(os.path.basename(__file__))
def run(db):
# Skip migration by condition
column_names = db.session.execute("""
SELECT column_name
FROM information_schema.columns
WHERE table_name = 'object_publications'
""").fetchall()
if ('object_name',) in column_names:
return False
# Perform migration
db.session.execute("""
ALTER TABLE object_publications
ADD object_name TEXT NULL
""")
return True
| [
"[email protected]"
]
| |
b34775b5a3efbd0dda72ca1c924c1daa49d5995a | ac23f0e5bb60c3201ea16d92369f8defa50f574a | /0x0B-python-input_output/4-append_write.py | 6d1d834297f4e5478a9ff2f4ab4921ad9f4a8ea5 | []
| no_license | Nukemenonai/holbertonschool-higher_level_programming | 85ba3e61517ee48a2e73980c915e7033e8090f06 | 3c467bb8ab3fa38454709ed7eb9819e0eb445310 | refs/heads/master | 2020-09-29T00:21:47.583303 | 2020-08-30T22:40:59 | 2020-08-30T22:40:59 | 226,901,103 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | #!/usr/bin/python3
def append_write(filename="", text=""):
""" writes a string UTF8 to a text file
returns the number of characters written
filename: name of the file.
text: the text to insert
appends
"""
with open(filename, 'a') as f:
n = f.write(text)
f.close()
return n
| [
"[email protected]"
]
| |
24c072a5dea3b8bd6c343321376a8de0b7705640 | a6ffe7990cb5690a20566f64e343441e79d4d11a | /leetcode/10. 正则表达式匹配.py | 213fb81272bf7ae50cc592b1ef1bb296b8415fac | []
| no_license | ywcmaike/OJ_Implement_Python | 26b907da4aece49d3833382f80665a6263cbf0ec | 48e99509e675a6708a95a40912f0f0f022a08d73 | refs/heads/master | 2022-11-26T17:35:22.066443 | 2020-08-02T16:19:25 | 2020-08-02T16:19:25 | 72,869,628 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 503 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
# author:maike
# datetime:2020/7/22 下午6:44
import sys
if __name__ == "__main__":
# 读取第一行的n
n = int(sys.stdin.readline().strip())
ans = 0
for i in range(n):
# 读取每一行
line = sys.stdin.readline().strip()
# 把每一行的数字分隔后转化成int列表
values = list(map(int, line.split()))
for v in values:
ans += v
print(ans)
if __name__ == '__main__':
| [
"[email protected]"
]
| |
441d888c4903420479c5f874867acad5a6233fe8 | 5cf3f04bdee5a17d7e4b7e14294047ce3d1dc40a | /guess_dice/middleware/ipAddress.py | b57a807ddfe48e91f76edc72a7e66852d8f71596 | []
| no_license | gzgdouru/guess_dice_site | bc2e4b284d5c0399232247ecc7634341199b5ad7 | 03bfadef8412a8d1d7506c1bfb5e58aee68ba343 | refs/heads/master | 2020-04-06T12:45:09.757664 | 2018-12-29T14:15:41 | 2018-12-29T14:15:41 | 157,469,262 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 466 | py | from django.utils.deprecation import MiddlewareMixin
from analysis.models import ClientIp
class MarkVisitIpMiddleware(MiddlewareMixin):
def process_request(self, request):
try:
realIp = request.META["HTTP_X_FORWARDED_FOR"]
realIp = realIp.split(",")[0]
except:
realIp = request.META["REMOTE_ADDR"]
url = request.path
if realIp != "127.0.0.1":
ClientIp(ip=realIp, url=url).save()
| [
"[email protected]"
]
| |
dce507a9fac3425ea1b7d89d66f0e791ede9a0c5 | 5706c57ca2ba3faf94c286ec0dc969636c030b2e | /statistics.py | ab2463ab07632e16af0351ee91a2e721488d3b6b | []
| no_license | youfeng243/crawler_statistics | 5e92a8ef3e21a3d1762822bc051ab7b0a9154636 | f33c1e574ad9af328745d1c8851f641dfa93fd94 | refs/heads/master | 2021-01-21T12:32:15.553018 | 2017-09-18T08:36:15 | 2017-09-18T08:36:15 | 102,083,440 | 0 | 1 | null | 2017-09-02T03:22:35 | 2017-09-01T06:58:41 | Python | UTF-8 | Python | false | false | 11,057 | py | # coding=utf-8
"""
统计各个站点的入库数量
"""
import datetime
import json
import time
import MySQLdb
import click
import pandas
import pymongo
from config import MONGO_CONFIG, CHECK_DATES, CHECK_TOPIC, MYSQL_CONFIG, TABLE_NAME_LIST, TOPIC_NAME_LIST, \
FILTER_TABLE_LIST
from logger import Logger
log = Logger("statistics.log").get_logger()
# mongodb 初始化
client = pymongo.MongoClient(MONGO_CONFIG['host'], MONGO_CONFIG['port'])
mongo_db = client[MONGO_CONFIG['db']]
mongo_db.authenticate(MONGO_CONFIG['username'], MONGO_CONFIG['password'])
# mysql 初始化
mysql_db = MySQLdb.connect(MYSQL_CONFIG['host'],
MYSQL_CONFIG['username'],
MYSQL_CONFIG['password'],
MYSQL_CONFIG['db'], charset="utf8")
"""
获取当天的delta天之前的日期
"""
def get_delta_date(delta):
date_obj = datetime.datetime(int(time.strftime("%Y")), int(time.strftime("%m")), int(time.strftime("%d"))).date()
diff = datetime.timedelta(days=delta)
before_date = date_obj - diff
return before_date.strftime("%Y-%m-%d")
# 根据topic获取topic_id
def get_topic_id(topic):
topic_id = 0
cursor = mysql_db.cursor()
sql = "SELECT * FROM topic WHERE table_name = '%s' " % (topic)
try:
cursor.execute(sql)
one_topic = cursor.fetchone()
topic_id = one_topic[0]
except Exception as e:
log.error("Error: unable to fecth data")
log.exception(e)
cursor.close()
return topic_id
# 根据topic_id获取主题的所有站点
def get_sites_by_topic_id(topic_id):
res = []
cursor = mysql_db.cursor()
sql = "SELECT * FROM site"
try:
cursor.execute(sql)
rows = cursor.fetchall()
# 遍历sites
for row in rows:
label = row[10]
site = row[7]
topic_ids = label.split(",") if label else []
if str(topic_id) in topic_ids:
res.append(site)
except Exception as e:
log.error("Error: unable to fecth data")
log.exception(e)
cursor.close()
return res
# 获得配置文件中所有站点信息
def get_all_site_info(table_name_list):
all_site_dict = {}
total_site = 0
for table_name in table_name_list:
count_dict = {}
while True:
all_site_dict[table_name] = set()
if table_name not in CHECK_TOPIC:
log.info("表信息没有在配置文件中: {} 从数据库中进行加载...".format(table_name))
sites_str_list = get_sites_by_topic_id(get_topic_id(table_name))
for ss in sites_str_list:
all_site_dict[table_name].add(ss)
log.info("数据库中加载数目为: {} {} {}".format(
table_name, len(sites_str_list), sites_str_list))
break
table_dict = CHECK_TOPIC.get(table_name)
site_list = table_dict.get('sites')
assert site_list is not None
for site_dict in site_list:
site = site_dict.get('site')
assert site is not None
all_site_dict[table_name].add(site)
if site in count_dict:
count_dict[site] += 1
else:
count_dict[site] = 1
break
total_site += len(all_site_dict[table_name])
for key, value in count_dict.iteritems():
if value >= 2:
log.info("当前主题站点有重复: {} {} {}".format(table_name, key, value))
log.info("招行关注站点总数目: {}".format(total_site))
log.info(all_site_dict)
return all_site_dict
# 获得重点列表
def get_import_set():
data_set = set()
with open("import_site_list.txt") as p_file:
for line in p_file:
site = line.strip()
data_set.add(site)
log.info("重要列表站点数目: {}".format(len(data_set)))
return data_set
# 获取所有站点官方数据统计
def get_all_site_statistics():
site_list = []
data_list = []
with open("site_list.txt") as p_file:
for line in p_file:
site = line.strip()
site_list.append(site)
with open("site_all_crawl.txt") as p_file:
for line in p_file:
num = int(line.strip())
data_list.append(num)
if len(data_list) != len(site_list):
raise Exception("官方数据总量加载失败!")
return dict(zip(site_list, data_list))
# 统计
def statis(is_all, cur_time, days=CHECK_DATES):
sheet_one_col_list = [u"主题", u"站点"]
sheet_two_col_list = [u"主题"]
# 合并
table_name_list = TABLE_NAME_LIST
topic_name_list = TOPIC_NAME_LIST
sheet_one_list = []
import_sheet_list = []
sheet_two_list = []
start_date = get_delta_date(days)
end_date = time.strftime("%Y-%m-%d")
start_time = start_date + " 00:00:00"
end_time = end_date + " 23:59:59"
if is_all is False:
sheet_one_col_list.append(start_date + u"至" + start_date)
sheet_one_col_list.append(u"官方数量")
sheet_one_col_list.append(u"抓取占比")
sheet_one_col_list.append(u'招行站点')
sheet_two_col_list.append(start_date + u"至" + start_date)
import_site_name = "[{}]_{}_{}_import_sites.xls".format(
cur_time, start_date, end_date)
excel_name = "[{}]_{}_{}_utime_sites.xls".format(
cur_time, start_date, end_date)
log.info("当前统计的时间段为: {} - {}".format(start_time, end_time))
else:
sheet_one_col_list.append(u"全量统计")
sheet_one_col_list.append(u"官方数量")
sheet_one_col_list.append(u"抓取占比")
sheet_one_col_list.append(u'招行站点')
sheet_two_col_list.append(u"全量统计")
import_site_name = "[{}]_all_import_sites.xls".format(cur_time)
excel_name = "[{}]_all_utime_sites.xls".format(cur_time)
log.info("当前为全量统计...")
# 获得所有站点信息
all_site_dict = get_all_site_info(table_name_list)
# 获取站点官方数量
site_statistics_dict = get_all_site_statistics()
# 获得重要列表信息
import_site_set = get_import_set()
for index, table_name in enumerate(table_name_list):
if table_name in FILTER_TABLE_LIST:
log.info("当前topic不进行统计: {}".format(table_name))
continue
count = 0
log.info("当前统计的topic为: {}".format(table_name))
collection = mongo_db[table_name]
if is_all is False:
cursor = collection.find({'_utime': {'$gte': start_time, '$lte': end_time}},
['_src'],
no_cursor_timeout=True).batch_size(1000)
else:
cursor = collection.find({}, ['_src'], no_cursor_timeout=True).batch_size(1000)
# 站点与统计量的映射
site_count_map = {}
for item in cursor:
count += 1
if '_src' in item and \
isinstance(item['_src'], list) and \
len(item["_src"]) > 0:
src_set = set()
_src_list = item['_src']
for src_item in _src_list:
if 'site' not in src_item:
continue
src_set.add(src_item['site'].strip())
# 需统计全部站点抓取
for key in src_set:
site_count_map[key] = site_count_map[key] + 1 if key in site_count_map else 1
else:
_id = item.pop('_id')
log.warn("当前数据_src不符合条件: {} {} {}".format(
topic_name_list[index] + table_name, _id,
json.dumps(item, ensure_ascii=False)))
if count % 1000 == 0:
log.info("当前进度: {} {}".format(table_name, count))
log.info("总数据量: {} {}".format(table_name, count))
cursor.close()
# 添加招行站点
zhaohang_site_set = all_site_dict.get(table_name)
assert zhaohang_site_set is not None
for key in zhaohang_site_set:
if key in site_count_map:
continue
site_count_map[key] = 0
total_count = 0
sort_count_list = sorted(site_count_map.items(), key=lambda it: it[0])
for _site, site_count in sort_count_list:
total_count += site_count
item = {u"主题": topic_name_list[index] + table_name,
u"站点": _site,
sheet_one_col_list[2]: site_count}
# 3 官方数量
# 4 数据占比
# 如果站点有官方统计数目则进行占比计算
row_key = table_name + _site
if row_key in site_statistics_dict:
item[sheet_one_col_list[3]] = site_statistics_dict[row_key]
if site_statistics_dict[row_key] > 0:
item[sheet_one_col_list[4]] = site_count / (site_statistics_dict[row_key] * 1.0)
else:
item[sheet_one_col_list[4]] = 1.0
else:
item[sheet_one_col_list[3]] = 0
item[sheet_one_col_list[4]] = 1.0
if _site in zhaohang_site_set:
item[sheet_one_col_list[-1]] = u'是'
else:
item[sheet_one_col_list[-1]] = u'------'
log.info(json.dumps(item, ensure_ascii=False))
sheet_one_list.append(item)
if row_key in import_site_set:
import_sheet_list.append(item)
# 计算总量
total_item = {u"主题": topic_name_list[index] + table_name,
sheet_two_col_list[-1]: total_count}
sheet_two_list.append(total_item)
log.info(json.dumps(total_item, ensure_ascii=False))
df = pandas.DataFrame(sheet_one_list, columns=sheet_one_col_list)
df2 = pandas.DataFrame(sheet_two_list, columns=sheet_two_col_list)
with pandas.ExcelWriter(excel_name) as writer:
df.to_excel(writer, index=False)
df2.to_excel(writer, sheet_name="sheet2", index=False)
df3 = pandas.DataFrame(import_sheet_list, columns=sheet_one_col_list)
with pandas.ExcelWriter(import_site_name) as writer:
df3.to_excel(writer, index=False)
log.info('统计结束...')
@click.command()
@click.option("-w", "--whole", default="", help=u"全量统计")
def main(whole):
log.info("开始启动统计..")
cur_time = datetime.datetime.now().strftime('%Y-%m-%d_%H_%M_%S')
statis(False, cur_time, 1)
statis(False, cur_time, 3)
statis(False, cur_time, 7)
if whole == 'all':
statis(True, cur_time)
if __name__ == "__main__":
try:
main()
except Exception as ex:
log.error("程序异常退出:")
log.exception(ex)
| [
"[email protected]"
]
| |
7fe0b97d863104f488ad653d559526403da60608 | f090c3e0faa70cf0ef7c4be99cb894630bce2842 | /scripts/dataAnalysis/EnergyTransport/2013Aug04/individual_fits/function of heating time/fitter_script_dsplaced_2212_50_ion2.py | 76cd7f8d1e7e158da7d9e4d47a24dc31d87797e8 | []
| no_license | HaeffnerLab/resonator | 157d1dc455209da9b7de077157bda53b4883c8b7 | 7c2e377fdc45f6c1ad205f8bbc2e6607eb3fdc71 | refs/heads/master | 2021-01-09T20:48:03.587634 | 2016-09-22T18:40:17 | 2016-09-22T18:40:17 | 6,715,345 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,175 | py | import lmfit
import labrad
from labrad import types as T
from lamb_dicke import lamb_dicke
from rabi_flop_fitter import rabi_flop_time_evolution
import numpy as np
from matplotlib import pyplot
'''
script parameters
'''
info = ('Carrier Flops', ('2013Sep04','2212_50'))
ion_selection = 2
trap_frequency = T.Value(3.0, 'MHz')
projection_angle = 45 #degrees
offset_time = 0.0
sideband_order = -1
fitting_region = (0, 40) #microseconds
'''
compute lamb dicke parameter
'''
eta = lamb_dicke.lamb_dicke(trap_frequency, projection_angle)
print 'Lamb Dicke parameter: {0:.2f}'.format(eta)
'''
initialize the fitter
'''
flop = rabi_flop_time_evolution(sideband_order, eta)
'''
create fitting parameters
'''
params = lmfit.Parameters()
params.add('excitation_scaling', value = 1.0, vary = False)
params.add('detuning', value = 0, vary = 0) #units of rabi frequency
params.add('time_2pi', value = 1.532954, vary = 0) #microseconds
params.add('nbar', value = 3.699035, min = 0.0, max = 200.0, vary= 0)
params.add('alpha', value = 1.0, min = 0.0, max = 200.0, vary = 1)
'''
load the dataset
'''
dv = labrad.connect().data_vault
title,dataset = info
date,datasetName = dataset
dv.cd( ['','Experiments','Blue Heat RabiFlopping',date,datasetName] )
dv.open(1)
times,prob = dv.get().asarray.transpose()[[0, 1 + ion_selection],:]
print 'heat duration', dict(dv.get_parameters())['Heating.blue_heating_duration']
tmin,tmax = times.min(), times.max()
detailed_times = np.linspace(tmin, tmax, 1000)
'''
compute time evolution of the guessed parameters
'''
guess_evolution = flop.compute_evolution_coherent(params['nbar'].value , params['alpha'].value, params['detuning'].value, params['time_2pi'].value, detailed_times - offset_time, excitation_scaling = params['excitation_scaling'].value)
'''
define how to compare data to the function
'''
def rabi_flop_fit_thermal(params , t, data):
model = flop.compute_evolution_coherent(params['nbar'].value , params['alpha'].value, params['detuning'].value, params['time_2pi'].value, t - offset_time, excitation_scaling = params['excitation_scaling'].value)
return model - data
'''
perform the fit
'''
region = (fitting_region[0] <= times) * (times <= fitting_region[1])
result = lmfit.minimize(rabi_flop_fit_thermal, params, args = (times[region], prob[region]))
fit_values = flop.compute_evolution_coherent(params['nbar'].value , params['alpha'].value, params['detuning'].value, params['time_2pi'].value, detailed_times - offset_time, excitation_scaling = params['excitation_scaling'].value)
lmfit.report_errors(params)
'''
make the plot
'''
pyplot.figure()
pyplot.plot(detailed_times, guess_evolution, '--k', alpha = 0.5, label = 'initial guess')
pyplot.plot(times, prob, 'ob', label = 'data')
pyplot.plot(detailed_times, fit_values, 'r', label = 'fitted')
pyplot.legend()
pyplot.title(title)
pyplot.xlabel('time (us)')
pyplot.ylabel('D state occupation probability')
pyplot.text(max(times)*0.70,0.68, 'detuning = {0}'.format(params['detuning'].value))
pyplot.text(max(times)*0.70,0.73, 'nbar = {:.0f}'.format(params['nbar'].value))
pyplot.text(max(times)*0.70,0.78, '2 Pi Time = {:.1f} us'.format(params['time_2pi'].value))
pyplot.show() | [
"[email protected]"
]
| |
0c5dad8fd3938d30a3086f85c582ec0892a2191f | 3f46af2da32d9f02d1ebbdef6784ece1d64aace3 | /Production/python/PrivateSamples/EMJ_2016_mMed-1600_mDark-20_ctau-225_unflavored-down_cff.py | 108dad647638d680e89dd21aef1f1b6a9bff01af | []
| no_license | cms-svj/TreeMaker | 53bf4b1e35d2e2a4fa99c13c2c8b60a207676b6d | 0ded877bcac801a2a394ad90ed987a20caa72a4c | refs/heads/Run2_2017 | 2023-07-19T07:14:39.175712 | 2020-10-06T21:10:26 | 2020-10-06T21:10:26 | 305,753,513 | 0 | 0 | null | 2021-01-26T18:58:54 | 2020-10-20T15:32:19 | null | UTF-8 | Python | false | false | 1,892 | py | import FWCore.ParameterSet.Config as cms
maxEvents = cms.untracked.PSet( input = cms.untracked.int32(-1) )
readFiles = cms.untracked.vstring()
secFiles = cms.untracked.vstring()
source = cms.Source ("PoolSource",fileNames = readFiles, secondaryFileNames = secFiles)
readFiles.extend( [
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-1.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-2.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-3.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-4.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-5.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-6.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-7.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-8.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-9.root',
'gsiftp://hepcms-gridftp.umd.edu//mnt/hadoop/cms/store/group/EMJRunII/2016/step4_MINIAOD_mMed-1600_mDark-20_ctau-225_unflavored-down_n-500_part-10.root',
] )
| [
"[email protected]"
]
| |
32d00cbdf934957158d5c286facfeab2e5d2170f | af632a0d727cd350a3c95360bb1bb8a411051da7 | /mysite/reading/migrations/0005_auto__add_field_text_synopsis.py | 92b689114d31eff115b9e7fc5a753e368632936b | []
| no_license | rybesh/mysite | f760fec83f1b552abd62010cff4ada4c6fda66b0 | c091284d802ef719d7535d9c8790f4c6e458f905 | refs/heads/master | 2016-09-05T18:01:31.200290 | 2014-07-23T15:36:09 | 2014-07-23T15:36:09 | 1,242,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,481 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Text.synopsis'
db.add_column('reading_text', 'synopsis', self.gf('django.db.models.fields.TextField')(default=''), keep_default=False)
def backwards(self, orm):
# Deleting field 'Text.synopsis'
db.delete_column('reading_text', 'synopsis')
models = {
'reading.note': {
'Meta': {'object_name': 'Note'},
'created': ('django.db.models.fields.DateTimeField', [], {'unique': 'True', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'markdown': ('django.db.models.fields.TextField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'text': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'notes'", 'to': "orm['reading.Text']"})
},
'reading.text': {
'Meta': {'object_name': 'Text'},
'bibtex': ('django.db.models.fields.TextField', [], {}),
'citation_key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32', 'db_index': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'markdown': ('django.db.models.fields.TextField', [], {}),
'modified': ('django.db.models.fields.DateTimeField', [], {}),
'related_texts': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_texts_rel_+'", 'to': "orm['reading.Text']"}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '80', 'db_index': 'True'}),
'small_image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'synopsis': ('django.db.models.fields.TextField', [], {})
}
}
complete_apps = ['reading']
| [
"[email protected]"
]
| |
fd5cb1e3cc6d7bf3bc992db71056e2364fb1b3ab | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_6845.py | 25671db34c9e0643e32318d55e46d052ec86f703 | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 297 | py | # Efficient way to format string
"select %(tableName)s.somefield, count(*) from %(tableName)s WHERE %(tableName)s.TimeStamp > %(fromDate)s and %(tableName)s.EndTimeStamp < %(to_data)s group by %(tableName)s.ProviderUsername;" %{'tableName':tableName, 'fromDate':fromDate, 'to_data':to_data}
| [
"[email protected]"
]
| |
76cb32021bcffbb90f7204eb7683d786698f1d8a | cbedb18df0aaac810aeea87a2273edb15c1cf899 | /from Stephen/google list/752. Open the Lock (pass, bfs).py | 03123b3ab45fbe0c24776bf3c08be55ad02d55cd | []
| no_license | kanglicheng/CodeBreakersCode | 71b833bb9f4c96d520c26f0044365dc62137a940 | 31f7f730227a0e10951e7468bad1b995cf2eafcb | refs/heads/master | 2023-08-07T20:32:05.267695 | 2020-09-14T14:36:25 | 2020-09-14T14:36:25 | 265,978,034 | 0 | 0 | null | 2020-05-22T00:05:29 | 2020-05-22T00:05:29 | null | UTF-8 | Python | false | false | 2,272 | py | class Solution:
def openLock(self, deadends: List[str], target: str) -> int:
'''
shortest path -> BFS
'''
def toString(cur):
_str = ""
for v in cur:
_str += str(v)
return _str
def checkDeadEnds(deadendsSet, _str):
if _str in deadendsSet:
return False
return True
def findNextStep(deadendsSet, curLock, visited):
directions = [[1,0,0,0], [-1,0,0,0],
[0,1,0,0], [0,-1,0,0],
[0,0,1,0], [0,0,-1,0],
[0,0,0,1], [0,0,0,-1]]
nextSteps = []
for d in directions:
cur = [curLock[0] + d[0], curLock[1] + d[1], curLock[2] + d[2], curLock[3] + d[3]]
for i in range(0, 4):
if cur[i] == -1:
cur[i] = 9
elif cur[i] == 10:
cur[i] = 0
_str = toString(cur)
if checkDeadEnds(deadendsSet, _str) and _str not in visited:
nextSteps.append(cur)
visited.add(_str)
return nextSteps
deadendsSet = set()
for d in deadends:
deadendsSet.add(d)
lock = [0,0,0,0]
if toString(lock) in deadendsSet:
return -1
q = collections.deque()
q.append(lock)
moves = 0
visited = set()
while len(q) > 0:
curSize = len(q)
for i in range(0, curSize):
cur = q.pop()
if toString(cur) == target:
return moves
nextSteps = findNextStep(deadendsSet, cur, visited)
q.extendleft(nextSteps)
moves += 1
return -1
| [
"[email protected]"
]
| |
07aa0556223da2feccd58233234db58c8f18e439 | 35fff80627ad675bec1e429943cb2bbbaf141ca2 | /notebooks/Papers/paper2/packages/lc/base.py | 83a7f05be6d1e91398e9dfda3a61890825c177d8 | []
| no_license | ishrat2003/IS-Goldsmiths | bac3473b7ffde7cebfb952cd78aba510c8d72c6f | afae9525ceb62cd09eb14149ee2b88798c5ceb90 | refs/heads/master | 2020-04-27T09:24:10.399620 | 2019-10-16T21:23:13 | 2019-10-16T21:23:13 | 174,212,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,442 | py | import re, sys, numpy
from nltk import word_tokenize, pos_tag
from nltk.stem.porter import PorterStemmer
import utility
from sklearn.cluster import KMeans
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.pylab import rcParams
class Base():
def __init__(self, text, filterRate = 0):
self.rawText = text
self.text = self.__clean(text)
self.stopWords = utility.Utility.getStopWords()
self.stemmer = PorterStemmer()
self.wordInfo = {}
self.featuredWordInfo = {}
self.allowedPOSTypes = ['NN', 'NNP', 'NNS', 'NNPS']
self.minWordSize = 2
self.sentences = []
self.punctuationTypes = ['.', '?', '!']
self.maxCount = 1
self.maxScore = 0
self.filterRate = filterRate
self.topScorePercentage = filterRate
self.filteredWords = {}
self.contributors = []
return
'''
allOptions = ['NN', 'NNP', 'NNS', 'NNPS', 'JJ', 'JJR', 'JJS' 'RB', 'RBR', 'RBS', 'VB', 'VBD', 'VBG', 'VBN', 'VBP', 'VBZ']
'''
def setAllowedPosTypes(self, allowedPOSTypes):
self.allowedPOSTypes = allowedPOSTypes
return
def setFilterWords(self, filterRate = 0.2):
self.filterRate = filterRate
self.loadFilteredWords()
return
def setTopScorePercentage(self, topScorePercentage):
self.topScorePercentage = topScorePercentage
return
def getRawText(self):
return self.rawText
def getCleanText(self):
return self.text
def getContrinutors(self):
return self.contributors
def getWordInfo(self):
return self.wordInfo
def getSentences(self):
return self.sentences
def loadFilteredWords(self):
minAllowedScore = self.maxCount * self.filterRate
self.filteredWords = {}
for word in self.wordInfo:
if self.wordInfo[word]['count'] <= minAllowedScore:
continue
index = len(self.filteredWords)
self.filteredWords[word] = self.wordInfo[word]
self.filteredWords[word]['index'] = index
print('----------------------')
print("Total local vocab: ", len(self.wordInfo))
print("Filtered local vocab: ", len(self.filteredWords))
return self.filteredWords
def loadSentences(self, text):
words = self.__getWords(text, True)
self.wordInfo = {}
self.sentences = []
currentSentence = []
for word in words:
(word, type) = word
word = self.__cleanWord(word)
if type in self.punctuationTypes:
if len(currentSentence) > 1:
# If more than one word than add as sentence
self.sentences.append(currentSentence)
currentSentence = []
if len(word) < self.minWordSize:
continue
wordKey = self._addWordInfo(word, type)
if wordKey and (wordKey not in currentSentence):
currentSentence.append(wordKey)
# Processing last sentence
if len(currentSentence) > 1:
# If more than one word than add as sentence
self.sentences.append(currentSentence)
self.filteredWords = self.wordInfo
return self.sentences
def displayPlot(self, fileName):
#rcParams['figure.figsize']=15,10
mpl.rcParams.update({'font.size': 15})
points = self.getPoints()
if not points:
print('No points to display')
return
plt.figure(figsize=(20, 20)) # in inches(x, y, s=None, c=None, marker=None, cmap=None, norm=None, vmin=None, vmax=None, alpha=None, linewidths=None, verts=None, edgecolors=None, *, data=None, **kwargs)[source]
for point in points:
plt.scatter(point['x'], point['y'], c = point['color'])
plt.annotate(point['label'],
xy=(point['x'], point['y']),
xytext=(5, 2),
textcoords='offset points',
ha='right',
va='bottom')
plt.savefig(fileName)
print('After saving')
plt.show()
return
def getPoints(self):
if not len(self.wordInfo):
return None
topWordScores = self.maxScore * self.topScorePercentage
points = []
for word in self.filteredWords:
point = {}
point['x'] = self._getX(word)
point['y'] = self._getY(word)
point['color'] = 'green'
point['label'] = self.filteredWords[word]['pure_word']
point['type'] = self.filteredWords[word]['type']
if self.isTopic(word, topWordScores):
point['color'] = 'red'
self.contributors.append(word)
points.append(point)
return points
def isTopic(self, word, topWordScores):
return (self.filteredWords[word]['score'] >= topWordScores)
def _getX(self, word):
return 0
def _getY(self, word):
return 0
def _addWordInfo(self, word, type):
if not word or (type not in self.allowedPOSTypes):
return None
localWordInfo = {}
localWordInfo['pure_word'] = word
wordKey = self.stemmer.stem(word.lower())
localWordInfo['stemmed_word'] = wordKey
localWordInfo['type'] = type
if localWordInfo['stemmed_word'] in self.wordInfo.keys():
self.wordInfo[wordKey]['count'] += 1
if self.maxCount < self.wordInfo[wordKey]['count']:
self.maxCount = self.wordInfo[wordKey]['count']
return wordKey
localWordInfo['count'] = 1
localWordInfo['index'] = len(self.wordInfo)
self.wordInfo[wordKey] = localWordInfo
return wordKey
def __getWords(self, text, tagPartsOfSpeach = False):
words = word_tokenize(text)
if tagPartsOfSpeach:
return pos_tag(words)
return words
def __cleanWord(self, word):
return re.sub('[^a-zA-Z0-9]+', '', word)
def __clean(self, text):
text = re.sub('<.+?>', '. ', text)
text = re.sub('&.+?;', '', text)
text = re.sub('[\']{1}', '', text)
text = re.sub('[^a-zA-Z0-9\s_\-\?:;\.,!\(\)\"]+', ' ', text)
text = re.sub('\s+', ' ', text)
text = re.sub('(\.\s*)+', '. ', text)
return text
| [
"[email protected]"
]
| |
fb21402d6d4ce50e9817e6581d7ccee2fb038894 | 01d982d22d214265eeb7a00b2b8bdd8c869d9064 | /tests/test_wave_energy.py | 50c95237a276a5ff48fd56e293cfe45c07a90c64 | [
"BSD-3-Clause",
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | hkotaro1215/invest | ad6874ea1a9ac73813292fb88c138d13279988b5 | 1ba08bd746977bfa8a4600ad8c821fc43598c421 | refs/heads/master | 2022-11-12T06:06:22.826122 | 2018-03-26T21:08:18 | 2018-03-26T21:08:18 | 142,378,565 | 0 | 1 | NOASSERTION | 2022-10-15T06:47:29 | 2018-07-26T02:36:20 | Python | UTF-8 | Python | false | false | 26,222 | py | """Module for Testing the InVEST Wave Energy module."""
import unittest
import tempfile
import shutil
import os
import csv
import natcap.invest.pygeoprocessing_0_3_3.testing
from natcap.invest.pygeoprocessing_0_3_3.testing import scm
from natcap.invest.pygeoprocessing_0_3_3.testing import sampledata
import numpy
import numpy.testing
from shapely.geometry import Polygon
from shapely.geometry import Point
from osgeo import gdal
from osgeo import osr
SAMPLE_DATA = os.path.join(
os.path.dirname(__file__), '..', 'data', 'invest-data')
REGRESSION_DATA = os.path.join(
os.path.dirname(__file__), '..', 'data', 'invest-test-data', 'wave_energy')
class WaveEnergyUnitTests(unittest.TestCase):
"""Unit tests for the Wave Energy module."""
def setUp(self):
"""Overriding setUp function to create temp workspace directory."""
# this lets us delete the workspace after its done no matter the
# the rest result
self.workspace_dir = tempfile.mkdtemp()
def tearDown(self):
"""Overriding tearDown function to remove temporary directory."""
shutil.rmtree(self.workspace_dir)
def test_pixel_size_transform(self):
"""WaveEnergy: testing pixel size transform helper function.
Function name is : 'pixel_size_based_on_coordinate_transform'.
"""
from natcap.invest.wave_energy import wave_energy
temp_dir = self.workspace_dir
srs = sampledata.SRS_WILLAMETTE
srs_wkt = srs.projection
spat_ref = osr.SpatialReference()
spat_ref.ImportFromWkt(srs_wkt)
# Define a Lat/Long WGS84 projection
epsg_id = 4326
reference = osr.SpatialReference()
proj_result = reference.ImportFromEPSG(epsg_id)
# Get projection as WKT
latlong_proj = reference.ExportToWkt()
# Set origin to use for setting up geometries / geotransforms
latlong_origin = (-70.5, 42.5)
# Pixel size helper for defining lat/long pixel size
pixel_size = lambda x: (x, -1. * x)
# Get a point from the clipped data object to use later in helping
# determine proper pixel size
matrix = numpy.array([[1, 1, 1, 1], [1, 1, 1, 1]])
input_path = os.path.join(temp_dir, 'input_raster.tif')
# Create raster to use as testing input
raster_uri = natcap.invest.pygeoprocessing_0_3_3.testing.create_raster_on_disk(
[matrix], latlong_origin, latlong_proj, -1.0,
pixel_size(0.033333), filename=input_path)
raster_gt = natcap.invest.pygeoprocessing_0_3_3.geoprocessing.get_geotransform_uri(
raster_uri)
point = (raster_gt[0], raster_gt[3])
raster_wkt = latlong_proj
# Create a Spatial Reference from the rasters WKT
raster_sr = osr.SpatialReference()
raster_sr.ImportFromWkt(raster_wkt)
# A coordinate transformation to help get the proper pixel size of
# the reprojected raster
coord_trans = osr.CoordinateTransformation(raster_sr, spat_ref)
# Call the function to test
result = wave_energy.pixel_size_based_on_coordinate_transform(
raster_uri, coord_trans, point)
expected_res = (5553.933, 1187.371)
# Compare
for res, exp in zip(result, expected_res):
natcap.invest.pygeoprocessing_0_3_3.testing.assert_close(res, exp)
def test_count_pixels_groups(self):
"""WaveEnergy: testing 'count_pixels_groups' function."""
from natcap.invest.wave_energy import wave_energy
temp_dir = self.workspace_dir
raster_uri = os.path.join(temp_dir, 'pixel_groups.tif')
srs = sampledata.SRS_WILLAMETTE
group_values = [1, 3, 5, 7]
matrix = numpy.array([[1, 3, 5, 9], [3, 7, 1, 5], [2, 4, 5, 7]])
# Create raster to use for testing input
raster_uri = natcap.invest.pygeoprocessing_0_3_3.testing.create_raster_on_disk(
[matrix], srs.origin, srs.projection, -1, srs.pixel_size(100),
datatype=gdal.GDT_Int32, filename=raster_uri)
results = wave_energy.count_pixels_groups(raster_uri, group_values)
expected_results = [2, 2, 3, 2]
for res, exp_res in zip(results, expected_results):
natcap.invest.pygeoprocessing_0_3_3.testing.assert_close(res, exp_res, 1e-9)
def test_calculate_percentiles_from_raster(self):
"""WaveEnergy: testing 'calculate_percentiles_from_raster' function."""
from natcap.invest.wave_energy import wave_energy
temp_dir = self.workspace_dir
raster_uri = os.path.join(temp_dir, 'percentile.tif')
srs = sampledata.SRS_WILLAMETTE
matrix = numpy.arange(1, 101)
matrix = matrix.reshape(10, 10)
raster_uri = natcap.invest.pygeoprocessing_0_3_3.testing.create_raster_on_disk(
[matrix], srs.origin, srs.projection, -1, srs.pixel_size(100),
datatype=gdal.GDT_Int32, filename=raster_uri)
percentiles = [0, 25, 50, 75]
results = wave_energy.calculate_percentiles_from_raster(
raster_uri, percentiles)
expected_results = [1, 26, 51, 76]
for res, exp_res in zip(results, expected_results):
self.assertEqual(res, exp_res)
def test_create_percentile_ranges(self):
"""WaveEnergy: testing 'create_percentile_ranges' function."""
from natcap.invest.wave_energy import wave_energy
percentiles = [20, 40, 60, 80]
units_short = " m/s"
units_long = " speed of a bullet in m/s"
start_value = "5"
result = wave_energy.create_percentile_ranges(
percentiles, units_short, units_long, start_value)
exp_result = ["5 - 20 speed of a bullet in m/s",
"20 - 40 m/s", "40 - 60 m/s", "60 - 80 m/s",
"Greater than 80 m/s"]
for res, exp_res in zip(result, exp_result):
self.assertEqual(res, exp_res)
def test_calculate_distance(self):
"""WaveEnergy: testing 'calculate_distance' function."""
from natcap.invest.wave_energy import wave_energy
srs = sampledata.SRS_WILLAMETTE
pos_x = srs.origin[0]
pos_y = srs.origin[1]
set_one = numpy.array([
[pos_x, pos_y], [pos_x, pos_y - 100], [pos_x, pos_y - 200]])
set_two = numpy.array([
[pos_x + 100, pos_y], [pos_x + 100, pos_y - 100],
[pos_x + 100, pos_y - 200]])
result_dist, result_id = wave_energy.calculate_distance(
set_one, set_two)
expected_result_dist = [100, 100, 100]
expected_result_id = [0, 1, 2]
for res, exp_res in zip(result_dist, expected_result_dist):
self.assertEqual(res, exp_res)
for res, exp_res in zip(result_id, expected_result_id):
self.assertEqual(res, exp_res)
    @scm.skip_if_data_missing(SAMPLE_DATA)
    @scm.skip_if_data_missing(REGRESSION_DATA)
    def test_clip_datasource_layer_polygons(self):
        """WaveEnergy: testing clipping polygons from polygons.

        Clips the projected AOI against the global wave-data extract
        polygons and compares the result to a regression shapefile.
        """
        from natcap.invest.wave_energy import wave_energy
        temp_dir = self.workspace_dir
        # NOTE(review): 'srs' appears unused in this test.
        srs = sampledata.SRS_WILLAMETTE
        aoi_path = os.path.join(REGRESSION_DATA, 'aoi_proj_to_extract.shp')
        extract_path = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'WaveData',
            'Global_extract.shp')
        result_path = os.path.join(temp_dir, 'aoi_proj_clipped.shp')
        # Clip aoi_path against extract_path, writing to result_path.
        wave_energy.clip_datasource_layer(aoi_path, extract_path, result_path)
        expected_path = os.path.join(REGRESSION_DATA, 'aoi_proj_clipped.shp')
        natcap.invest.pygeoprocessing_0_3_3.testing.assert_vectors_equal(
            result_path, expected_path)
    def test_clip_datasource_layer_points(self):
        """WaveEnergy: testing clipping points from polygons.

        Three points are clipped against a 60x60 square; the one point
        lying outside the square must be dropped.
        """
        from natcap.invest.wave_energy import wave_energy
        temp_dir = self.workspace_dir
        srs = sampledata.SRS_WILLAMETTE
        pos_x = srs.origin[0]
        pos_y = srs.origin[1]
        fields_pt = {'id': 'int', 'myattr': 'string'}
        attrs_one = [
            {'id': 1, 'myattr': 'hello'}, {'id': 2, 'myattr': 'bye'},
            {'id': 3, 'myattr': 'highbye'}]
        fields_poly = {'id': 'int'}
        attrs_poly = [{'id': 1}]
        # Create geometry for the points, which will get clipped; the
        # third point (x + 100) lies outside the 60x60 polygon below.
        geom_one = [
            Point(pos_x + 20, pos_y - 20), Point(pos_x + 40, pos_y - 20),
            Point(pos_x + 100, pos_y - 20)]
        # Create geometry for the polygons, which will be used to clip
        geom_two = [Polygon(
            [(pos_x, pos_y), (pos_x + 60, pos_y), (pos_x + 60, pos_y - 60),
             (pos_x, pos_y - 60), (pos_x, pos_y)])]
        shape_to_clip_uri = os.path.join(temp_dir, 'shape_to_clip.shp')
        # Create the point shapefile
        shape_to_clip_uri = natcap.invest.pygeoprocessing_0_3_3.testing.create_vector_on_disk(
            geom_one, srs.projection, fields_pt, attrs_one,
            vector_format='ESRI Shapefile', filename=shape_to_clip_uri)
        binding_shape_uri = os.path.join(temp_dir, 'binding_shape.shp')
        # Create the polygon shapefile
        binding_shape_uri = natcap.invest.pygeoprocessing_0_3_3.testing.create_vector_on_disk(
            geom_two, srs.projection, fields_poly, attrs_poly,
            vector_format='ESRI Shapefile', filename=binding_shape_uri)
        output_path = os.path.join(temp_dir, 'vector.shp')
        # Call the function to test
        wave_energy.clip_datasource_layer(
            shape_to_clip_uri, binding_shape_uri, output_path)
        # Create the expected point shapefile: only the two points inside
        # the polygon should survive the clip.
        fields_pt = {'id': 'int', 'myattr': 'string'}
        attrs_one = [{'id': 1, 'myattr': 'hello'}, {'id': 2, 'myattr': 'bye'}]
        geom_three = [Point(pos_x + 20, pos_y - 20),
                      Point(pos_x + 40, pos_y - 20)]
        # Need to save the expected shapefile in a sub folder since it must
        # have the same layer name / filename as what it will be compared
        # against.
        if not os.path.isdir(os.path.join(temp_dir, 'exp_vector')):
            os.mkdir(os.path.join(temp_dir, 'exp_vector'))
        expected_uri = os.path.join(temp_dir, 'exp_vector', 'vector.shp')
        expected_shape = natcap.invest.pygeoprocessing_0_3_3.testing.create_vector_on_disk(
            geom_three, srs.projection, fields_pt, attrs_one,
            vector_format='ESRI Shapefile', filename=expected_uri)
        natcap.invest.pygeoprocessing_0_3_3.testing.assert_vectors_equal(
            output_path, expected_shape)
    # NOTE(review): "datasouce" is a typo for "datasource"; left as-is
    # because renaming a test method changes test discovery / reporting.
    def test_clip_datasouce_layer_no_intersection(self):
        """WaveEnergy: testing 'clip_datasource_layer' w/ no intersection.

        The single point lies entirely outside the clipping polygon, so
        the function is expected to raise IntersectionError.
        """
        from natcap.invest.wave_energy import wave_energy
        temp_dir = self.workspace_dir
        srs = sampledata.SRS_WILLAMETTE
        pos_x = srs.origin[0]
        pos_y = srs.origin[1]
        fields_pt = {'id': 'int', 'myattr': 'string'}
        attrs_one = [{'id': 1, 'myattr': 'hello'}]
        fields_poly = {'id': 'int'}
        attrs_poly = [{'id': 1}]
        # Create geometry for the points, which will get clipped; the point
        # at (x + 220, y - 220) is outside the 60x60 polygon below.
        geom_one = [
            Point(pos_x + 220, pos_y - 220)]
        # Create geometry for the polygons, which will be used to clip
        geom_two = [Polygon(
            [(pos_x, pos_y), (pos_x + 60, pos_y), (pos_x + 60, pos_y - 60),
             (pos_x, pos_y - 60), (pos_x, pos_y)])]
        shape_to_clip_uri = os.path.join(temp_dir, 'shape_to_clip.shp')
        # Create the point shapefile
        shape_to_clip_uri = natcap.invest.pygeoprocessing_0_3_3.testing.create_vector_on_disk(
            geom_one, srs.projection, fields_pt, attrs_one,
            vector_format='ESRI Shapefile', filename=shape_to_clip_uri)
        binding_shape_uri = os.path.join(temp_dir, 'binding_shape.shp')
        # Create the polygon shapefile
        binding_shape_uri = natcap.invest.pygeoprocessing_0_3_3.testing.create_vector_on_disk(
            geom_two, srs.projection, fields_poly, attrs_poly,
            vector_format='ESRI Shapefile', filename=binding_shape_uri)
        output_path = os.path.join(temp_dir, 'vector.shp')
        # Call the function to test
        self.assertRaises(
            wave_energy.IntersectionError, wave_energy.clip_datasource_layer,
            shape_to_clip_uri, binding_shape_uri, output_path)
def test_create_attribute_csv_table(self):
"""WaveEnergy: testing 'create_attribute_csv_table' function."""
from natcap.invest.wave_energy import wave_energy
temp_dir = self.workspace_dir
table_uri = os.path.join(temp_dir, 'att_csv_file.csv')
fields = ['id', 'height', 'length']
data = {1: {'id': 1, 'height': 10, 'length': 15},
0: {'id': 0, 'height': 10, 'length': 15},
2: {'id': 2, 'height': 10, 'length': 15}}
wave_energy.create_attribute_csv_table(table_uri, fields, data)
exp_rows = [{'id': '0', 'height': '10', 'length': '15'},
{'id': '1', 'height': '10', 'length': '15'},
{'id': '2', 'height': '10', 'length': '15'}]
result_file = open(table_uri, 'rU')
csv_reader = csv.DictReader(result_file)
for row, exp_row in zip(csv_reader, exp_rows):
self.assertDictEqual(row, exp_row)
result_file.close()
@scm.skip_if_data_missing(REGRESSION_DATA)
def test_load_binary_wave_data(self):
"""WaveEnergy: testing 'load_binary_wave_data' function."""
from natcap.invest.wave_energy import wave_energy
wave_file_uri = os.path.join(REGRESSION_DATA, 'example_ww3_binary.bin')
result = wave_energy.load_binary_wave_data(wave_file_uri)
exp_res = {'periods': numpy.array(
[.375, 1, 1.5, 2.0], dtype=numpy.float32),
'heights': numpy.array([.375, 1], dtype=numpy.float32),
'bin_matrix': {
(102, 370): numpy.array(
[[0, 0, 0, 0], [0, 9, 3, 30]], dtype=numpy.float32),
(102, 371): numpy.array(
[[0, 0, 0, 0], [0, 0, 3, 27]], dtype=numpy.float32)}
}
for key in ['periods', 'heights']:
numpy.testing.assert_array_equal(result[key], exp_res[key])
for key in [(102, 370), (102, 371)]:
numpy.testing.assert_array_equal(
result['bin_matrix'][key], exp_res['bin_matrix'][key])
class WaveEnergyRegressionTests(unittest.TestCase):
    """Regression tests for the Wave Energy module.

    Each test runs wave_energy.execute() end-to-end and compares the
    produced rasters / vectors / CSV tables against stored regression
    data under REGRESSION_DATA.
    """

    def setUp(self):
        """Overriding setUp function to create temp workspace directory."""
        # this lets us delete the workspace after it's done no matter the
        # test result
        self.workspace_dir = tempfile.mkdtemp()

    def tearDown(self):
        """Overriding tearDown function to remove temporary directory."""
        shutil.rmtree(self.workspace_dir)

    @staticmethod
    def generate_base_args(workspace_dir):
        """Generate an args list that is consistent across regression tests."""
        args = {
            'workspace_dir': workspace_dir,
            'wave_base_data_uri': os.path.join(
                SAMPLE_DATA, 'WaveEnergy', 'input', 'WaveData'),
            'analysis_area_uri': 'West Coast of North America and Hawaii',
            'machine_perf_uri': os.path.join(
                SAMPLE_DATA, 'WaveEnergy', 'input',
                'Machine_Pelamis_Performance.csv'),
            'machine_param_uri': os.path.join(
                SAMPLE_DATA, 'WaveEnergy', 'input',
                'Machine_Pelamis_Parameter.csv'),
            'dem_uri': os.path.join(
                SAMPLE_DATA, 'Base_Data', 'Marine', 'DEMs', 'global_dem')
        }
        return args

    @scm.skip_if_data_missing(SAMPLE_DATA)
    @scm.skip_if_data_missing(REGRESSION_DATA)
    def test_valuation(self):
        """WaveEnergy: testing valuation component."""
        from natcap.invest.wave_energy import wave_energy
        args = WaveEnergyRegressionTests.generate_base_args(self.workspace_dir)
        args['aoi_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'AOI_WCVI.shp')
        args['valuation_container'] = True
        args['land_gridPts_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'LandGridPts_WCVI.csv')
        args['machine_econ_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'Machine_Pelamis_Economic.csv')
        args['number_of_machines'] = 28
        wave_energy.execute(args)
        # Compare every produced raster / vector / table against the
        # stored 'valuation' regression outputs.
        raster_results = [
            'wp_rc.tif', 'wp_kw.tif', 'capwe_rc.tif', 'capwe_mwh.tif',
            'npv_rc.tif', 'npv_usd.tif']
        for raster_path in raster_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_rasters_equal(
                os.path.join(args['workspace_dir'], 'output', raster_path),
                os.path.join(REGRESSION_DATA, 'valuation', raster_path),
                1e-9)
        vector_results = ['GridPts_prj.shp', 'LandPts_prj.shp']
        for vector_path in vector_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_vectors_equal(
                os.path.join(args['workspace_dir'], 'output', vector_path),
                os.path.join(REGRESSION_DATA, 'valuation', vector_path))
        table_results = ['capwe_rc.csv', 'wp_rc.csv', 'npv_rc.csv']
        for table_path in table_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_csv_equal(
                os.path.join(args['workspace_dir'], 'output', table_path),
                os.path.join(REGRESSION_DATA, 'valuation', table_path))

    @scm.skip_if_data_missing(SAMPLE_DATA)
    @scm.skip_if_data_missing(REGRESSION_DATA)
    def test_biophysical_aoi(self):
        """WaveEnergy: testing Biophysical component with an AOI."""
        from natcap.invest.wave_energy import wave_energy
        args = WaveEnergyRegressionTests.generate_base_args(self.workspace_dir)
        args['aoi_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'AOI_WCVI.shp')
        wave_energy.execute(args)
        raster_results = [
            'wp_rc.tif', 'wp_kw.tif', 'capwe_rc.tif', 'capwe_mwh.tif']
        for raster_path in raster_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_rasters_equal(
                os.path.join(args['workspace_dir'], 'output', raster_path),
                os.path.join(REGRESSION_DATA, 'aoi', raster_path),
                1e-9)
        table_results = ['capwe_rc.csv', 'wp_rc.csv']
        for table_path in table_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_csv_equal(
                os.path.join(args['workspace_dir'], 'output', table_path),
                os.path.join(REGRESSION_DATA, 'aoi', table_path),
                1e-9)

    @scm.skip_if_data_missing(SAMPLE_DATA)
    @scm.skip_if_data_missing(REGRESSION_DATA)
    def test_biophysical_no_aoi(self):
        """WaveEnergy: testing Biophysical component with no AOI."""
        from natcap.invest.wave_energy import wave_energy
        args = WaveEnergyRegressionTests.generate_base_args(self.workspace_dir)
        wave_energy.execute(args)
        raster_results = [
            'wp_rc.tif', 'wp_kw.tif', 'capwe_rc.tif', 'capwe_mwh.tif']
        for raster_path in raster_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_rasters_equal(
                os.path.join(args['workspace_dir'], 'output', raster_path),
                os.path.join(REGRESSION_DATA, 'noaoi', raster_path),
                1e-9)
        table_results = ['capwe_rc.csv', 'wp_rc.csv']
        for table_path in table_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_csv_equal(
                os.path.join(args['workspace_dir'], 'output', table_path),
                os.path.join(REGRESSION_DATA, 'noaoi', table_path),
                1e-9)

    @scm.skip_if_data_missing(SAMPLE_DATA)
    @scm.skip_if_data_missing(REGRESSION_DATA)
    def test_valuation_suffix(self):
        """WaveEnergy: testing suffix through Valuation."""
        from natcap.invest.wave_energy import wave_energy
        args = WaveEnergyRegressionTests.generate_base_args(self.workspace_dir)
        args['aoi_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'AOI_WCVI.shp')
        args['valuation_container'] = True
        args['land_gridPts_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'LandGridPts_WCVI.csv')
        args['machine_econ_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'Machine_Pelamis_Economic.csv')
        args['number_of_machines'] = 28
        # Suffix without a leading underscore; the model should insert one.
        args['suffix'] = 'val'
        wave_energy.execute(args)
        # Only file existence is checked here, not contents.
        raster_results = [
            'wp_rc_val.tif', 'wp_kw_val.tif', 'capwe_rc_val.tif',
            'capwe_mwh_val.tif', 'npv_rc_val.tif', 'npv_usd_val.tif']
        for raster_path in raster_results:
            self.assertTrue(os.path.exists(
                os.path.join(args['workspace_dir'], 'output', raster_path)))
        vector_results = ['GridPts_prj_val.shp', 'LandPts_prj_val.shp']
        for vector_path in vector_results:
            self.assertTrue(os.path.exists(
                os.path.join(args['workspace_dir'], 'output', vector_path)))
        table_results = ['capwe_rc_val.csv', 'wp_rc_val.csv', 'npv_rc_val.csv']
        for table_path in table_results:
            self.assertTrue(os.path.exists(
                os.path.join(args['workspace_dir'], 'output', table_path)))

    @scm.skip_if_data_missing(SAMPLE_DATA)
    @scm.skip_if_data_missing(REGRESSION_DATA)
    def test_valuation_suffix_underscore(self):
        """WaveEnergy: testing suffix with an underscore through Valuation."""
        from natcap.invest.wave_energy import wave_energy
        args = WaveEnergyRegressionTests.generate_base_args(self.workspace_dir)
        args['aoi_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'AOI_WCVI.shp')
        args['valuation_container'] = True
        args['land_gridPts_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'LandGridPts_WCVI.csv')
        args['machine_econ_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'Machine_Pelamis_Economic.csv')
        args['number_of_machines'] = 28
        # Suffix already has the underscore; output names must not get two.
        args['suffix'] = '_val'
        wave_energy.execute(args)
        raster_results = [
            'wp_rc_val.tif', 'wp_kw_val.tif', 'capwe_rc_val.tif',
            'capwe_mwh_val.tif', 'npv_rc_val.tif', 'npv_usd_val.tif']
        for raster_path in raster_results:
            self.assertTrue(os.path.exists(
                os.path.join(args['workspace_dir'], 'output', raster_path)))
        vector_results = ['GridPts_prj_val.shp', 'LandPts_prj_val.shp']
        for vector_path in vector_results:
            self.assertTrue(os.path.exists(
                os.path.join(args['workspace_dir'], 'output', vector_path)))
        table_results = ['capwe_rc_val.csv', 'wp_rc_val.csv', 'npv_rc_val.csv']
        for table_path in table_results:
            self.assertTrue(os.path.exists(
                os.path.join(args['workspace_dir'], 'output', table_path)))

    @scm.skip_if_data_missing(SAMPLE_DATA)
    @scm.skip_if_data_missing(REGRESSION_DATA)
    def test_removing_filenames(self):
        """WaveEnergy: testing file paths which already exist are removed."""
        from natcap.invest.wave_energy import wave_energy
        # NOTE(review): this test uses a fixed relative directory instead of
        # self.workspace_dir, so tearDown() will NOT remove it -- confirm
        # whether this workaround (and the leak) is intentional.
        workspace_dir = 'test_removing_filenames'
        args = WaveEnergyRegressionTests.generate_base_args(workspace_dir)
        args['aoi_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'AOI_WCVI.shp')
        args['valuation_container'] = True
        args['land_gridPts_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'LandGridPts_WCVI.csv')
        args['machine_econ_uri'] = os.path.join(
            SAMPLE_DATA, 'WaveEnergy', 'input', 'Machine_Pelamis_Economic.csv')
        args['number_of_machines'] = 28
        wave_energy.execute(args)
        # Run through the model again, which should mean deleting
        # shapefiles that have already been made, but which need
        # to be created again.
        wave_energy.execute(args)
        raster_results = [
            'wp_rc.tif', 'wp_kw.tif', 'capwe_rc.tif', 'capwe_mwh.tif',
            'npv_rc.tif', 'npv_usd.tif']
        for raster_path in raster_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_rasters_equal(
                os.path.join(args['workspace_dir'], 'output', raster_path),
                os.path.join(REGRESSION_DATA, 'valuation', raster_path),
                1e-9)
        vector_results = ['GridPts_prj.shp', 'LandPts_prj.shp']
        for vector_path in vector_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_vectors_equal(
                os.path.join(args['workspace_dir'], 'output', vector_path),
                os.path.join(REGRESSION_DATA, 'valuation', vector_path))
        table_results = ['capwe_rc.csv', 'wp_rc.csv', 'npv_rc.csv']
        for table_path in table_results:
            natcap.invest.pygeoprocessing_0_3_3.testing.assert_csv_equal(
                os.path.join(args['workspace_dir'], 'output', table_path),
                os.path.join(REGRESSION_DATA, 'valuation', table_path))
| [
"[email protected]"
]
| |
9f7e0a4bea9a8a75e861d8d24be4d82b99bb4997 | 3052941bbde225a1ececbeb628c05c47bcd7c494 | /ProgressiveNN_atari_2.py | 4c4812de0830e998441a1ed816cfe2719899c5c4 | []
| no_license | LKH-1/A3C-tensorflow | 32cffb66f07b1903dd774e49aee43bd966b6abd8 | 807e545194534f12a3a1d5838343df5a248bd833 | refs/heads/master | 2021-06-03T03:12:10.595718 | 2016-10-11T02:00:01 | 2016-10-11T02:00:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,299 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# author: [email protected]
import cv2
import re
import gym
import signal
import threading
import scipy.signal
from tensorflow.python.ops.rnn_cell import BasicLSTMCell
from common import *
# Command-line flags for the Progressive-NN A3C trainer.
tf.app.flags.DEFINE_string("game", "Breakout-v0", "gym environment name")
# BUG FIX: the two directory flags had a copy-pasted "gym environment name"
# help string; describe what they actually hold.
tf.app.flags.DEFINE_string("old_train_dir", "./models/experiment_pnn/column_1",
                           "checkpoint directory of the frozen column-1 model")
tf.app.flags.DEFINE_string("train_dir", "./models/experiment_pnn/column_2",
                           "directory for column-2 checkpoints and summaries")
tf.app.flags.DEFINE_integer("gpu", 0, "gpu id")
tf.app.flags.DEFINE_bool("use_lstm", False, "use LSTM layer")
tf.app.flags.DEFINE_integer("t_max", 8, "episode max time step")
tf.app.flags.DEFINE_integer("t_train", 1e7, "train max time step")
tf.app.flags.DEFINE_integer("jobs", 8, "parallel running thread number")
tf.app.flags.DEFINE_integer("frame_skip", 4, "number of frame skip")
tf.app.flags.DEFINE_integer("frame_seq", 4, "number of frame sequence")
tf.app.flags.DEFINE_string("opt", "rms", "choice in [rms, adam, sgd]")
tf.app.flags.DEFINE_float("learn_rate", 5e-4, "optimizer learning rate")
tf.app.flags.DEFINE_float("eps", 1e-8, "param of smooth")
tf.app.flags.DEFINE_float("scalar", 1.0, "param of scaler for lateral connect")
tf.app.flags.DEFINE_float("entropy_beta", 1e-2, "param of policy entropy weight")
tf.app.flags.DEFINE_float("gamma", 0.95, "discounted ratio")
tf.app.flags.DEFINE_float("train_step", 0, "train step. unchanged")
flags = tf.app.flags.FLAGS
class AtariEnv(object):
    """Thin wrapper around a gym Atari environment.

    Keeps a rolling window of the last `frame_seq` preprocessed frames
    (resized, grayscale, scaled to [0, 1]) as the agent's state.
    """

    def __init__(self, env, screen_size=(84, 84)):
        self.env = env
        # constants
        self.screen_size = screen_size
        self.frame_skip = flags.frame_skip
        self.frame_seq = flags.frame_seq
        # local variables
        self.state = np.zeros(self.state_shape, dtype=np.float32)

    @property
    def state_shape(self):
        """Shape of the stacked-frame state: [H, W, frame_seq]."""
        return [self.screen_size[0], self.screen_size[1], self.frame_seq]

    @property
    def action_dim(self):
        """Number of discrete actions in the wrapped environment."""
        return self.env.action_space.n

    def precess_image(self, image):
        """Resize to screen_size, grayscale, and scale pixels to [0, 1]."""
        # NOTE(review): gym frames are RGB; COLOR_BGR2GRAY swaps the channel
        # weights -- confirm whether RGB2GRAY was intended.
        image = cv2.cvtColor(cv2.resize(image, self.screen_size), cv2.COLOR_BGR2GRAY)
        image = np.divide(image, 256.0)
        return image

    def reset_env(self):
        """Reset the environment; state is zeros except the newest frame."""
        obs = self.env.reset()
        self.state[:, :, :-1] = 0
        self.state[:, :, -1] = self.precess_image(obs)
        return self.state

    def forward_action(self, action):
        """Repeat `action` for up to frame_skip steps; return (state, reward, done)."""
        obs, reward, done = None, None, None
        for _ in xrange(self.frame_skip):
            obs, reward, done, _ = self.env.step(action)
            if done:
                break
        # NOTE(review): only the reward of the last (non-skipped) step is
        # returned; summing over skipped frames is more common -- confirm.
        obs = self.precess_image(obs)
        # BUG FIX: precess_image already scales pixels to [0, 1]; the old
        # extra "/ 256.0" here double-normalized skipped frames, making them
        # inconsistent with the frame written by reset_env.
        obs = np.reshape(obs, newshape=list(self.screen_size) + [1])
        self.state = np.append(self.state[:, :, 1:], obs, axis=2)
        return self.state, reward, done
class A3CNet(object):
    """Progressive-NN A3C network (feed-forward variant).

    Builds two convolutional "columns" with identical architecture plus a
    lateral (adapter) connection feeding column 1's features into column 2,
    then policy and value heads on top of column 2's output.
    """

    def __init__(self, state_shape, action_dim, scope, column_id=2):
        # state_shape: [H, W, frame_seq]; action_dim: discrete action count.
        self.state_shape = state_shape
        self.action_dim = action_dim
        self.column_id = column_id
        with tf.device("/gpu:%d" % flags.gpu):
            # placeholder
            self.state = tf.placeholder(tf.float32, shape=[None] + list(state_shape), name="state")  # (None, 84, 84, 4)
            self.action = tf.placeholder(tf.float32, shape=[None, action_dim], name="action")  # (None, actions)
            # Discounted-return target used by both loss terms.
            self.target_q = tf.placeholder(tf.float32, shape=[None])
            with tf.variable_scope(scope) as scope:
                # column layers
                col_1_layers, col_1_vars = self.layers(column_id=1)
                col_2_layers, col_2_vars = self.layers(column_id=2)
                # lateral connect: project column 1's flattened conv features
                # (index 2) into column 2's fc layer output (index 3).
                col_1, col_2 = col_1_layers, col_2_layers
                col_2[3], lateral_vars = self.lateral_connect("lateral_l1", range(2), [col_1[2], col_2[2]], col_2[3])
                self.shared_vars = col_1_vars + col_2_vars + lateral_vars
                shared_out = col_2[3]
                # policy parts
                with tf.variable_scope("col_%d_policy" % column_id):
                    pi_fc_1, self.pi_w1, self.pi_b1 = full_connect(shared_out, (256, 256), "pi_fc1", with_param=True)
                    pi_fc_2, self.pi_w2, self.pi_b2 = full_connect(pi_fc_1, (256, action_dim), "pi_fc2", activate=None,
                                                                   with_param=True)
                    self.policy_out = tf.nn.softmax(pi_fc_2, name="pi_out")
                # value parts
                with tf.variable_scope("col_%d_value" % column_id):
                    v_fc_1, self.v_w1, self.v_b1 = full_connect(shared_out, (256, 256), "v_fc1", with_param=True)
                    v_fc_2, self.v_w2, self.v_b2 = full_connect(v_fc_1, (256, 1), "v_fc2", activate=None,
                                                                with_param=True)
                    self.value_out = tf.reshape(v_fc_2, [-1], name="v_out")
                # loss values
                with tf.op_scope([self.policy_out, self.value_out], "col_%d_loss" % column_id):
                    # Policy entropy, weighted by entropy_beta to encourage
                    # exploration. eps avoids log(0).
                    self.entropy = - tf.reduce_sum(self.policy_out * tf.log(self.policy_out + flags.eps))
                    # Advantage estimate (R - V(s)).
                    time_diff = self.target_q - self.value_out
                    # Log-probability of the action actually taken (action is
                    # one-hot, so the reduce_sum selects one entry).
                    policy_prob = tf.log(tf.reduce_sum(tf.mul(self.policy_out, self.action), reduction_indices=1))
                    self.policy_loss = - tf.reduce_sum(policy_prob * time_diff)
                    self.value_loss = tf.reduce_sum(tf.square(time_diff))
                    self.total_loss = self.policy_loss + self.value_loss * 0.5 + self.entropy * flags.entropy_beta

    def layers(self, column_id):
        """Build one column's conv/fc stack; return (layer list, var list)."""
        # shared parts
        with tf.variable_scope("col_%d_shared" % column_id):
            conv1, w1, b1 = conv2d(self.state, (8, 8, self.state_shape[-1], 16), "conv_1", stride=4,
                                   padding="VALID", with_param=True)  # (None, 20, 20, 16)
            conv1 = NetTools.batch_normalized(conv1)
            conv2, w2, b2 = conv2d(conv1, (4, 4, 16, 32), "conv_2", stride=2,
                                   padding="VALID", with_param=True)  # (None, 9, 9, 32)
            conv2 = NetTools.batch_normalized(conv2)
            flat1 = tf.reshape(conv2, (-1, 9 * 9 * 32), name="flat1")
            fc_1, w3, b3 = full_connect(flat1, (9 * 9 * 32, 256), "fc1", activate=None, with_param=True)
        return [conv1, conv2, flat1, fc_1], [w1, b1, w2, b2, w3, b3]

    def lateral_connect(self, scope, col_ids, h_col_lowers, h_col_upper):
        """Adapter connection combining lower columns into the upper layer.

        Each lower column's activation is scaled by a learned scalar, passed
        through a two-layer projection (V then U), and summed with the upper
        column's own pre-activation before the ReLU.
        """
        with tf.variable_scope(scope):
            h_col_upper_dim = h_col_upper.get_shape().as_list()[1]
            lateral_units = [h_col_upper]
            lateral_vars = []
            # Last entry of col_ids / h_col_lowers is the upper column itself.
            for col, h_col in zip(col_ids[:-1], h_col_lowers[:-1]):
                scalar = tf.get_variable("scalar_%d_%d" % (col, col_ids[-1]), shape=[],
                                         initializer=tf.constant_initializer(flags.scalar), trainable=True)
                h_col_dim_1 = h_col.get_shape().as_list()[1]
                h_col_dim_2 = h_col_lowers[-1].get_shape().as_list()[1]
                lateral_fc_v, v_w = full_connect(tf.mul(h_col, scalar), (h_col_dim_1, h_col_dim_2),
                                                 "fc_u_%d_%d" % (col, col_ids[-1]), with_param=True, with_bias=False)
                lateral_fc_u, u_w = full_connect(lateral_fc_v, (h_col_dim_2, h_col_upper_dim),
                                                 "fc_v_%d_%d" % (col, col_ids[-1]), with_param=True, with_bias=False,
                                                 activate=False)
                lateral_units.append(lateral_fc_u)
                lateral_vars += [scalar, v_w, u_w]
            h_col_upper = tf.nn.relu(tf.add_n(lateral_units), name="lateral_combine_%d" % col_ids[-1])
            return h_col_upper, lateral_vars

    def get_policy(self, sess, state):
        """Return the action-probability vector for a single state."""
        return sess.run(self.policy_out, feed_dict={self.state: [state]})[0]

    def get_value(self, sess, state):
        """Return the scalar state-value estimate for a single state."""
        return sess.run(self.value_out, feed_dict={self.state: [state]})[0]

    def get_vars(self):
        """All trainable variables, ordered to match other nets' get_vars()."""
        return self.shared_vars + [self.pi_w1, self.pi_b1, self.pi_w2, self.pi_b2,
                                   self.v_w1, self.v_b1, self.v_w2, self.v_b2]
class A3CLSTMNet(object):
    """A3C network with an LSTM layer between the conv stack and the heads."""

    def __init__(self, state_shape, action_dim, scope):
        class InnerLSTMCell(BasicLSTMCell):
            """BasicLSTMCell subclass exposing its weight matrix and bias so
            they can be listed by get_vars() and synced between threads."""

            def __init__(self, num_units, forget_bias=1.0, input_size=None):
                BasicLSTMCell.__init__(self, num_units, forget_bias=forget_bias, input_size=input_size)
                self.matrix, self.bias = None, None

            def __call__(self, inputs, state, scope=None):
                """
                Long short-term memory cell (LSTM).
                implement from BasicLSTMCell.__call__
                """
                with tf.variable_scope(scope or type(self).__name__):  # "BasicLSTMCell"
                    # Parameters of gates are concatenated into one multiply for efficiency.
                    c, h = tf.split(1, 2, state)
                    concat = self.linear([inputs, h], 4 * self._num_units, True)
                    # i = input_gate, j = new_input, f = forget_gate, o = output_gate
                    i, j, f, o = tf.split(1, 4, concat)
                    new_c = c * tf.sigmoid(f + self._forget_bias) + tf.sigmoid(i) * tf.tanh(j)
                    new_h = tf.tanh(new_c) * tf.sigmoid(o)
                    return new_h, tf.concat(1, [new_c, new_h])

            def linear(self, args, output_size, bias, bias_start=0.0, scope=None):
                """
                Linear map: sum_i(args[i] * W[i]), where W[i] is a variable.
                implement from function of tensorflow.python.ops.rnn_cell.linear()
                Side effect: stores the created Matrix/Bias on self for get_vars().
                """
                if args is None or (isinstance(args, (list, tuple)) and not args):
                    raise ValueError("`args` must be specified")
                if not isinstance(args, (list, tuple)):
                    args = [args]
                # Calculate the total size of arguments on dimension 1.
                total_arg_size = 0
                shapes = [a.get_shape().as_list() for a in args]
                for shape in shapes:
                    if len(shape) != 2:
                        raise ValueError("Linear is expecting 2D arguments: %s" % str(shapes))
                    if not shape[1]:
                        raise ValueError("Linear expects shape[1] of arguments: %s" % str(shapes))
                    else:
                        total_arg_size += shape[1]
                # Now the computation.
                with tf.variable_scope(scope or "Linear"):
                    matrix = tf.get_variable("Matrix", [total_arg_size, output_size])
                    if len(args) == 1:
                        res = tf.matmul(args[0], matrix)
                    else:
                        res = tf.matmul(tf.concat(1, args), matrix)
                    if not bias:
                        return res
                    bias_term = tf.get_variable(
                        "Bias", [output_size],
                        initializer=tf.constant_initializer(bias_start))
                self.matrix = matrix
                self.bias = bias_term
                return res + bias_term

        with tf.device("/gpu:%d" % flags.gpu):
            # placeholder
            self.state = tf.placeholder(tf.float32, shape=[None] + list(state_shape), name="state")  # (None, 84, 84, 4)
            self.action = tf.placeholder(tf.float32, shape=[None, action_dim], name="action")  # (None, actions)
            self.target_q = tf.placeholder(tf.float32, shape=[None])
            # shared parts
            with tf.variable_scope("%s_shared" % scope):
                conv1, self.w1, self.b1 = conv2d(self.state, (8, 8, state_shape[-1], 16), "conv_1", stride=4,
                                                 padding="VALID", with_param=True)  # (None, 20, 20, 16)
                conv2, self.w2, self.b2 = conv2d(conv1, (4, 4, 16, 32), "conv_2", stride=2,
                                                 padding="VALID", with_param=True)  # (None, 9, 9, 32)
                # BUG FIX: was tf.reshape(conv2, (9 * 9 * 32, 256)), which is
                # not a valid flattening of (None, 9, 9, 32); flatten to
                # (batch, 2592) as A3CNet.layers() does.
                flat1 = tf.reshape(conv2, (-1, 9 * 9 * 32), name="flat1")
                fc_1, self.w3, self.b3 = full_connect(flat1, (9 * 9 * 32, 256), "fc1", with_param=True)
            # rnn parts
            with tf.variable_scope("%s_rnn" % scope) as scope:
                # dynamic_rnn expects (batch, time, depth); the whole batch is
                # treated as one time sequence of length `sequence_length`.
                h_flat1 = tf.reshape(fc_1, (1, -1, 256))
                self.lstm = InnerLSTMCell(256)
                self.initial_lstm_state = tf.placeholder(tf.float32, shape=[1, self.lstm.state_size])
                self.sequence_length = tf.placeholder(tf.float32, [1])
                lstm_outputs, self.lstm_state = tf.nn.dynamic_rnn(self.lstm, h_flat1,
                                                                  initial_state=self.initial_lstm_state,
                                                                  sequence_length=self.sequence_length,
                                                                  time_major=False,
                                                                  scope=scope)
                lstm_outputs = tf.reshape(lstm_outputs, [-1, 256])
            # policy parts
            with tf.variable_scope("%s_policy" % scope):
                pi_fc_1, self.pi_w1, self.pi_b1 = full_connect(lstm_outputs, (256, 256), "pi_fc1", with_param=True)
                pi_fc_2, self.pi_w2, self.pi_b2 = full_connect(pi_fc_1, (256, action_dim), "pi_fc2", activate=None,
                                                               with_param=True)
                self.policy_out = tf.nn.softmax(pi_fc_2, name="pi_out")
            # value parts
            with tf.variable_scope("%s_value" % scope):
                v_fc_1, self.v_w1, self.v_b1 = full_connect(lstm_outputs, (256, 256), "v_fc1", with_param=True)
                v_fc_2, self.v_w2, self.v_b2 = full_connect(v_fc_1, (256, 1), "v_fc2", activate=None, with_param=True)
                self.value_out = tf.reshape(v_fc_2, [-1], name="v_out")
            # loss values
            with tf.op_scope([self.policy_out, self.value_out], "%s_loss" % scope):
                # NOTE(review): A3CNet uses reduce_sum for the entropy term;
                # reduce_mean here scales the exploration bonus differently.
                self.entropy = - tf.reduce_mean(self.policy_out * tf.log(self.policy_out + flags.eps))
                time_diff = self.target_q - self.value_out
                policy_prob = tf.log(tf.reduce_sum(tf.mul(self.policy_out, self.action), reduction_indices=1))
                # BUG FIX: policy_prob * time_diff is 1-D, so the previous
                # reduction_indices=1 was invalid; value_loss was also left
                # unreduced (a vector). Reduce both to scalars as A3CNet does.
                self.policy_loss = - tf.reduce_sum(policy_prob * time_diff)
                self.value_loss = tf.reduce_sum(tf.square(time_diff))
                self.total_loss = self.policy_loss + self.value_loss * 0.5 + self.entropy * flags.entropy_beta
        # lstm state (numpy carry-over between successive session runs)
        self.lstm_state_out = np.zeros((1, self.lstm.state_size), dtype=np.float32)

    def reset_lstm_state(self):
        """Zero the carried-over LSTM state (call at episode boundaries)."""
        self.lstm_state_out = np.zeros((1, self.lstm.state_size), dtype=np.float32)

    def get_policy(self, sess, state):
        """Return action probabilities for one state, advancing the LSTM state."""
        # BUG FIX: the feed previously passed the *tensor* self.lstm_state as
        # the value for initial_lstm_state; the carried numpy state
        # self.lstm_state_out is what must be fed.
        policy_out, self.lstm_state_out = sess.run([self.policy_out, self.lstm_state],
                                                   feed_dict={self.state: [state],
                                                              self.initial_lstm_state: self.lstm_state_out,
                                                              self.sequence_length: [1]})
        return policy_out[0]

    def get_value(self, sess, state):
        """Return the value estimate for one state without mutating the LSTM state."""
        # BUG FIX: same wrong feed as get_policy, plus the result was indexed
        # with [0] *before* unpacking two fetches, which could never work.
        value_out, _ = sess.run([self.value_out, self.lstm_state],
                                feed_dict={self.state: [state],
                                           self.initial_lstm_state: self.lstm_state_out,
                                           self.sequence_length: [1]})
        return value_out[0]

    def get_vars(self):
        """All trainable variables, ordered to match A3CNet.get_vars()."""
        return [self.w1, self.b1, self.w2, self.b2, self.w3, self.b3,
                self.lstm.matrix, self.lstm.bias,
                self.pi_w1, self.pi_b1, self.pi_w2, self.pi_b2,
                self.v_w1, self.v_b1, self.v_w2, self.v_b2]
class A3CSingleThread(threading.Thread):
    def __init__(self, thread_id, master):
        """Build one worker thread: its own env, local net, and graph ops.

        `master` is expected to provide `shared_net`, `shared_opt` and
        `global_step` (defined elsewhere in this file).
        """
        self.thread_id = thread_id
        threading.Thread.__init__(self, name="thread_%d" % thread_id)
        self.env = AtariEnv(gym.make(flags.game))
        self.master = master
        # local network
        if flags.use_lstm:
            self.local_net = A3CLSTMNet(self.env.state_shape, self.env.action_dim, scope="local_net_%d" % thread_id)
        else:
            self.local_net = A3CNet(self.env.state_shape, self.env.action_dim, scope="local_net_%d" % thread_id)
        # sync network: copies shared-net weights into the local net
        self.sync = self.sync_network(master.shared_net)
        # accumulate gradients
        self.accum_grads = self.create_accumulate_gradients()
        self.do_accum_grads_ops = self.do_accumulate_gradients()
        self.reset_accum_grads_ops = self.reset_accumulate_gradients()
        # collect summaries for debugging
        summaries = list()
        summaries.append(tf.scalar_summary("entropy/%d" % self.thread_id, self.local_net.entropy))
        summaries.append(tf.scalar_summary("policy_loss/%d" % self.thread_id, self.local_net.policy_loss))
        summaries.append(tf.scalar_summary("value_loss/%d" % self.thread_id, self.local_net.value_loss))
        summaries.append(tf.scalar_summary("total_loss/%d" % self.thread_id, self.local_net.total_loss))
        # apply accumulated gradients: updating the *shared* net's variables
        # with locally accumulated gradients is the asynchronous A3C update.
        with tf.device("/gpu:%d" % flags.gpu):
            self.apply_gradients = master.shared_opt.apply_gradients(
                zip(self.accum_grads, master.shared_net.get_vars()), global_step=master.global_step)
            self.summary_op = tf.merge_summary(summaries)
def sync_network(self, source_net):
sync_ops = []
with tf.device("/gpu:%d" % flags.gpu):
with tf.op_scope([], name="sync_ops_%d" % self.thread_id):
for (target_var, source_var) in zip(self.local_net.get_vars(), source_net.get_vars()):
ops = tf.assign(target_var, source_var)
sync_ops.append(ops)
return tf.group(*sync_ops, name="sync_group_%d" % self.thread_id)
def create_accumulate_gradients(self):
accum_grads = []
with tf.device("/gpu:%d" % flags.gpu):
with tf.op_scope([self.local_net], name="create_accum_%d" % self.thread_id):
for var in self.local_net.get_vars():
zero = tf.zeros(var.get_shape().as_list(), dtype=var.dtype)
name = var.name.replace(":", "_") + "_accum_grad"
accum_grad = tf.Variable(zero, name=name, trainable=False)
accum_grads.append(accum_grad.ref())
return accum_grads
def do_accumulate_gradients(self):
net = self.local_net
accum_grad_ops = []
with tf.device("/gpu:%d" % flags.gpu):
with tf.op_scope([net], name="grad_ops_%d" % self.thread_id):
var_refs = [v.ref() for v in net.get_vars()]
grads = tf.gradients(net.total_loss, var_refs, gate_gradients=False,
aggregation_method=None,
colocate_gradients_with_ops=False)
with tf.op_scope([], name="accum_ops_%d" % self.thread_id):
for (grad, var, accum_grad) in zip(grads, net.get_vars(), self.accum_grads):
name = var.name.replace(":", "_") + "_accum_grad_ops"
accum_ops = tf.assign_add(accum_grad, grad, name=name)
accum_grad_ops.append(accum_ops)
return tf.group(*accum_grad_ops, name="accum_group_%d" % self.thread_id)
def reset_accumulate_gradients(self):
net = self.local_net
reset_grad_ops = []
with tf.device("/gpu:%d" % flags.gpu):
with tf.op_scope([net], name="reset_grad_ops_%d" % self.thread_id):
for (var, accum_grad) in zip(net.get_vars(), self.accum_grads):
zero = tf.zeros(var.get_shape().as_list(), dtype=var.dtype)
name = var.name.replace(":", "_") + "_reset_grad_ops"
reset_ops = tf.assign(accum_grad, zero, name=name)
reset_grad_ops.append(reset_ops)
return tf.group(*reset_grad_ops, name="reset_accum_group_%d" % self.thread_id)
def weighted_choose_action(self, pi_probs):
r = random.uniform(0, sum(pi_probs))
upto = 0
for idx, prob in enumerate(pi_probs):
if upto + prob >= r:
return idx
upto += prob
return len(pi_probs) - 1
def forward_explore(self, train_step):
terminal = False
t_start = train_step
rollout_path = {"state": [], "action": [], "rewards": [], "done": []}
while not terminal and (train_step - t_start <= flags.t_max):
pi_probs = self.local_net.get_policy(self.master.sess, self.env.state)
action = self.weighted_choose_action(pi_probs)
_, reward, terminal = self.env.forward_action(action)
train_step += 1
rollout_path["state"].append(self.env.state)
one_hot_action = np.zeros(self.env.action_dim)
one_hot_action[action] = 1
rollout_path["action"].append(one_hot_action)
rollout_path["rewards"].append(reward)
rollout_path["done"].append(terminal)
return train_step, rollout_path
def discount(self, x):
return scipy.signal.lfilter([1], [1, -flags.gamma], x[::-1], axis=0)[::-1]
def run(self):
sess = self.master.sess
self.env.reset_env()
loop = 0
while flags.train_step <= flags.t_train:
train_step = 0
loop += 1
# reset gradients
sess.run(self.reset_accum_grads_ops)
# sync variables
sess.run(self.sync)
# forward explore
train_step, rollout_path = self.forward_explore(train_step)
# rollout for discounted R values
if rollout_path["done"][-1]:
rollout_path["rewards"][-1] = 0
self.env.reset_env()
if flags.use_lstm:
self.local_net.reset_lstm_state()
else:
rollout_path["rewards"][-1] = self.local_net.get_value(sess, rollout_path["state"][-1])
rollout_path["returns"] = self.discount(rollout_path["rewards"])
# accumulate gradients
lc_net = self.local_net
fetches = [self.do_accum_grads_ops, self.master.global_step]
if loop % 10 == 0:
fetches.append(self.summary_op)
res = sess.run(fetches, feed_dict={lc_net.state: rollout_path["state"],
lc_net.action: rollout_path["action"],
lc_net.target_q: rollout_path["returns"]})
if loop % 10 == 0:
global_step, summary_str = res[1], res[2]
self.master.summary_writer.add_summary(summary_str, global_step=global_step)
self.master.global_step_val = int(global_step)
# async update grads to global network
sess.run(self.apply_gradients)
flags.train_step += train_step
# evaluate
if loop % 10 == 0 and self.thread_id == 1:
self.test_phase()
if loop % 1000 and self.thread_id == 1:
save_model(self.master.sess, flags.train_dir, self.master.saver, "a3c_model",
global_step=self.master.global_step_val)
def test_phase(self, episode=10, max_step=1e3):
rewards = []
start_time = time.time()
while episode > 0:
terminal = False
self.env.reset_env()
episode_reward = 0
test_step = 0
while not terminal and test_step < max_step:
pi_probs = self.local_net.get_policy(self.master.sess, self.env.state)
action = self.weighted_choose_action(pi_probs)
_, reward, terminal = self.env.forward_action(action)
test_step += 1
episode_reward += reward
rewards.append(episode_reward)
episode -= 1
elapsed_time = int(time.time() - start_time)
avg_reward = float(np.mean(rewards))
mid_reward = float(np.median(rewards))
std_reward = float(np.std(rewards))
logger.info("game=%s, train_step=%d, episode=%d, reward(avg:%.2f, mid:%.2f, std:%.2f), time=%d(s)" % (
flags.game, flags.train_step, len(rewards), avg_reward, mid_reward, std_reward, elapsed_time))
class A3CAtari(object):
    """Coordinator for A3C training: shared network, optimizer and workers.

    Owns the shared ("global") network, the optimizer, the TF session and
    one A3CSingleThread worker per ``flags.jobs``.  NOTE: uses Python-2-era
    ``xrange`` and TF 0.x APIs (``initialize_all_variables`` etc.).
    """

    def __init__(self):
        # an env instance only to discover state_shape / action_dim
        self.env = AtariEnv(gym.make(flags.game))
        self.graph = tf.get_default_graph()
        # shared network
        if flags.use_lstm:
            self.shared_net = A3CLSTMNet(self.env.state_shape, self.env.action_dim, scope="columns")
        else:
            self.shared_net = A3CNet(self.env.state_shape, self.env.action_dim, scope="columns")
        # shared optimizer
        self.shared_opt, self.global_step, self.summary_writer = self.shared_optimizer()
        self.global_step_val = 0
        # local training threads (must be built before the session init
        # so their accumulation variables get initialized too)
        self.jobs = []
        for thread_id in xrange(flags.jobs):
            job = A3CSingleThread(thread_id, self)
            self.jobs.append(job)
        # session
        self.sess = tf.Session(config=tf.ConfigProto(log_device_placement=False,
                                                     allow_soft_placement=True))
        self.sess.run(tf.initialize_all_variables())
        # saver (shared-net variables only), resume from checkpoint if any
        self.saver = tf.train.Saver(var_list=self.shared_net.get_vars(), max_to_keep=3)
        restore_model(self.sess, flags.train_dir, self.saver)

    def shared_optimizer(self):
        """Build (optimizer, global_step, summary_writer) per flags.opt."""
        with tf.device("/gpu:%d" % flags.gpu):
            # optimizer
            if flags.opt == "rms":
                optimizer = tf.train.RMSPropOptimizer(flags.learn_rate, name="global_optimizer")
            elif flags.opt == "adam":
                optimizer = tf.train.AdamOptimizer(flags.learn_rate, name="global_optimizer")
            else:
                # logger.error(to_exit=True) is expected to terminate here
                logger.error("invalid optimizer", to_exit=True)
            global_step = tf.get_variable("global_step", [], initializer=tf.constant_initializer(0), trainable=False)
            summary_writer = tf.train.SummaryWriter(flags.train_dir, graph_def=self.graph)
        return optimizer, global_step, summary_writer

    def train(self):
        """Start all worker threads and block until they finish."""
        flags.train_step = 0
        signal.signal(signal.SIGINT, signal_handler)
        for job in self.jobs:
            job.start()
        for job in self.jobs:
            job.join()
def signal_handler(signum=None, frame=None):
    """Exit the process cleanly on SIGINT.

    Registered via signal.signal(); Python invokes signal handlers with
    (signum, frame), but the original took no arguments and raised
    TypeError when the signal actually fired.  Both parameters default to
    None so any existing zero-argument callers keep working.
    """
    sys.exit(0)
def main(_):
    """Entry point: prepare the train directory and run A3C training.

    _ -- unused argument supplied by tf.app.run().
    """
    # mkdir
    if not os.path.isdir(flags.train_dir):
        os.makedirs(flags.train_dir)
    # remove old tfevents files so TensorBoard only shows this run
    for f in os.listdir(flags.train_dir):
        if re.search(".*tfevents.*", f):
            os.remove(os.path.join(flags.train_dir, f))
    # model
    model = A3CAtari()
    model.train()
# tf.app.run() parses command-line flags and then calls main(_).
if __name__ == "__main__":
    tf.app.run()
| [
"[email protected]"
]
| |
39f48dfeed3f3313c308862c8550119fc3bc1641 | fc43470de13ff8f03105efc2a3660a1ed6a1a553 | /BAEKJOON/2504_괄호의값.py | 4cb3ea2ff67361cd93e8308808eef08938034270 | []
| no_license | youseop/Problem_solutions | 5a05597f188b4ef8f7d8483b46bf05fbf2158d01 | 1fba638d9520bca4354bca01f194f80b159e26aa | refs/heads/master | 2023-06-24T05:12:45.060086 | 2021-07-24T14:22:33 | 2021-07-24T14:22:33 | 298,317,735 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 964 | py | import sys
import sys


def solve(bracket):
    """Return the value of a bracket string (BOJ 2504), or 0 if invalid.

    '()' is worth 2, '[]' is worth 3; nesting multiplies, adjacency adds.
    Uses a stack holding opening brackets and partial integer values.

    Fixes the original's silent-swallow bug: a closing bracket that
    exhausted the stack without finding its opener (e.g. "())()") simply
    discarded the partial sum and continued, so some invalid strings
    produced a non-zero answer.  Now any unmatched closer yields 0.
    """
    stack = []
    for ch in bracket:
        if ch == '(' or ch == '[':
            stack.append(ch)
            continue
        # ch is a closing bracket: fold the values on top of the stack
        opener = '(' if ch == ')' else '['
        weight = 2 if ch == ')' else 3
        total = 0
        while stack:
            top = stack.pop()
            if isinstance(top, int):
                total += top
            elif top == opener:
                stack.append(weight if total == 0 else total * weight)
                break
            else:
                return 0  # mismatched opener, e.g. "(]"
        else:
            return 0  # ran out of stack: unmatched closing bracket
    # leftover unmatched openers (or mismatches) make the string invalid
    if any(b in stack for b in '()[]'):
        return 0
    return sum(stack)


if __name__ == "__main__":
    print(solve(sys.stdin.readline().strip()))
| [
"[email protected]"
]
| |
b11c66101b1e09ca12e76d8ce55d6ede96feff43 | 75f28905cc9d87d82be68a37a18beee8d6f21869 | /user_messages/urls.py | 732cdef2d4079ff7476434539d352d3c7f549367 | []
| no_license | Pavlenkovv/Cafe-heroku | f189bbd817a8736b43531bc5d73895fa436a8040 | e0e9b67247a3375e6f599b2dfcd77b1ccce9e5fb | refs/heads/main | 2023-02-27T14:48:24.549261 | 2021-01-29T12:55:19 | 2021-01-29T12:55:19 | 329,387,838 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 173 | py | from django.urls import path
from .views import *
# URL routes for the user_messages app:
#   ''               -> home view (URL name "messages_info")
#   'update/<pk>/'   -> update_messages view (URL name "update")
urlpatterns = [
    path ('', home, name='messages_info'),
    path('update/<int:pk>/', update_messages, name='update')
]
"[email protected]"
]
| |
6689f6b14bb0bd93a13ac8db5478c886fab76e6a | 2372281d6e08dfc517c60d5a0cce678f15f904db | /experiments/output_perturbation/scikit-learn/examples/preprocessing/plot_discretization_strategies.py | 9ef211a83ccf307e6861b3c49fdf16fd08a4849d | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | amzn/differential-privacy-bayesian-optimization | b647b8281be0c2ea335104c9c92e717ce07ce63f | 07fec631d00bf3381ca24f7d73757aef6dfda9d3 | refs/heads/master | 2023-03-13T04:10:22.753152 | 2022-10-03T19:26:44 | 2022-10-03T19:26:44 | 252,813,133 | 27 | 19 | Apache-2.0 | 2022-10-03T19:26:45 | 2020-04-03T18:45:17 | Python | UTF-8 | Python | false | false | 3,052 | py | # -*- coding: utf-8 -*-
"""
==========================================================
Demonstrating the different strategies of KBinsDiscretizer
==========================================================
This example presents the different strategies implemented in KBinsDiscretizer:
- 'uniform': The discretization is uniform in each feature, which means that
the bin widths are constant in each dimension.
- 'quantile': The discretization is done on the quantiled values, which means
that each bin has approximately the same number of samples.
- 'kmeans': The discretization is based on the centroids of a KMeans clustering
procedure.
The plot shows the regions where the discretized encoding is constant.
"""
# Author: Tom Dupré la Tour
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.preprocessing import KBinsDiscretizer
from sklearn.datasets import make_blobs
print(__doc__)

# the three binning strategies supported by KBinsDiscretizer
strategies = ['uniform', 'quantile', 'kmeans']

n_samples = 200
centers_0 = np.array([[0, 0], [0, 5], [2, 4], [8, 8]])
centers_1 = np.array([[0, 0], [3, 1]])

# construct the datasets: one uniform cloud and two blob mixtures
random_state = 42
X_list = [
    np.random.RandomState(random_state).uniform(-3, 3, size=(n_samples, 2)),
    make_blobs(n_samples=[n_samples // 10, n_samples * 4 // 10,
                          n_samples // 10, n_samples * 4 // 10],
               cluster_std=0.5, centers=centers_0,
               random_state=random_state)[0],
    make_blobs(n_samples=[n_samples // 5, n_samples * 4 // 5],
               cluster_std=0.5, centers=centers_1,
               random_state=random_state)[0],
]

figure = plt.figure(figsize=(14, 9))
i = 1  # subplot index (first column: raw data; then one column per strategy)
for ds_cnt, X in enumerate(X_list):
    # left-most column: the input data
    ax = plt.subplot(len(X_list), len(strategies) + 1, i)
    ax.scatter(X[:, 0], X[:, 1], edgecolors='k')
    if ds_cnt == 0:
        ax.set_title("Input data", size=14)

    # dense grid over the data's bounding box, used to paint bin regions
    xx, yy = np.meshgrid(
        np.linspace(X[:, 0].min(), X[:, 0].max(), 300),
        np.linspace(X[:, 1].min(), X[:, 1].max(), 300))
    grid = np.c_[xx.ravel(), yy.ravel()]

    ax.set_xlim(xx.min(), xx.max())
    ax.set_ylim(yy.min(), yy.max())
    ax.set_xticks(())
    ax.set_yticks(())

    i += 1
    # transform the dataset with KBinsDiscretizer
    for strategy in strategies:
        enc = KBinsDiscretizer(n_bins=4, encode='ordinal', strategy=strategy)
        enc.fit(X)
        grid_encoded = enc.transform(grid)

        ax = plt.subplot(len(X_list), len(strategies) + 1, i)

        # horizontal stripes: constant regions of the first feature's bins
        horizontal = grid_encoded[:, 0].reshape(xx.shape)
        ax.contourf(xx, yy, horizontal, alpha=.5)
        # vertical stripes: constant regions of the second feature's bins
        vertical = grid_encoded[:, 1].reshape(xx.shape)
        ax.contourf(xx, yy, vertical, alpha=.5)

        ax.scatter(X[:, 0], X[:, 1], edgecolors='k')
        ax.set_xlim(xx.min(), xx.max())
        ax.set_ylim(yy.min(), yy.max())
        ax.set_xticks(())
        ax.set_yticks(())
        if ds_cnt == 0:
            ax.set_title("strategy='%s'" % (strategy, ), size=14)

        i += 1

plt.tight_layout()
plt.show()
| [
"[email protected]"
]
| |
980895b0e1bce4169f2bdcb8aa270a1ae9dd834c | 24b2f3f5f49ed19cf7fd3dcd433d6b72806e08cf | /python/array/0054_Spiral_Matrix.py | 72264e152eccf0198675fc6229fbbc7746b9e527 | []
| no_license | lizzzcai/leetcode | 97089e4ca8c3c53b5a4a50de899591be415bac37 | 551cd3b4616c16a6562eb7c577ce671b419f0616 | refs/heads/master | 2021-06-23T05:59:56.928042 | 2020-12-07T03:07:58 | 2020-12-07T03:07:58 | 162,840,861 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,282 | py | """
18/09/2019
54. Spiral Matrix - Medium
Tag: Array
Given a matrix of m x n elements (m rows, n columns), return all elements of the matrix in spiral order.
Example 1:
Input:
[
[ 1, 2, 3 ],
[ 4, 5, 6 ],
[ 7, 8, 9 ]
]
Output: [1,2,3,6,9,8,7,4,5]
Example 2:
Input:
[
[1, 2, 3, 4],
[5, 6, 7, 8],
[9,10,11,12]
]
Output: [1,2,3,4,8,12,11,10,9,5,6,7]
"""
from typing import List
class Solution:
    def spiralOrder(self, matrix: List[List[int]]) -> List[int]:
        """Return all elements of ``matrix`` in clockwise spiral order.

        Walks cell by cell, turning clockwise whenever the next cell is
        out of bounds or already visited.

        Fix: removed the leftover debug ``print()`` calls, which spammed
        stdout on every cell of every call.

        Time:  O(n*m)
        Space: O(n*m) for the visited grid.
        """
        if not matrix:
            return []
        # direction vectors (dx, dy): right, down, left, up
        directions = [(1, 0), (0, 1), (-1, 0), (0, -1)]
        m, n = len(matrix[0]), len(matrix)  # m columns, n rows
        visited = [[False] * m for _ in range(n)]
        idx = 0      # index of the current direction
        x, y = 0, 0  # current column, row
        res = []
        for _ in range(m * n):
            visited[y][x] = True
            res.append(matrix[y][x])
            next_x, next_y = x + directions[idx % 4][0], y + directions[idx % 4][1]
            # turn clockwise when the straight-ahead cell is unavailable
            if not (0 <= next_x < m and 0 <= next_y < n) or visited[next_y][next_x]:
                idx += 1
            x, y = x + directions[idx % 4][0], y + directions[idx % 4][1]
        return res
# Unit Test
import unittest
class spiralOrderCase(unittest.TestCase):
    """Unit tests for Solution.spiralOrder (square, rectangular, empty)."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_spiralOrder(self):
        func = Solution().spiralOrder
        # 3x3 square matrix
        self.assertEqual(func([
            [ 1, 2, 3 ],
            [ 4, 5, 6 ],
            [ 7, 8, 9 ]
        ]), [1,2,3,6,9,8,7,4,5])
        # 3x4 rectangular matrix
        self.assertEqual(func([
            [1, 2, 3, 4],
            [5, 6, 7, 8],
            [9,10,11,12]
        ]), [1,2,3,4,8,12,11,10,9,5,6,7])
        # empty input
        self.assertEqual(func([
        ]), [])
# run the unit tests when executed directly
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
]
| |
e291db95fbe82bddb3fcaf8c6ce1776e9741bdfa | 6bbcf512837bdcdfec3091b3337d54a8c455c7b9 | /practice/Operators.py | af03cb45e6fa25e1e62edaac81ae702144e24ec8 | []
| no_license | sachinlokesh05/Python-Core-Programs | a342ebdc7be070b66254e505df044fdaf03f147f | 8eec5595b51203d559e1d6f0e40646e63ad3645a | refs/heads/master | 2022-04-28T02:32:07.101993 | 2020-04-09T12:03:01 | 2020-04-09T12:03:01 | 249,491,090 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 751 | py | class Operators:
a=None
b=None
c=None
def __init__(self, a,b,c):
self.__aa=a
self.__bb=b
self.__cc=c
def operation(self):
choice=int(input("enter choice of your: "))
if choice == 1:
return (self.__aa + self.__bb * self.__cc)
elif choice == 2:
return (self.__cc + self.__aa / self.__cc)
elif choice == 3:
return (self.__aa % self.__bb + self.__cc)
elif choice == 4 :
return (self.__aa * self.__bb + self.__cc)
else:
print("Wrong choice")
return self.operation()
# Interactive demo with operands a=1, b=2, c=5.
ap = Operators(1, 2, 5)
# FIX: the menu was printed in the order 1, 3, 2, 4, so its labels did not
# match the choice numbers operation() actually dispatches on.
print('''
1. a + b * c
2. c + a / b
3. a % b + c
4. a * b + c
''')
print(ap.operation())
| [
"[email protected]"
]
| |
2332e49aafbb6a86f7089ed417df63d5469e95b3 | c16ea32a4cddb6b63ad3bacce3c6db0259d2bacd | /google/ads/googleads/v5/googleads-py/google/ads/googleads/v5/services/services/invoice_service/transports/base.py | 0fb63b0af23dd6693ac5a32d3f79a22b0b3379ff | [
"Apache-2.0"
]
| permissive | dizcology/googleapis-gen | 74a72b655fba2565233e5a289cfaea6dc7b91e1a | 478f36572d7bcf1dc66038d0e76b9b3fa2abae63 | refs/heads/master | 2023-06-04T15:51:18.380826 | 2021-06-16T20:42:38 | 2021-06-16T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,628 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import typing
import pkg_resources
import google.auth # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.ads.googleads.v5.services.types import invoice_service
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
'google-ads-googleads',
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
class InvoiceServiceTransport(metaclass=abc.ABCMeta):
    """Abstract transport class for InvoiceService.

    Concrete subclasses (gRPC, etc.) implement ``list_invoices``; this base
    handles credential resolution and wraps methods with client-info
    metadata.  NOTE: auto-generated gapic code.
    """

    # OAuth scopes requested when falling back to application-default creds.
    AUTH_SCOPES = (
        'https://www.googleapis.com/auth/adwords',
    )

    def __init__(
            self, *,
            host: str = 'googleads.googleapis.com',
            credentials: ga_credentials.Credentials = None,
            client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
            ) -> None:
        """Instantiate the transport.

        Args:
            host (Optional[str]):
                 The hostname to connect to.
            credentials (Optional[google.auth.credentials.Credentials]): The
                authorization credentials to attach to requests. These
                credentials identify the application to the service; if none
                are specified, the client will attempt to ascertain the
                credentials from the environment.
            client_info (google.api_core.gapic_v1.client_info.ClientInfo):
                The client info used to send a user-agent string along with
                API requests. If ``None``, then default info will be used.
                Generally, you only need to set this if you're developing
                your own client library.
        """
        # Save the hostname. Default to port 443 (HTTPS) if none is specified.
        # NOTE(review): the ':' test would misfire on bare IPv6 literals.
        if ':' not in host:
            host += ':443'
        self._host = host

        # If no credentials are provided, then determine the appropriate
        # defaults from the environment (application default credentials).
        if credentials is None:
            credentials, _ = google.auth.default(scopes=self.AUTH_SCOPES)

        # Save the credentials.
        self._credentials = credentials

        # Lifted into its own function so it can be stubbed out during tests.
        self._prep_wrapped_messages(client_info)

    def _prep_wrapped_messages(self, client_info):
        # Precomputed wrapped methods: attach retry/timeout/user-agent
        # handling around the transport's RPC callables.
        self._wrapped_methods = {
            self.list_invoices: gapic_v1.method.wrap_method(
                self.list_invoices,
                default_timeout=None,
                client_info=client_info,
            ),
        }

    @property
    def list_invoices(self) -> typing.Callable[
            [invoice_service.ListInvoicesRequest],
            invoice_service.ListInvoicesResponse]:
        # Abstract RPC: concrete transports must override this property.
        raise NotImplementedError
__all__ = (
'InvoiceServiceTransport',
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
]
| bazel-bot-development[bot]@users.noreply.github.com |
55096585b424ea3637fe9d43bddf009256acc018 | 8890925319a25dc3df29f53d0d8125d347680f68 | /looker_client_31/looker_sdk/oidc_user_attribute_write.py | 919915817144e74ca57abbc588f370b45226b734 | [
"MIT"
]
| permissive | ContrastingSounds/looker_sdk_31 | f5d300ae54aee1cc5a2621b36b49541db24ed248 | f973434049fff1b605b10086ab8b84f2f62e3489 | refs/heads/master | 2020-03-19T20:31:24.785373 | 2018-06-11T09:41:36 | 2018-06-11T09:41:36 | 136,802,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,870 | py | # coding: utf-8
"""
Experimental Looker API 3.1 Preview
This API 3.1 is in active development. Breaking changes are likely to occur to some API functions in future Looker releases until API 3.1 is officially launched and upgraded to beta status. If you have time and interest to experiment with new or modified services exposed in this embryonic API 3.1, we welcome your participation and feedback! For large development efforts or critical line-of-business projects, we strongly recommend you stick with the API 3.0 while API 3.1 is under construction. # noqa: E501
OpenAPI spec version: 3.1.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class OIDCUserAttributeWrite(object):
    """NOTE: This class is auto generated by the swagger code generator program.

    Do not edit the class manually.
    """
    """
    Attributes:
      swagger_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # attribute name -> swagger type string (used by to_dict serialization)
    swagger_types = {
        'name': 'str',
        'required': 'bool',
        'user_attribute_ids': 'list[int]',
        'url': 'str',
        'can': 'dict(str, bool)'
    }

    # attribute name -> JSON key in the API payload
    attribute_map = {
        'name': 'name',
        'required': 'required',
        'user_attribute_ids': 'user_attribute_ids',
        'url': 'url',
        'can': 'can'
    }

    def __init__(self, name=None, required=None, user_attribute_ids=None, url=None, can=None):  # noqa: E501
        """OIDCUserAttributeWrite - a model defined in Swagger"""  # noqa: E501

        self._name = None
        self._required = None
        self._user_attribute_ids = None
        self._url = None
        self._can = None
        self.discriminator = None

        # Only assign through the property setters when a value was given,
        # so unset fields stay None and are still emitted by to_dict().
        if name is not None:
            self.name = name
        if required is not None:
            self.required = required
        if user_attribute_ids is not None:
            self.user_attribute_ids = user_attribute_ids
        if url is not None:
            self.url = url
        if can is not None:
            self.can = can

    @property
    def name(self):
        """Gets the name of this OIDCUserAttributeWrite.  # noqa: E501

        Name of User Attribute in OIDC  # noqa: E501

        :return: The name of this OIDCUserAttributeWrite.  # noqa: E501
        :rtype: str
        """
        return self._name

    @name.setter
    def name(self, name):
        """Sets the name of this OIDCUserAttributeWrite.

        Name of User Attribute in OIDC  # noqa: E501

        :param name: The name of this OIDCUserAttributeWrite.  # noqa: E501
        :type: str
        """

        self._name = name

    @property
    def required(self):
        """Gets the required of this OIDCUserAttributeWrite.  # noqa: E501

        Required to be in OIDC assertion for login to be allowed to succeed  # noqa: E501

        :return: The required of this OIDCUserAttributeWrite.  # noqa: E501
        :rtype: bool
        """
        return self._required

    @required.setter
    def required(self, required):
        """Sets the required of this OIDCUserAttributeWrite.

        Required to be in OIDC assertion for login to be allowed to succeed  # noqa: E501

        :param required: The required of this OIDCUserAttributeWrite.  # noqa: E501
        :type: bool
        """

        self._required = required

    @property
    def user_attribute_ids(self):
        """Gets the user_attribute_ids of this OIDCUserAttributeWrite.  # noqa: E501

        Looker User Attribute Ids  # noqa: E501

        :return: The user_attribute_ids of this OIDCUserAttributeWrite.  # noqa: E501
        :rtype: list[int]
        """
        return self._user_attribute_ids

    @user_attribute_ids.setter
    def user_attribute_ids(self, user_attribute_ids):
        """Sets the user_attribute_ids of this OIDCUserAttributeWrite.

        Looker User Attribute Ids  # noqa: E501

        :param user_attribute_ids: The user_attribute_ids of this OIDCUserAttributeWrite.  # noqa: E501
        :type: list[int]
        """

        self._user_attribute_ids = user_attribute_ids

    @property
    def url(self):
        """Gets the url of this OIDCUserAttributeWrite.  # noqa: E501

        Link to oidc config  # noqa: E501

        :return: The url of this OIDCUserAttributeWrite.  # noqa: E501
        :rtype: str
        """
        return self._url

    @url.setter
    def url(self, url):
        """Sets the url of this OIDCUserAttributeWrite.

        Link to oidc config  # noqa: E501

        :param url: The url of this OIDCUserAttributeWrite.  # noqa: E501
        :type: str
        """

        self._url = url

    @property
    def can(self):
        """Gets the can of this OIDCUserAttributeWrite.  # noqa: E501

        Operations the current user is able to perform on this object  # noqa: E501

        :return: The can of this OIDCUserAttributeWrite.  # noqa: E501
        :rtype: dict(str, bool)
        """
        return self._can

    @can.setter
    def can(self, can):
        """Sets the can of this OIDCUserAttributeWrite.

        Operations the current user is able to perform on this object  # noqa: E501

        :param can: The can of this OIDCUserAttributeWrite.  # noqa: E501
        :type: dict(str, bool)
        """

        self._can = can

    def to_dict(self):
        """Returns the model properties as a dict (recursing into models)."""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, OIDCUserAttributeWrite):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
]
| |
939acab1fd12f3f792ee8ed5327b4a92cbb34516 | 49c2e3ebf7f5d2f79af6e26c44b4d07ec14a20d5 | /Hello World/venv/Lib/site-packages/pip/_vendor/progress/__init__.py | ffcb82afcfdd11034008e84488dfde21c6399cc7 | []
| no_license | TaylorHoll/Python_Projects | a0d86642463bdc5b3ea67dae0146c115185c1db2 | a8285b058ed0b4e0a366753d61526056dab23cd3 | refs/heads/master | 2020-06-13T09:04:29.666639 | 2020-01-07T03:40:25 | 2020-01-07T03:40:25 | 194,608,692 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,859 | py | # Copyright (c) 2012 Giorgos Verigakis <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import division, print_function
from datetime import timedelta
from math import ceil
from sys import stderr
from collections import deque
try:
from time import monotonic
except ImportError:
from time import time as monotonic
__version__ = '1.5'
HIDE_CURSOR = '\x1b[?25l'
SHOW_CURSOR = '\x1b[?25h'
class Infinite(object):
    """Base progress indicator for tasks with an unknown total.

    Writes in-place updates to ``file`` (a TTY by default); tracks a
    simple-moving-average throughput in ``avg`` (seconds per item).
    """
    file = stderr
    sma_window = 10  # Simple Moving Average window
    check_tty = True
    hide_cursor = True

    def __init__(self, message='', **kwargs):
        self.index = 0
        self.start_ts = monotonic()
        self.avg = 0
        self._avg_update_ts = self.start_ts
        self._ts = self.start_ts
        self._xput = deque(maxlen=self.sma_window)
        # any keyword argument becomes an attribute (e.g. max=, file=)
        for key, val in kwargs.items():
            setattr(self, key, val)

        self._width = 0
        self.message = message

        if self.file and self.is_tty():
            if self.hide_cursor:
                print(HIDE_CURSOR, end='', file=self.file)
            print(self.message, end='', file=self.file)
            self.file.flush()

    def __getitem__(self, key):
        # template-style access; private attributes are hidden
        if key.startswith('_'):
            return None
        return getattr(self, key, None)

    @property
    def elapsed(self):
        # whole seconds since construction
        return int(monotonic() - self.start_ts)

    @property
    def elapsed_td(self):
        return timedelta(seconds=self.elapsed)

    def update_avg(self, n, dt):
        # fold dt/n (seconds per item) into the moving-average window
        if n > 0:
            xput_len = len(self._xput)
            self._xput.append(dt / n)
            now = monotonic()
            # update when we're still filling _xput, then after every second
            if (xput_len < self.sma_window or
                    now - self._avg_update_ts > 1):
                self.avg = sum(self._xput) / len(self._xput)
                self._avg_update_ts = now

    def update(self):
        # subclasses redraw the indicator here
        pass

    def start(self):
        pass

    def clearln(self):
        # carriage return + ANSI erase-to-end-of-line
        if self.file and self.is_tty():
            print('\r\x1b[K', end='', file=self.file)

    def write(self, s):
        # rewrite the current line, padding to the widest string seen so far
        if self.file and self.is_tty():
            line = self.message + s.ljust(self._width)
            print('\r' + line, end='', file=self.file)
            self._width = max(self._width, len(s))
            self.file.flush()

    def writeln(self, line):
        if self.file and self.is_tty():
            self.clearln()
            print(line, end='', file=self.file)
            self.file.flush()

    def finish(self):
        # terminate the line and restore the cursor
        if self.file and self.is_tty():
            print(file=self.file)
            if self.hide_cursor:
                print(SHOW_CURSOR, end='', file=self.file)

    def is_tty(self):
        return self.file.isatty() if self.check_tty else True

    def next(self, n=1):
        # advance by n items, updating throughput and redrawing
        now = monotonic()
        dt = now - self._ts
        self.update_avg(n, dt)
        self._ts = now
        self.index = self.index + n
        self.update()

    def iter(self, it):
        # wrap an iterable, advancing the indicator per item
        with self:
            for x in it:
                yield x
                self.next()

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.finish()
class Progress(Infinite):
    """Progress indicator for tasks whose total amount of work is known.

    The total is taken from the ``max`` keyword argument (default 100);
    ``iter`` replaces it with ``len(it)`` when the iterable is sized.
    """

    def __init__(self, *args, **kwargs):
        super(Progress, self).__init__(*args, **kwargs)
        self.max = kwargs.get('max', 100)

    @property
    def progress(self):
        # completed fraction, clamped to 1
        return min(1, self.index / self.max)

    @property
    def percent(self):
        return 100 * self.progress

    @property
    def remaining(self):
        # items left, never negative
        return max(self.max - self.index, 0)

    @property
    def eta(self):
        # estimated seconds left, from the moving-average seconds-per-item
        return int(ceil(self.remaining * self.avg))

    @property
    def eta_td(self):
        return timedelta(seconds=self.eta)

    def start(self):
        self.update()

    def goto(self, index):
        # jump straight to an absolute position
        self.next(index - self.index)

    def iter(self, it):
        # adopt the iterable's length as the total when it is sized
        try:
            self.max = len(it)
        except TypeError:
            pass

        with self:
            for item in it:
                yield item
                self.next()
| [
"[email protected]"
]
| |
af4646c0c8d791591845ff4194705e2db2ffaf58 | 08dfaf714830a6310742dcd50848790d595e838e | /中级班/chapter03/code_06_rotateMatrix.py | 77d7da7638bb7ab3952d12a1d9398029625a3035 | []
| no_license | Tokyo113/leetcode_python | d9e0fb96a76efaadcec7aad08f5ef542d898d434 | e86b3fb26aef1cf63727e3e5c9fd4ddc9bedb7f1 | refs/heads/master | 2020-08-10T15:36:10.364714 | 2020-04-13T08:28:53 | 2020-04-13T08:28:53 | 214,369,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,144 | py | #coding:utf-8
'''
@Time: 2020/2/13 16:58
@author: Tokyo
@file: code_06_rotateMatrix.py
@desc:
给定一个正方形矩阵,只用有限几个变量,实现矩阵中每个位置的数顺时针转动
90度,比如如下的矩阵
0 1 2 3
4 5 6 7
8 9 10 11
12 13 14 15
矩阵应该被调整为:
12 8 4 0
13 9 5 1
14 10 6 2
15 11 7 3
'''
def rotateMatrix(arr):
    """Rotate the square matrix ``arr`` 90 degrees clockwise, in place.

    Works ring by ring from the outside in, delegating each ring to
    rotateEdge().
    """
    top, left = 0, 0
    bottom, right = len(arr) - 1, len(arr[0]) - 1
    while top < bottom:
        rotateEdge(arr, top, left, bottom, right)
        top, left = top + 1, left + 1
        bottom, right = bottom - 1, right - 1
def rotateEdge(arr, lr, lc, rr, rc):
    """Rotate one rectangular ring of ``arr`` clockwise by a quarter turn.

    (lr, lc) is the ring's top-left corner, (rr, rc) its bottom-right.
    Each step cycles four cells — top, left, bottom, right — at offset k.
    """
    for k in range(rc - lc):
        # simultaneous 4-way cycle: top <- left <- bottom <- right <- top
        (arr[lr][lc + k],
         arr[rr - k][lc],
         arr[rr][rc - k],
         arr[lr + k][rc]) = (arr[rr - k][lc],
                             arr[rr][rc - k],
                             arr[lr + k][rc],
                             arr[lr][lc + k])
    return
def printMatrix(arr):
    """Print the matrix, one row per line, each value followed by a tab."""
    for row in arr:
        for value in row:
            print(value, end="\t")
        print("")
# Demo: print a 4x4 matrix, rotate it 90 degrees clockwise in place, print again.
if __name__ == '__main__':
    a = [[1, 2, 3, 4], [5, 6, 7, 8], [9, 10, 11, 12], [13,14,15,16]]
    printMatrix(a)
    print("=================")
    rotateMatrix(a)
    printMatrix(a)
| [
"[email protected]"
]
| |
c0005faed0405a4c138d03792873eedbc657de80 | 062e43d41c6daa6943bfad8f4510d19e43840f96 | /src/yaml/xml.py | b2e06652a02cedabe4dfa5a2964201b3e9a8934b | []
| no_license | allefant/land | b61d60106d8224e01a34d7504e1310b5fb0bd373 | f78f0da9c57884be8819f022c6e6442f90d4434c | refs/heads/main | 2023-06-22T06:15:09.938629 | 2023-06-20T20:08:13 | 2023-06-20T20:08:13 | 47,664,545 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,850 | py | import land.land
import land.yaml
import global ctype
# Scanner states for the character-at-a-time XML parser below.
static enum XmlState:
    Outside        # between tags, accumulating text content
    ElementName    # after '<', reading the tag name
    Attributes     # inside a tag header, between attributes
    AttributeName  # declared but not used by the scanner below
    AttributeStart # after '=', expecting the opening double quote
    AttributeValue # inside a double-quoted attribute value
# Mutable state carried through one land_yaml_load_xml() run.
static class XmlParser:
    XmlState state     # current scanner state
    bool closing       # saw '/' (or '?'): current tag closes / is self-closing
    LandBuffer *value  # accumulates the current text / name / attribute chars
    LandYaml *yaml     # YAML document under construction
# Emit the accumulated buffer as a YAML scalar (NUL-terminated copy), then clear it.
static def scalar(XmlParser *x):
    land_buffer_add_char(x.value, 0)
    land_yaml_add_scalar(x.yaml, land_strdup(x.value.buffer))
    land_buffer_clear(x.value)
static def opt_scalar(XmlParser *x):
if x.value.n:
scalar(x)
static def discard_scalar(XmlParser *x):
land_buffer_clear(x.value)
#
# <a x="2">b<c>d</c>e<f y="3"/></a>
#
# [{"<":"a", "x":"2", ">":["b", {"<":"c", ">":["d"]}, "e", {"<":f", "y":"3"}]}]
#
#
# Load the XML file *filename* into a LandYaml document.
#
# The XML is mapped onto YAML as sketched in the comment above: each
# element becomes a mapping with the reserved key "<" holding the tag
# name, one key per attribute, and ">" holding a sequence of child
# text/elements. Returns None if the file cannot be opened.
#
# NOTE(review): this lexer handles only plain tags, attributes and text
# — no entity decoding, comments or CDATA are visible in the state
# machine; input is assumed to be simple machine-written XML.
def land_yaml_load_xml(str filename) -> LandYaml *:
    LandFile *f = land_file_new(filename, "rb")
    if not f:
        land_log_message("Failed opening %s\n", filename)
        return None
    land_log_message("Parsing yaml %s\n", filename)
    XmlParser x_
    XmlParser *x = &x_
    x.yaml = land_yaml_new(filename)
    x.value = land_buffer_new()
    x.state = Outside
    x.closing = False
    land_yaml_add_sequence(x.yaml) # root list of elements
    # One-character-at-a-time state machine; every character not consumed
    # by a state transition falls through to add_char() at the bottom.
    while True:
        int c = land_file_getc(f)
        if c < 0:
            break
        if x.state == Outside:
            if c == '<':
                # Flush pending character data, then read the tag name.
                opt_scalar(x)
                x.state = ElementName
                continue
        elif x.state == ElementName:
            if c == '/':
                x.closing = True
                continue
            elif c == '>':
                if x.closing:
                    # "</name>": drop the buffered name, close the element.
                    discard_scalar(x)
                    close_tag(x)
                    land_yaml_done(x.yaml) # content
                else:
                    create_tag(x)
                    open_tag(x) # no attributes
                continue
            elif isspace(c):
                # Whitespace after the name: attributes follow.
                create_tag(x)
                x.state = Attributes
                continue
        elif x.state == Attributes:
            if isspace(c):
                continue
            elif c == '/':
                x.closing = True
                continue
            elif c == '?': # to deal with the XML header
                x.closing = True
                continue
            elif c == '>':
                if x.closing:
                    close_tag(x)
                else:
                    open_tag(x)
                continue
            elif c == '=':
                # Attribute name complete; quoted value expected next.
                scalar(x)
                x.state = AttributeStart
                continue
        elif x.state == AttributeStart:
            if c == '"':
                x.state = AttributeValue
                continue
        elif x.state == AttributeValue:
            if c == '"':
                x.state = Attributes
                scalar(x)
                continue
        add_char(x, c)
    land_yaml_done(x.yaml) # root list of elements
    land_file_destroy(f)
    land_buffer_destroy(x.value)
    return x.yaml
# Append one literal character to the pending text buffer.
static def add_char(XmlParser *x, char c):
    land_buffer_add_char(x.value, c)

# Start a new element: open its mapping and store the buffered tag name
# under the reserved "<" key.
static def create_tag(XmlParser *x):
    land_yaml_add_mapping(x.yaml) # tag mapping
    land_yaml_add_scalar(x.yaml, land_strdup("<"))
    scalar(x)

# The opening tag is complete: open the ">" content sequence and go back
# to collecting character data.
static def open_tag(XmlParser *x):
    x.state = Outside
    land_yaml_add_scalar(x.yaml, land_strdup(">"))
    land_yaml_add_sequence(x.yaml) # content

# Close the current element and reset the lexer flags.
static def close_tag(XmlParser *x):
    land_yaml_done(x.yaml) # tag mapping
    x.state = Outside
    x.closing = False
# saving XML

# Write *s* to the output file, tracking the current line length so that
# lines are soft-wrapped at 80 columns — but only at spots where the
# caller says a break is allowed (can_break_before).
static def xml_write(YamlParser *p, char const *s, bool can_break_before):
    int n = strlen(s)
    if can_break_before and p.line_length + n > 80:
        land_file_write(p.file, "\n", 1)
        p.line_length = 0
    land_file_write(p.file, s, n)
    int i = land_find(s, "\n")
    if i >= 0:
        # NOTE(review): land_find() appears to locate the *first* "\n";
        # for a string containing several newlines the remaining line
        # length would be overstated — confirm against land_find's docs.
        p.line_length = n - 1 - i
    else:
        p.line_length += n

# Serialize one element mapping ("<" name, attribute keys, optional ">"
# content) back to XML text. Returns False if the mapping has no name.
static def xml_save_mapping(LandYamlEntry *e, YamlParser *p) -> bool:
    str name = land_yaml_get_entry_scalar(e, "<")
    if not name: return False
    xml_write(p, "<", False)
    xml_write(p, name, False)
    # Every key except the reserved "<" and ">" is an attribute.
    for char const *key in LandArray *e.sequence:
        if land_equals(key, "<") or land_equals(key, ">"): continue
        xml_write(p, " ", False)
        xml_write(p, key, True)
        xml_write(p, "=\"", False)
        str value = land_yaml_get_entry_scalar(e, key)
        xml_write(p, value, False)
        xml_write(p, "\"", False)
    LandYamlEntry *contents = land_yaml_get_entry(e, ">")
    if contents:
        xml_write(p, ">", True)
        xml_save_sequence(contents, p)
        xml_write(p, "</", False)
        xml_write(p, name, False)
        xml_write(p, ">", True)
    else:
        # No content: emit a self-closing tag.
        xml_write(p, " />", True)
    return True

# Serialize each entry of a content sequence in order.
static def xml_save_sequence(LandYamlEntry *e, YamlParser *p) -> bool:
    for LandYamlEntry *e2 in LandArray *e.sequence:
        xml_save_entry(e2, p)
    return True

# Character data is written verbatim (no escaping is performed here).
static def xml_save_scalar(LandYamlEntry *e, YamlParser *p) -> bool:
    xml_write(p, e.scalar, False)
    return True

# Dispatch on the YAML node type.
static def xml_save_entry(LandYamlEntry *e, YamlParser *p) -> bool:
    if e.type == YamlMapping:
        return xml_save_mapping(e, p)
    elif e.type == YamlSequence:
        return xml_save_sequence(e, p)
    elif e.type == YamlScalar:
        return xml_save_scalar(e, p)
    return false
# Write the whole YAML document back out as XML (see the mapping
# convention above). The file handle is always released, even if
# serialization fails.
def land_yaml_save_xml(LandYaml *yaml):
    LandFile *f = land_file_new(yaml.filename, "wb")
    if not f:
        goto error
    YamlParser p
    memset(&p, 0, sizeof p)
    p.file = f
    if not xml_save_entry(yaml.root, &p): goto error
    label error
    if f: land_file_destroy(f)

# Builder helper: make sure we are inside a sequence before adding a new
# child — open the root sequence, or the current tag's ">" content
# sequence, as needed.
def _xml(LandYaml *yaml):
    if not yaml.root or not yaml.parent:
        land_yaml_add_sequence(yaml)
    elif yaml.parent->type == YamlMapping:
        land_yaml_add_scalar(yaml, ">")
        land_yaml_add_sequence(yaml)

# Open a new element called *name*; pair with land_yaml_xml_end().
def land_yaml_xml_tag(LandYaml *yaml, str name):
    _xml(yaml)
    land_yaml_add_mapping(yaml)
    land_yaml_add_scalar(yaml, "<")
    land_yaml_add_scalar(yaml, name)

# Convenience: <name>content</name> in a single call.
def land_yaml_xml_tag_with_content(LandYaml *yaml, str name, str content):
    land_yaml_xml_tag(yaml, name)
    land_yaml_xml_content(yaml, content)
    land_yaml_xml_end(yaml)

# Add character data to the currently open element.
def land_yaml_xml_content(LandYaml *yaml, str content):
    _xml(yaml)
    land_yaml_add_scalar(yaml, content)

# Add key="value" to the element opened last (call before adding content).
def land_yaml_xml_attribute(LandYaml *yaml, str key, value):
    land_yaml_add_scalar(yaml, key)
    land_yaml_add_scalar(yaml, value)

# Close the element opened by land_yaml_xml_tag().
def land_yaml_xml_end(LandYaml *yaml):
    land_yaml_done(yaml)
    # If we close a tag, we close the mapping, so additional children
    # can be added. When we close the parent, we just closed the
    # sequence, but we also need to close the mapping. Basically we
    # always need to be in a sequence after this function returns.
    if yaml.parent and yaml.parent->type == YamlMapping:
        land_yaml_done(yaml)
| [
"[email protected]"
]
| |
f0d1317a953a4569a174b0fc00a48f7a62f38d1b | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-cbs/huaweicloudsdkcbs/v1/model/collect_key_words_request.py | f6637441f094ae9a0789038a9ec0137977bbd716 | [
"Apache-2.0"
]
| permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 5,869 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class CollectKeyWordsRequest:
    """Request model for the CBS "collect key words" API.

    Auto-generated SDK model: the ``openapi_types`` / ``attribute_map``
    class attributes drive the generic (de)serialization machinery in
    ``huaweicloudsdkcore``, so the names must stay in sync with the
    instance attributes below.

    Attributes:
        openapi_types (dict): The key is attribute name
                              and the value is attribute type.
        attribute_map (dict): The key is attribute name
                              and the value is json key in definition.
    """

    # Attribute names whose values are masked as "****" by to_dict().
    sensitive_list = []

    # attribute name -> OpenAPI type string.
    openapi_types = {
        'qabot_id': 'str',
        'start_time': 'str',
        'end_time': 'str',
        'top': 'int'
    }

    # attribute name -> JSON key on the wire.
    attribute_map = {
        'qabot_id': 'qabot_id',
        'start_time': 'start_time',
        'end_time': 'end_time',
        'top': 'top'
    }

    def __init__(self, qabot_id=None, start_time=None, end_time=None, top=None):
        """CollectKeyWordsRequest

        The model defined in huaweicloud sdk

        :param qabot_id: qabot ID, in UUID format.
        :type qabot_id: str
        :param start_time: Start time of the query (long, UTC); defaults to 0.
        :type start_time: str
        :param end_time: End time of the query (long, UTC); defaults to the current time in milliseconds.
        :type end_time: str
        :param top: Maximum number of keywords to return; default 10, valid range 0-50.
        :type top: int
        """
        self._qabot_id = None
        self._start_time = None
        self._end_time = None
        self._top = None
        self.discriminator = None

        # qabot_id is mandatory; the optional fields are only assigned when
        # provided, so unset fields serialize as absent rather than null.
        self.qabot_id = qabot_id
        if start_time is not None:
            self.start_time = start_time
        if end_time is not None:
            self.end_time = end_time
        if top is not None:
            self.top = top

    @property
    def qabot_id(self):
        """Gets the qabot_id of this CollectKeyWordsRequest.

        qabot ID, in UUID format.

        :return: The qabot_id of this CollectKeyWordsRequest.
        :rtype: str
        """
        return self._qabot_id

    @qabot_id.setter
    def qabot_id(self, qabot_id):
        """Sets the qabot_id of this CollectKeyWordsRequest.

        qabot ID, in UUID format.

        :param qabot_id: The qabot_id of this CollectKeyWordsRequest.
        :type qabot_id: str
        """
        self._qabot_id = qabot_id

    @property
    def start_time(self):
        """Gets the start_time of this CollectKeyWordsRequest.

        Start time of the query (long, UTC); defaults to 0.

        :return: The start_time of this CollectKeyWordsRequest.
        :rtype: str
        """
        return self._start_time

    @start_time.setter
    def start_time(self, start_time):
        """Sets the start_time of this CollectKeyWordsRequest.

        Start time of the query (long, UTC); defaults to 0.

        :param start_time: The start_time of this CollectKeyWordsRequest.
        :type start_time: str
        """
        self._start_time = start_time

    @property
    def end_time(self):
        """Gets the end_time of this CollectKeyWordsRequest.

        End time of the query (long, UTC); defaults to the current time in milliseconds.

        :return: The end_time of this CollectKeyWordsRequest.
        :rtype: str
        """
        return self._end_time

    @end_time.setter
    def end_time(self, end_time):
        """Sets the end_time of this CollectKeyWordsRequest.

        End time of the query (long, UTC); defaults to the current time in milliseconds.

        :param end_time: The end_time of this CollectKeyWordsRequest.
        :type end_time: str
        """
        self._end_time = end_time

    @property
    def top(self):
        """Gets the top of this CollectKeyWordsRequest.

        Maximum number of keywords to return; default 10, valid range 0-50.

        :return: The top of this CollectKeyWordsRequest.
        :rtype: int
        """
        return self._top

    @top.setter
    def top(self, top):
        """Sets the top of this CollectKeyWordsRequest.

        Maximum number of keywords to return; default 10, valid range 0-50.

        :param top: The top of this CollectKeyWordsRequest.
        :type top: int
        """
        self._top = top

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask attributes listed in sensitive_list.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        import simplejson as json
        if six.PY2:
            import sys
            reload(sys)
            sys.setdefaultencoding("utf-8")
        return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)

    def __repr__(self):
        """For `print`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, CollectKeyWordsRequest):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
]
| |
648b0989580d47d21becb42dd15835780632c9e9 | 7af9841dfdeb7192cee9f5bc5ae24ebabeeebdcc | /article/admin.py | a36223f706dc91c9321ddeaf13c4ce75ccbd793c | []
| no_license | dimansion/bepy | 513d1d6b8c6f679ce97f46741b50b73dabf20484 | dd92999b9fb0d65e9479372718409785a8d26d26 | refs/heads/master | 2020-06-28T11:27:02.204255 | 2016-11-14T11:26:32 | 2016-11-14T11:26:32 | 67,694,755 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | from django.contrib import admin
# Register your models here.
from .models import Article
class ArticleModelAdmin(admin.ModelAdmin):
    """Admin configuration for Article: list columns, filters and search."""

    # Columns shown in the change list; "updated" is the link into the
    # detail page, and "title" is editable directly in the list view.
    list_display = ["title", "updated", "timestamp"]
    list_display_links = ["updated"]
    list_editable = ["title"]
    list_filter = ["updated", "timestamp"]
    search_fields = ["title", "content"]

    # NOTE(review): ModelAdmin does not read an inner Meta class (that is
    # a ModelForm idiom); this appears to be inert copy-paste — confirm
    # nothing references ArticleModelAdmin.Meta, then remove it.
    class Meta:
        model = Article


admin.site.register(Article, ArticleModelAdmin)
"[email protected]"
]
| |
bc07ca5cbd963e7bdc3369aae466a3c181a9c7bb | 50de54517ef5e157b43598e412c477fd66890a3e | /Assignment 04/Problem 12.py | 324d03a9fc0e2eb71aefe5e681cb03cda366f9f8 | []
| no_license | Shihabsarker93/BRACU-CSE111 | f530be247bebaaee9cc5e85948dc070adae0c6ae | 17c95c76f84abffe9d9bdcb5861fbacbc510b5a6 | refs/heads/main | 2023-08-13T15:33:57.331850 | 2021-10-07T10:56:09 | 2021-10-07T10:56:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,226 | py | class ParcelKoro:
def __init__(self, name=None, product_weight=None) -> None:
self.name = name
self.product_weight = product_weight
if self.product_weight == None:
self.product_weight = 0
def calculateFee(self, location=None):
self.location = location
if location == None:
location_charge = 50
else:
location_charge = 100
if self.product_weight == 0:
self.total_fee = 0
else:
self.total_fee = (self.product_weight * 20) + location_charge
def printDetails(self):
if self.name == None:
self.name = "No name set"
print(f"Customer Name: {self.name}")
print(f"Product Weight: {self.product_weight}")
print(f"Total fee: {self.total_fee}")
print("**********************")
# Demo driver: a default parcel, a named parcel that gets re-weighed,
# and a parcel with an explicit delivery location.
p1 = ParcelKoro()
p1.calculateFee()
p1.printDetails()
print("**********************")
p2 = ParcelKoro("Bob The Builder")
p2.calculateFee()
p2.printDetails()
print("----------------------------")
p2.product_weight = 15
p2.calculateFee()  # fee must be recomputed after changing the weight
p2.printDetails()
print("**********************")
p3 = ParcelKoro("Dora The Explorer", 10)
p3.calculateFee("Dhanmondi")  # explicit location -> 100 charge
p3.printDetails()
| [
"[email protected]"
]
| |
bdb495a4aaf1752cb932eda410fe95cca71f3510 | b0d5e423f09181a322a0166b06bf7fe45a3befc0 | /MetioTube/profiles/forms.py | b17499317bd2ae783e5fe56a9baa71d145f4935f | [
"MIT"
]
| permissive | Sheko1/MetioTube | f5da4184bb1590565ba34cef2fff02b379ab3e56 | c1c36d00ea46fc37cc7f3c0c9c0cae6e89b2113c | refs/heads/main | 2023-07-04T12:54:57.500778 | 2021-08-14T19:41:56 | 2021-08-14T19:41:56 | 383,907,948 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 375 | py | from django import forms
from MetioTube.profiles.models import Profile
class ProfileForm(forms.ModelForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.fields['profile_picture'].widget.attrs['accept'] = 'image/jpg, image/png, image/jpeg'
class Meta:
model = Profile
exclude = ('user', 'subscribers')
| [
"[email protected]"
]
| |
61087362a9a48013cc2326b5551e5baebf27170f | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_27043.py | 3aaaa48d0d67afac68945c2289c5a5b2445efbf4 | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 101 | py | # How to replace () and , from the fatched data of mysql database
path,=cursor.fetchone()
print path
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.