blob_id
stringlengths
40
40
directory_id
stringlengths
40
40
path
stringlengths
3
616
content_id
stringlengths
40
40
detected_licenses
sequencelengths
0
112
license_type
stringclasses
2 values
repo_name
stringlengths
5
115
snapshot_id
stringlengths
40
40
revision_id
stringlengths
40
40
branch_name
stringclasses
777 values
visit_date
timestamp[us]date
2015-08-06 10:31:46
2023-09-06 10:44:38
revision_date
timestamp[us]date
1970-01-01 02:38:32
2037-05-03 13:00:00
committer_date
timestamp[us]date
1970-01-01 02:38:32
2023-09-06 01:08:06
github_id
int64
4.92k
681M
star_events_count
int64
0
209k
fork_events_count
int64
0
110k
gha_license_id
stringclasses
22 values
gha_event_created_at
timestamp[us]date
2012-06-04 01:52:49
2023-09-14 21:59:50
gha_created_at
timestamp[us]date
2008-05-22 07:58:19
2023-08-21 12:35:19
gha_language
stringclasses
149 values
src_encoding
stringclasses
26 values
language
stringclasses
1 value
is_vendor
bool
2 classes
is_generated
bool
2 classes
length_bytes
int64
3
10.2M
extension
stringclasses
188 values
content
stringlengths
3
10.2M
authors
sequencelengths
1
1
author_id
stringlengths
1
132
72ff8c3550a9d80982b6ab69f568c80f8e65b8c8
df8c19c952f02e3527c0e06c1d74ed578ca0684b
/account/tasks.py
73d9501a1106ab6067bb90300b4c9dafed75e5bc
[]
no_license
knkemree/msrugs_backend
ac8c703fb691c9a796028cb2c38abaf8ed98c98c
850026b2884e7618404ecfa030beccec93bb4596
refs/heads/master
2023-04-03T04:31:13.557762
2021-04-05T17:26:34
2021-04-05T17:26:34
354,910,411
0
0
null
null
null
null
UTF-8
Python
false
false
494
py
from celery import shared_task
from django.core.mail import send_mail, mail_admins
from django.template import loader
from django.conf import settings
from celery.utils.log import get_task_logger

# Task-level logger provided by Celery for this module.
logger = get_task_logger(__name__)


@shared_task
def info_admins(email):
    """Notify the site admins that a new customer registered.

    Sends an admin mail whose HTML body contains the customer's email
    address and returns whatever ``mail_admins`` returns.
    """
    subject = "New Customer Registered"
    message = "New customer"
    html_body = "A new customer registered! Customer's email address is " + str(email)
    mail_sent = mail_admins(subject, message, html_message=html_body)
    return mail_sent
910afd2efa42ada5dd22923f2a6c53a466fb5a1f
c577d0d804ef826131a8a959ed10879d3fdfef2a
/profiles/migrations/0008_auto_20190206_2142.py
88de9108773cc666cfb035325fb04299faba7c2f
[]
no_license
AlexFrundin/four
3d561e2f33e007e93b975b247752c06b392b1945
c937e0dc159f380abbcb5e646ebdf2a7a8fd6be0
refs/heads/master
2020-04-30T10:55:31.020357
2019-03-20T18:06:26
2019-03-20T18:06:26
176,788,979
0
0
null
null
null
null
UTF-8
Python
false
false
395
py
# Generated by Django 2.1.3 on 2019-02-06 19:42
from django.db import migrations, models


class Migration(migrations.Migration):
    """Make ``Profile.birth`` optional (allow blank and NULL dates)."""

    dependencies = [
        ('profiles', '0007_auto_20190206_1917'),
    ]

    operations = [
        migrations.AlterField(
            model_name='profile',
            name='birth',
            field=models.DateField(blank=True, null=True),
        ),
    ]
c344e931dbc0bea19cfd8ade72f0e23e73c9944c
6bf7149077f539ab599db1f717c93aca82724512
/workshop/fig_games/project/game.py
28aac4f8ed638b7cc05a55b9da86ae835c56af25
[]
no_license
KalinHar/OOP-Python-SoftUni
8b53e8b734b364878c5372525c4249fdd32f0899
9787eea7ab5101e887ed4aaeb0a8b3b80efcfdd7
refs/heads/master
2023-07-09T08:15:59.765422
2021-08-16T06:01:08
2021-08-16T06:01:19
380,813,294
0
1
null
null
null
null
UTF-8
Python
false
false
2,577
py
from workshop.fig_games.project.suitcase import Suitcase
from workshop.fig_games.project.battle.area_battle import AreaBattle
from workshop.fig_games.project.battle.circumfernce_battle import CircumferenceBattle
from workshop.fig_games.project.battle.relative_battle import RelativeBattle
from workshop.fig_games.project.figure.triangle import Triangle
from workshop.fig_games.project.figure.circle import Circle
from workshop.fig_games.project.figure.square import Square
from workshop.fig_games.project.figure.rectangle import Rectangle


class Game:
    """Pairwise figure tournament: figures battle until one remains."""

    def __init__(self):
        self.figures = Suitcase()

    def area_battle(self, fig1, fig2):
        """Name of the figure winning the area battle, or None on a tie."""
        victor = AreaBattle().battle(fig1, fig2)
        return victor.name if victor else None

    def circumference_battle(self, fig1, fig2):
        """Name of the figure winning the circumference battle, or None on a tie."""
        victor = CircumferenceBattle().battle(fig1, fig2)
        return victor.name if victor else None

    def relative_battle(self, fig1, fig2):
        """Name of the figure winning the relative (tie-break) battle, or None."""
        victor = RelativeBattle().battle(fig1, fig2)
        return victor.name if victor else None

    def total_battle(self):
        """Battle figures two at a time until a single one remains; return it."""
        while len(self.figures.repository) > 1:
            first = self.figures.repository.pop()   # take first two figs
            second = self.figures.repository.pop()  # take first two figs
            outcomes = [self.area_battle(first, second),
                        self.circumference_battle(first, second)]
            # keep only actual wins, deduplicated
            outcomes = list(set(name for name in outcomes if name))
            if len(outcomes) == 1:
                # one figure won every decided battle
                champion = outcomes[0]
            else:
                # split decision or double tie -> settle with a relative battle
                champion = self.relative_battle(first, second)
            # return the winner back to the suitcase
            self.figures.add([fig for fig in (first, second) if fig.name == champion][0])
        return self.figures.repository[0]

    def __str__(self):
        return f"The winner is:\n{str(self.total_battle())}"


tri1 = Triangle("tri1", 9, 4, 6.5, 4.5)
tri2 = Triangle("tri2", 5, 2.4, 3, 4)
cir1 = Circle("cir1", 3)
rec1 = Rectangle("rec1", 1, 7)
squ1 = Square("squ1", 6)

g = Game()
print(g.figures.add(tri1))
print(g.figures.add(tri2))
print(g.figures.add(cir1))
print(g.figures.add(rec1))
print(g.figures.add(squ1))
print(g.area_battle(cir1, tri1))
print(g.circumference_battle(cir1, tri1))
print(g.relative_battle(cir1, tri1))
print(g.figures.remove("squ1"))
print(g.figures)
print("-------------")
print(g)
c150cb2b3d525912e69339d0c70b0420d9e05cc5
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02577/s522201630.py
dee13346a32585c238531954da820bc0241080a6
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
88
py
# Print "Yes" when the digit sum of the input number is divisible by 9
# (i.e. the number itself is divisible by 9), otherwise "No".
digits = input()
digit_sum = sum(map(int, digits))
print("Yes" if digit_sum % 9 == 0 else "No")
802695a099e500fec46d761a6bc06d2bd5ff349a
4ba4ae24e734686f28fe395390a2d0210eb88458
/programas/galeria/orbita de .5 sobre la logística/orbita_0.5_logistica.py
c2fdeeaff5bbbd9bcf411db25878759de40fc3e1
[]
no_license
fhca/Sistemas-Complejos-1
a7578ae981c694ae2500a847adfe79c6c58c69ae
a2b70adf3859dd6a7318da98228adef78698f9c5
refs/heads/master
2021-01-15T19:59:49.778497
2017-12-16T03:34:51
2017-12-16T03:34:51
99,835,884
0
1
null
null
null
null
UTF-8
Python
false
false
690
py
__author__ = 'fhca'

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import axes3d
from matplotlib import cm


def logistic(c, x):
    """One step of the logistic map: c * x * (1 - x)."""
    return c * x * (1 - x)


def evaluate(n, x0, c, f):
    """Iterate ``f`` for ``n`` steps starting at ``x0`` with parameter ``c``."""
    value = x0
    for _ in range(n):
        value = f(c, value)
    return value


fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')

# Parameter grid: c in [0, 4], iteration counts 1..2000.
C = np.linspace(0, 4, 200)
N = np.arange(1, 2001)
X, Y = np.meshgrid(C, N)

#Z = evaluate(Y, .5, X, logistic)
# evaluate() is scalar (its loop length depends on n), so fill Z point by point.
Z = np.empty_like(X)
for i, j in np.ndindex(X.shape):
    Z[i, j] = evaluate(Y[i, j], .5, X[i, j], logistic)

ax.plot_surface(X, Y, Z, rstride=10, cstride=10, cmap=cm.jet, linewidth=0,)
plt.show()
cff3e9148fe021dbca2f36fd24270a1aace86027
d9aa525b6a359378572fa7e48bd4fb8529b9ce23
/monitoring/services/tests.py
2e6a0fa69ed320fbbe1fcc4f7506227fdb4949ab
[ "Apache-2.0" ]
permissive
naanal/monasca-ui
cb5b7c279836d31809392d5b4572536fbea3634e
37d8926015e35f8949606183469d532924ab58c2
refs/heads/master
2020-02-26T13:04:25.471867
2016-08-16T05:39:24
2016-08-16T05:39:24
64,387,546
0
0
null
2016-07-28T10:46:54
2016-07-28T10:46:53
null
UTF-8
Python
false
false
1,595
py
# coding=utf-8 from django.core import urlresolvers from django.test import RequestFactory from mock import patch, call # noqa from monitoring.test import helpers from monitoring.services import constants from monitoring.services import views INDEX_URL = urlresolvers.reverse( constants.URL_PREFIX + 'index') class ServicesTest(helpers.TestCase): def test_index_get(self): res = self.client.get(INDEX_URL) self.assertTemplateUsed( res, 'monitoring/services/index.html') self.assertTemplateUsed(res, 'monitoring/services/monitor.html') class KibanaProxyViewTest(helpers.TestCase): def setUp(self): super(KibanaProxyViewTest, self).setUp() self.view = views.KibanaProxyView() self.request_factory = RequestFactory() def test_get_relative_url_with_unicode(self): """Tests if it properly converts multibyte characters""" import urlparse self.view.request = self.request_factory.get( '/', data={'a': 1, 'b': 2} ) expected_path = ('/elasticsearch/.kibana/search' '/New-Saved-Search%E3%81%82') expected_qs = {'a': ['1'], 'b': ['2']} url = self.view.get_relative_url( u'/elasticsearch/.kibana/search/New-Saved-Searchあ' ) # order of query params may change parsed_url = urlparse.urlparse(url) actual_path = parsed_url.path actual_qs = urlparse.parse_qs(parsed_url.query) self.assertEqual(actual_path, expected_path) self.assertEqual(actual_qs, expected_qs)
50c0db940d1a81503c735523e554c2f9aa4e8c25
be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1
/Gauss_v45r10p1/Gen/DecFiles/options/31503010.py
a0d860e470e74eab4f359406d68c4a1e5df58223
[]
no_license
Sally27/backup_cmtuser_full
34782102ed23c6335c48650a6eaa901137355d00
8924bebb935b96d438ce85b384cfc132d9af90f6
refs/heads/master
2020-05-21T09:27:04.370765
2018-12-12T14:41:07
2018-12-12T14:41:07
185,989,173
0
0
null
null
null
null
UTF-8
Python
false
false
762
py
# file /home/hep/ss4314/cmtuser/Gauss_v45r10p1/Gen/DecFiles/options/31503010.py generated: Wed, 25 Jan 2017 15:25:30 # # Event Type: 31503010 # # ASCII decay Descriptor: [tau- -> pi- pi+ pi- nu_tau]cc # from Configurables import Generation Generation().EventType = 31503010 Generation().SampleGenerationTool = "SignalPlain" from Configurables import SignalPlain Generation().addTool( SignalPlain ) Generation().SignalPlain.ProductionTool = "PythiaProduction" from Configurables import ToolSvc from Configurables import EvtGenDecay ToolSvc().addTool( EvtGenDecay ) ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/tau_pi-pi+pi-nu=DecProdCut.dec" Generation().SignalPlain.CutTool = "DaughtersInLHCb" Generation().SignalPlain.SignalPIDList = [ 15,-15 ]
464fd32e21826eb900ab44421bba05a56e8c869a
6935f8334f17e59f0691695e1404ce2fa3c18b98
/src/signali_notification/migrations/0001_initial.py
f541895c83cd237322504219131944b4e337173d
[]
no_license
obshtestvo/signali
a7b74746cba487ea3dacedbd2eda4876b972056e
f0c64bea9e151023612e33e476d0723819924e19
refs/heads/master
2016-08-05T17:13:55.793165
2016-01-20T16:39:36
2016-01-20T16:39:36
34,040,168
0
3
null
null
null
null
UTF-8
Python
false
false
1,159
py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone


class Migration(migrations.Migration):
    """Initial migration: create the Subscriber model.

    A subscriber is an email address attached to a contact point, with
    timestamps for creation and the last notification sent.
    """

    dependencies = [
        ('signali_contact', '0004_auto_20150903_0029'),
    ]

    operations = [
        migrations.CreateModel(
            name='Subscriber',
            fields=[
                ('id', models.AutoField(auto_created=True, serialize=False, primary_key=True, verbose_name='ID')),
                ('email', models.EmailField(max_length=250, verbose_name='email')),
                ('last_notified_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Last notified at')),
                ('created_at', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Added at')),
                ('contactpoint', models.ForeignKey(related_name='subscribers', to='signali_contact.ContactPoint')),
            ],
            options={
                'abstract': False,
                'verbose_name_plural': 'subscriber',
                'verbose_name': 'subscriber',
            },
            bases=(models.Model,),
        ),
    ]
170ccd24c50321c37c6335c0652314406ed7802a
22a7161361089b84a09457b46d79ce2bd87f5b2c
/tests/urls.py
bb83d09b1ab5c7ac9d1103f51acf97588d336b6b
[]
no_license
matthew-a-dunlap/django-inspectional-registration
56fb9b9945d41de069034fd066e3b92b388b8498
d6dd945718e5f7ac09966763c83104c4966cb775
refs/heads/master
2020-09-09T17:33:18.128626
2016-05-23T19:57:19
2016-05-23T19:57:19
221,512,520
0
0
null
2019-11-13T17:13:08
2019-11-13T17:13:07
null
UTF-8
Python
false
false
399
py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

# Support both old and new locations of the URL helpers across Django versions.
try:
    from django.conf.urls import url, patterns, include
except ImportError:
    from django.conf.urls.defaults import url, patterns, include

from django.contrib import admin

admin.autodiscover()

urlpatterns = patterns(
    '',
    url(r'^admin/', include(admin.site.urls)),
    url(r'^registration/', include('registration.urls')),
)
4c19d1c4ca2143d49702c83bd6fe7486af618b32
6df4a4cbdaf59009838b2c70b518e66633c67de0
/user_portrait/cron/recommentation_in/filter_rules.py
c4a58b227bbab39a9fd134e2bd2cc7cfcf24955f
[]
no_license
jianjian0dandan/user_portrait
ccf5f43f0aca2d40581faae215fdda1db997a354
3114ca2fcec23a7039887cca953793ef34cb7f72
refs/heads/master
2021-01-15T19:59:16.286276
2016-05-18T03:30:37
2016-05-18T03:30:37
42,869,391
0
0
null
2015-09-21T13:56:51
2015-09-21T13:56:51
null
UTF-8
Python
false
false
4,731
py
# -*- coding: UTF-8 -*-
import sys
import csv
import json
import time

reload(sys)
sys.path.append('../../')
from global_utils import R_CLUSTER_FLOW2 as r_cluster
from global_utils import R_DICT, es_retweet, retweet_index_name_pre, retweet_index_type
from time_utils import datetime2ts, ts2datetime
from parameter import DAY
from parameter import RUN_TYPE, RUN_TEST_TIME
from parameter import RECOMMEND_IN_ACTIVITY_THRESHOLD as activity_threshold
from parameter import RECOMMEND_IN_IP_THRESHOLD as ip_threshold
from parameter import RECOMMEND_IN_RETWEET_THRESHOLD as retweet_threshold
from parameter import RECOMMEND_IN_MENTION_THRESHOLD as mention_threshold
from cron.detect.cron_detect import get_db_num

# CSV audit log: one row per rejected user, with the rule that rejected them.
csvfile = open('/home/ubuntu8/huxiaoqian/user_portrait/user_portrait/cron/recommentation_in/filter_uid_list.csv', 'wb')
writer = csv.writer(csvfile)


def filter_activity(user_set):
    """Keep users whose per-slot weibo count over the last 7 days never
    exceeded ``activity_threshold``; log rejected users as 'activity'.

    Returns the list of users that pass the filter.
    """
    results = []
    #run_type
    if RUN_TYPE == 1:
        now_date = ts2datetime(time.time())
    else:
        now_date = RUN_TEST_TIME
    # Normalize "yesterday" to a whole-day timestamp boundary.
    ts = datetime2ts(now_date) - DAY
    date = ts2datetime(ts)
    timestamp = datetime2ts(date)
    for user in user_set:
        over_count = 0
        for i in range(0, 7):
            ts = timestamp - DAY * i
            result = r_cluster.hget('activity_' + str(ts), str(user))
            if result:
                items_dict = json.loads(result)
                for item in items_dict:
                    weibo_count = items_dict[item]
                    if weibo_count > activity_threshold:
                        over_count += 1
        if over_count == 0:
            results.append(user)
        else:
            writer.writerow([user, 'activity'])
    return results


def filter_ip(user_set):
    """Keep users who used fewer than ``ip_threshold`` distinct IPs over the
    last 7 days; log rejected users as 'ip'.
    """
    results = []
    #run_type
    if RUN_TYPE == 1:
        now_date = ts2datetime(time.time())
    else:
        now_date = RUN_TEST_TIME
    ts = datetime2ts(now_date) - DAY
    for user in user_set:
        ip_set = set()
        for i in range(0, 7):
            timestamp = ts - DAY * i
            # BUG FIX: previously queried 'ip_' + str(ts), which read the
            # same day's hash on every iteration instead of walking back one
            # day at a time (compare filter_activity / filter_mention).
            ip_result = r_cluster.hget('ip_' + str(timestamp), str(user))
            if ip_result:
                result_dict = json.loads(ip_result)
            else:
                result_dict = {}
            for ip in result_dict:
                ip_set.add(ip)
        if len(ip_set) < ip_threshold:
            results.append(user)
        else:
            writer.writerow([user, 'ip'])
    return results


def filter_retweet_count(user_set):
    """Keep users retweeted by fewer than ``retweet_threshold`` distinct
    users (queried from ES in batches); log rejected users as 'retweet'.

    Users missing from the retweet index are kept.
    """
    FILTER_ITER_COUNT = 100  # ES mget batch size
    results = []
    now_ts = time.time()
    db_number = get_db_num(now_ts)
    retweet_index_name = retweet_index_name_pre + str(db_number)
    # test
    search_user_count = len(user_set)
    iter_search_count = 0
    while iter_search_count < search_user_count:
        iter_search_user_list = user_set[iter_search_count:iter_search_count + FILTER_ITER_COUNT]
        try:
            retweet_result = es_retweet.mget(index=retweet_index_name, doc_type=retweet_index_type,
                                             body={'ids': iter_search_user_list}, _source=True)['docs']
        except Exception:
            # Best-effort: an unreachable/failed ES lookup skips this batch.
            retweet_result = []
        for retweet_item in retweet_result:
            if retweet_item['found']:
                retweet_set = set()
                user = retweet_item['_id']
                per_retweet_result = json.loads(retweet_item['_source']['uid_retweet'])
                for retweet_user in per_retweet_result:
                    retweet_set.add(retweet_user)
                if len(retweet_set) < retweet_threshold:
                    results.append(user)
                else:
                    writer.writerow([user, 'retweet'])
            else:
                # No retweet record at all counts as "below threshold".
                user = retweet_item['_id']
                results.append(user)
        iter_search_count += FILTER_ITER_COUNT
    return results


def filter_mention(user_set):
    """Keep users who @-mentioned fewer than ``mention_threshold`` distinct
    users over the last 7 days; log rejected users as 'mention'.
    """
    results = []
    #run_type
    if RUN_TYPE == 1:
        now_date = ts2datetime(time.time())
    else:
        now_date = RUN_TEST_TIME
    timestamp = datetime2ts(now_date) - DAY
    date = ts2datetime(timestamp)
    for user in user_set:
        mention_set = set()
        for i in range(0, 7):
            ts = timestamp - DAY * i
            result = r_cluster.hget('at_' + str(ts), str(user))
            if result:
                item_dict = json.loads(result)
                for at_user in item_dict:
                    mention_set.add(at_user)
        at_count = len(mention_set)
        if at_count < mention_threshold:
            results.append(user)
        else:
            writer.writerow([user, 'mention'])
    return results
1e44f68b4293ce9210579097294860acfc7ebac2
f8f2536fa873afa43dafe0217faa9134e57c8a1e
/aliyun-python-sdk-ons/aliyunsdkons/request/v20190214/UntagResourcesRequest.py
3e59f17cdafd0c517f61e6e5a9daaacecfc58d26
[ "Apache-2.0" ]
permissive
Sunnywillow/aliyun-openapi-python-sdk
40b1b17ca39467e9f8405cb2ca08a85b9befd533
6855864a1d46f818d73f5870da0efec2b820baf5
refs/heads/master
2022-12-04T02:22:27.550198
2020-08-20T04:11:34
2020-08-20T04:11:34
288,944,896
1
0
NOASSERTION
2020-08-20T08:04:01
2020-08-20T08:04:01
null
UTF-8
Python
false
false
2,302
py
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.

from aliyunsdkcore.request import RpcRequest
from aliyunsdkons.endpoint import endpoint_data


class UntagResourcesRequest(RpcRequest):
    """RPC request wrapper for the ONS ``UntagResources`` API (2019-02-14)."""

    def __init__(self):
        RpcRequest.__init__(self, 'Ons', '2019-02-14', 'UntagResources','ons')
        self.set_method('POST')
        # Attach endpoint resolution data when the installed core SDK supports it.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_All(self):
        return self.get_query_params().get('All')

    def set_All(self,All):
        self.add_query_param('All',All)

    def get_ResourceIds(self):
        return self.get_query_params().get('ResourceIds')

    def set_ResourceIds(self, ResourceIds):
        # Expand the list into repeated query params: ResourceId.1, ResourceId.2, ...
        for depth1 in range(len(ResourceIds)):
            if ResourceIds[depth1] is not None:
                self.add_query_param('ResourceId.' + str(depth1 + 1) , ResourceIds[depth1])

    def get_ResourceType(self):
        return self.get_query_params().get('ResourceType')

    def set_ResourceType(self,ResourceType):
        self.add_query_param('ResourceType',ResourceType)

    def get_InstanceId(self):
        return self.get_query_params().get('InstanceId')

    def set_InstanceId(self,InstanceId):
        self.add_query_param('InstanceId',InstanceId)

    def get_TagKeys(self):
        return self.get_query_params().get('TagKeys')

    def set_TagKeys(self, TagKeys):
        # Expand the list into repeated query params: TagKey.1, TagKey.2, ...
        for depth1 in range(len(TagKeys)):
            if TagKeys[depth1] is not None:
                self.add_query_param('TagKey.' + str(depth1 + 1) , TagKeys[depth1])
45620672ed607a3171dae6cf19a63dea278a32ce
e23a4f57ce5474d468258e5e63b9e23fb6011188
/125_algorithms/_exercises/templates/_algorithms_challenges/edabit/_Edabit-Solutions-master/Odd Up, Even Down/solution.py
280dd4e50c5e27743667091935a4280519266904
[]
no_license
syurskyi/Python_Topics
52851ecce000cb751a3b986408efe32f0b4c0835
be331826b490b73f0a176e6abed86ef68ff2dd2b
refs/heads/master
2023-06-08T19:29:16.214395
2023-05-29T17:09:11
2023-05-29T17:09:11
220,583,118
3
2
null
2023-02-16T03:08:10
2019-11-09T02:58:47
Python
UTF-8
Python
false
false
149
py
___ transform(lst output # list ___ i __ lst: __ i % 2 __ 0: ?.a.. i-1) ____ ?.a.. i+1) r.. ?
b74fd5349fcc910ed9dcad8717e15620b73eb4be
c516df2118000e3abaa61527de7badb94680081e
/utilities/common.py
b3a2eae746c77898ae21aed1623a0194a141bbd9
[ "MIT" ]
permissive
xod442/paw
82c8d54af052edaea05ed36a0846fe9722f047f3
f55df04dd7af7a1b25844c809187a99cfb24b813
refs/heads/main
2023-04-16T12:19:50.849945
2021-04-26T20:06:51
2021-04-26T20:06:51
360,992,530
0
1
null
null
null
null
UTF-8
Python
false
false
1,345
py
import time
import boto3
from flask import current_app
import datetime
import arrow
import bleach


def utc_now_ts():
    """Return the current UTC time as whole seconds since the epoch."""
    return int(time.time())


def utc_now_ts_ms():
    """Return the current UTC time as whole milliseconds since the epoch.

    FIX: this previously did ``return lambda: int(round(...))`` — callers got
    a function object instead of a timestamp, inconsistent with
    ``utc_now_ts()``. Any caller that worked around this by calling the
    result (``utc_now_ts_ms()()``) must be updated.
    """
    return int(round(time.time() * 1000))


def ms_stamp_humanize(ts):
    """Render a millisecond epoch timestamp as a relative string (e.g. 'an hour ago')."""
    dt = datetime.datetime.fromtimestamp(ts / 1000.0)
    return arrow.get(dt).humanize()


def linkify(text):
    """Strip all HTML from *text*, then convert bare URLs into anchor links."""
    text = bleach.clean(text, tags=[], attributes={}, styles=[], strip=True)
    return bleach.linkify(text)


def email(to_email, subject, body_html, body_text):
    """Send an email through AWS SES.

    Returns False without sending when running under tests or when the
    AWS_SEND_MAIL setting is disabled; otherwise returns the SES response.
    """
    # don't run this if we're running a test or setting is False
    if current_app.config.get('TESTING') or not current_app.config.get('AWS_SEND_MAIL'):
        return False

    client = boto3.client('ses')
    return client.send_email(
        Source='[email protected]',
        Destination={
            'ToAddresses': [
                to_email,
            ]
        },
        Message={
            'Subject': {
                'Data': subject,
                'Charset': 'UTF-8'
            },
            'Body': {
                'Text': {
                    'Data': body_text,
                    'Charset': 'UTF-8'
                },
                'Html': {
                    'Data': body_html,
                    'Charset': 'UTF-8'
                },
            }
        }
    )
1fe26b687dbd81149de336083512b6e7129e88d1
2eb779146daa0ba6b71344ecfeaeaec56200e890
/python/oneflow/compatible/single_client/test/ops/test_transpose.py
02117e32289d49fe2caa7a1e4f230115958caf6e
[ "Apache-2.0" ]
permissive
hxfxjun/oneflow
ee226676cb86f3d36710c79cb66c2b049c46589b
2427c20f05543543026ac9a4020e479b9ec0aeb8
refs/heads/master
2023-08-17T19:30:59.791766
2021-10-09T06:58:33
2021-10-09T06:58:33
414,906,649
0
0
Apache-2.0
2021-10-09T06:15:30
2021-10-08T08:29:45
C++
UTF-8
Python
false
false
4,027
py
""" Copyright 2020 The OneFlow Authors. All rights reserved. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """ import os import unittest from collections import OrderedDict import numpy as np import tensorflow as tf import test_global_storage from test_util import GenArgList import oneflow.compatible.single_client.unittest from oneflow.compatible import single_client as flow from oneflow.compatible.single_client import typing as tp gpus = tf.config.experimental.list_physical_devices("GPU") for gpu in gpus: tf.config.experimental.set_memory_growth(gpu, True) def compare_with_tensorflow(device_type, input_shape, perm): assert device_type in ["gpu", "cpu"] flow.clear_default_session() func_config = flow.FunctionConfig() func_config.default_data_type(flow.float) @flow.global_function(type="train", function_config=func_config) def TransposeJob(): with flow.scope.placement(device_type, "0:0"): x = flow.get_variable( "input", shape=input_shape, dtype=flow.float, initializer=flow.random_uniform_initializer(minval=2, maxval=5), trainable=True, ) loss = flow.transpose(x, perm) flow.optimizer.SGD( flow.optimizer.PiecewiseConstantScheduler([], [0.0001]), momentum=0 ).minimize(loss) flow.watch(x, test_global_storage.Setter("x")) flow.watch_diff(x, test_global_storage.Setter("x_diff")) flow.watch(loss, test_global_storage.Setter("loss")) flow.watch_diff(loss, test_global_storage.Setter("loss_diff")) return loss of_out = TransposeJob().get() with tf.GradientTape(persistent=True) as tape: x = 
tf.Variable(test_global_storage.Get("x")) tf_out = tf.transpose(x, perm) loss_diff = test_global_storage.Get("loss_diff") tf_x_diff = tape.gradient(tf_out, x, loss_diff) assert np.allclose(of_out.numpy(), tf_out.numpy(), rtol=1e-05, atol=1e-05) assert np.allclose( test_global_storage.Get("x_diff"), tf_x_diff.numpy(), rtol=1e-05, atol=1e-05 ) @flow.unittest.skip_unless_1n1d() class TestTranspose(flow.unittest.TestCase): def test_transpose(test_case): arg_dict = OrderedDict() arg_dict["device_type"] = ["gpu", "cpu"] arg_dict["input_shape"] = [(10, 11, 12, 13)] arg_dict["perm"] = [(2, 0, 1, 3), (1, 0, 2, 3), (3, 2, 1, 0), (3, 1, 2, 0)] for arg in GenArgList(arg_dict): compare_with_tensorflow(*arg) def test_transpose2(test_case): arg_dict = OrderedDict() arg_dict["device_type"] = ["gpu", "cpu"] arg_dict["input_shape"] = [(10, 11, 12)] arg_dict["perm"] = [(2, 0, 1), (1, 0, 2), (2, 1, 0), (1, 2, 0)] for arg in GenArgList(arg_dict): compare_with_tensorflow(*arg) def test_transpose3(test_case): arg_dict = OrderedDict() arg_dict["device_type"] = ["gpu", "cpu"] arg_dict["input_shape"] = [(10, 11)] arg_dict["perm"] = [(1, 0), (0, 1)] for arg in GenArgList(arg_dict): compare_with_tensorflow(*arg) def test_transpose_dim6(test_case): arg_dict = OrderedDict() arg_dict["device_type"] = ["gpu", "cpu"] arg_dict["input_shape"] = [(2, 3, 4, 5, 6, 7)] arg_dict["perm"] = [(2, 0, 1, 3, 5, 4)] for arg in GenArgList(arg_dict): compare_with_tensorflow(*arg) if __name__ == "__main__": unittest.main()
af132b52a2a42c7eb350dd3fad4e62461d23f6d3
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p03548/s330099741.py
473abd697eb7a23e0c786ec6a00b442861d84293
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
118
py
#!/usr/bin/python
# -*- Coding: utf-8 -*-
# Read x, y, z and print the integer part of (x - z) / (y + z).
x, y, z = map(int, input().split())
n = int((x - z) / (y + z))
print(n)
60e077f3f23f697b0b33bdeca4b24594f3478247
626b14ce13986b6d5e03143e151004247659625a
/Day01-15/code/Day08/student.py
7d753978b63ee1f015b2f6a12f70198e93ff89bc
[]
no_license
Focavn/Python-100-Days
c7586ecf7ae3f1fd42f024558bb998be23ee9df8
d8de6307aeff9fe31fd752bd7725b9cc3fbc084b
refs/heads/master
2021-08-08T17:57:02.025178
2020-09-17T11:58:04
2020-09-17T11:58:04
220,427,144
0
0
null
2019-11-08T08:59:43
2019-11-08T08:59:41
null
UTF-8
Python
false
false
1,108
py
""" 定义和使用学生类 Version: 0.1 Author: 骆昊 Date: 2018-03-08 """ def _foo(): print('test') class Student(object): # __init__是一个特殊方法用于在创建对象时进行初始化操作 # 通过这个方法我们可以为学生对象绑定name和age两个属性 def __init__(self, name, age): self.name = name self.age = age def study(self, course_name): print('%s正在学习%s.' % (self.name, course_name)) # PEP 8要求标识符的名字用全小写多个单词用下划线连接 # 但是很多程序员和公司更倾向于使用驼峰命名法(驼峰标识) def watch_av(self): if self.age < 18: print('%s只能观看《熊出没》.' % self.name) else: print('%s正在观看岛国大电影.' % self.name) def main(): stu1 = Student('骆昊', 38) stu1.study('Python程序设计') stu1.watch_av() stu2 = Student('王大锤', 15) stu2.study('思想品德') stu2.watch_av() if __name__ == '__main__': main()
96b72c3e8d75f73087aa5d785f9d688d43fba4a9
920b9cb23d3883dcc93b1682adfee83099fee826
/iam/meta.py
1c14641c524eaf4af143f55b16491ad41f993154
[ "MIT", "LGPL-2.1-or-later", "LGPL-3.0-only" ]
permissive
TencentBlueKing/bk-itsm
f817fb166248d3059857b57d03e8b5ec1b78ff5b
2d708bd0d869d391456e0fb8d644af3b9f031acf
refs/heads/master
2023-08-31T23:42:32.275836
2023-08-22T08:17:54
2023-08-22T08:17:54
391,839,825
100
86
MIT
2023-09-14T08:24:54
2021-08-02T06:35:16
Python
UTF-8
Python
false
false
1,689
py
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS
Community Edition) available.
Copyright (C) 2017-2020 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""

_SYSTEM = "system"
_RESOURCES = "resources"
_ACTIONS = "actions"

# In-process registry of display names, keyed by category then id:
# {"system": {sid: {"name": ...}}, "resources": {sid: {rid: {"name": ...}}}, ...}
__meta_info__ = {_SYSTEM: {}, _RESOURCES: {}, _ACTIONS: {}}


def setup_system(system_id, system_name):
    """Register (or rename) a system's display name."""
    entry = __meta_info__[_SYSTEM].setdefault(system_id, {})
    entry["name"] = system_name


def get_system_name(system_id):
    """Return the registered name for ``system_id``, or None if unknown."""
    return __meta_info__[_SYSTEM].get(system_id, {}).get("name")


def setup_resource(system_id, resource_id, resource_name):
    """Register (or rename) a resource's display name under a system."""
    system_resources = __meta_info__[_RESOURCES].setdefault(system_id, {})
    system_resources.setdefault(resource_id, {})["name"] = resource_name


def get_resource_name(system_id, resource_id):
    """Return the registered resource name, or None if unknown."""
    return __meta_info__[_RESOURCES].get(system_id, {}).get(resource_id, {}).get("name")


def setup_action(system_id, action_id, action_name):
    """Register (or rename) an action's display name under a system."""
    system_actions = __meta_info__[_ACTIONS].setdefault(system_id, {})
    system_actions.setdefault(action_id, {})["name"] = action_name


def get_action_name(system_id, action_id):
    """Return the registered action name, or None if unknown."""
    return __meta_info__[_ACTIONS].get(system_id, {}).get(action_id, {}).get("name")
20a9aad0196588ee85844a524186b9c74f485d9b
63862669b6b428ef23e2733e50b47ef7a11ceb60
/basic info/varia.py
ce978e8d87db873269e2593e3bcd2404f720095d
[]
no_license
CyborgVillager/Learning_py_info
961fde2cdba7ec0b7e1aacd437aeba99083cd192
a1504ab4610f88ae2de738a49ac6513c3358a177
refs/heads/master
2020-11-25T22:17:33.966387
2020-02-11T04:34:29
2020-02-11T04:34:29
228,869,781
0
0
null
null
null
null
UTF-8
Python
false
false
620
py
# variables
def story_example():
    """Print a short story assembled from a few local variables."""
    name = "John"
    age = 25
    para0 = ("Lorem ipsum dolor sit amet, consectetur adipiscing elit. In interdum, odio et feugiat auctor, "
             "ante leo tincidunt tortor, sed lacinia leo augue vel lorem. In rutrum est libero")
    para1 = ("Nunc euismod magna in diam finibus sollicitudin. Aliquam commodo tortor lorem, in tincidunt "
             "quam dapibus fringilla. Duis vitae sem ut ligula efficitur varius.")
    print(name, 'is age', str(age), para0, '\n', name, para1)


def story_start():
    """Entry point: run the example story."""
    story_example()


story_start()
249c8be8faca60c67ed1827c6122bee07e1fa8ac
a5a4cee972e487512275c34f308251e6cc38c2fa
/tests_old/tests_openmpi/test_hello_world/test_hello_world.py
b5a6b03915a4a452b63bb6efdf8838d172ecddf7
[ "MIT" ]
permissive
eragasa/pypospack
4f54983b33dcd2dce5b602bc243ea8ef22fee86b
21cdecaf3b05c87acc532d992be2c04d85bfbc22
refs/heads/master
2021-06-16T09:24:11.633693
2019-12-06T16:54:02
2019-12-06T16:54:02
99,282,824
4
1
null
null
null
null
UTF-8
Python
false
false
706
py
from subprocess import call import os # possibly the greatest hack of a test to ever be written def test_hello_world(): call(["sbatch", "runjob_hipergator.sh"]) while True: r0 = os.path.exists("rank_0") r1 = os.path.exists("rank_1") r2 = os.path.exists("rank_2") r3 = os.path.exists("rank_3") err = os.path.exists("job.err") if all([r0, r1, r2, r3]): os.remove("rank_0") os.remove("rank_1") os.remove("rank_2") os.remove("rank_3") assert True return if err: os.remove("job.err") os.remove("job.out") assert False return
e39ec3b82e0551f31532345f993df0e4da0ee93f
459185e0e12d486e91fcfff3e6d6174afbdf70db
/JEX-V4/Exploits/wpinstall.py
84eb5f88c61670e0b117e32045f691f557ac28bc
[]
no_license
Hdiaktoros/laravel-dorks
e42a1be938b0fdbbf17e6689d50c7f8bcf30c464
a9ae0af4a27b522f939b5c1627db3b98f18bb5c3
refs/heads/main
2023-07-05T02:59:17.032717
2021-08-21T16:30:42
2021-08-21T16:30:42
398,522,099
2
1
null
null
null
null
UTF-8
Python
false
false
3,456
py
# coding=utf-8 from Exploits import printModule import requests from random import sample from BruteForce import Wordpress # ----------------==---- MY USER AGent ----==---------------- Headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0'} # -------------------- MYSQl SERVER INFO -------------------- HOST = '31.210.78.238' USER = 'francesco_res' PASS = 'L30zDTZDTP'[::-1] DB = 'francesco_reservex' # ----------------==---- WpInstall Info ----==---------------- username = 'u1337' password = 'uAdmin@123' # ------------------------------------------------------------ def RandomGenerator(lenth): return ''.join(sample('abcdefghijklmnopqrstuvwxyz', lenth)) def WpInstall(site, Email): session = requests.Session() RandomStringForPREFIX = str('wp_' + str(RandomGenerator(8)) + '_') try: DATA = { 'dbname': DB, 'uname': USER, 'pwd': PASS, 'dbhost': HOST, 'prefix': RandomStringForPREFIX, 'language': 'en_US', 'submit': 'Submit' } A = session.post('http://' + site + '/wp-admin/setup-config.php?step=2', data=DATA, headers=Headers, timeout=10) if 'install.php' in str(A.content): POSTDATA_Install = { 'weblog_title': 'installed|jex', 'user_name': username, 'admin_password': password, 'pass1-text': password, 'admin_password2': password, 'pw_weak': 'on', 'admin_email': Email, 'Submit': 'Install+WordPress', 'language': 'en_US' } session.post('http://' + site + '/wp-admin/install.php?step=2', data=POSTDATA_Install, headers=Headers, timeout=25) except: pass try: source = session.get('http://' + site + '/wp-login.php', timeout=10, headers=Headers).content if 'installed|jex' in str(source): with open('result/Wp-Installed.txt', 'a') as writer: writer.write(site + '/wp-login.php\n Username: {}\n' ' Password: {}\n------------------------------------------\n' .format(username, password)) Login = Wordpress.Wordpress() Login.BruteForce(site, password, username) return printModule.returnYes(site, 'N/A', 'Wp-Install', 'Wordpress') else: with 
open('result/Wp-SetupFound.txt', 'a') as writer: writer.write('{}/wp-admin/setup-config.php\n'.format(site)) return printModule.returnNo(site, 'N/A', 'Wp-Install', 'Wordpress') except: return printModule.returnNo(site, 'N/A', 'Wp-Install', 'Wordpress') def Check(site, email): try: PATHz = [ '', '/wordpress', '/wp', '/blog', '/test', '/site' ] x = 0 for path in PATHz: C = requests.get('http://' + site + path + '/wp-admin/setup-config.php?step=0') if 'setup-config.php?step=1' in str(C.content): x += 1 return WpInstall(site + path, email) if x == 0: return printModule.returnNo(site, 'N/A', 'Wp-Install', 'Wordpress') except: return printModule.returnNo(site, 'N/A', 'Wp-Install', 'Wordpress')
b057a3a5c3f3098da54c67a78d50a565061a32c3
0dfa97730b9ad9c077868a045d89cc0d4b09f433
/tests/integration/goldens/credentials/samples/generated_samples/iamcredentials_generated_credentials_v1_iam_credentials_sign_blob_sync.py
ff60375d5635f5825a6f29c36bb0c61572147d95
[ "Apache-2.0" ]
permissive
anukaal/gapic-generator-python
546c303aaf2e722956133b07abb0fb1fe581962f
e3b06895fa179a2038ee2b28e43054e1df617975
refs/heads/master
2023-08-24T23:16:32.305652
2021-10-09T15:12:14
2021-10-09T15:12:14
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,567
py
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Generated code. DO NOT EDIT! # # Snippet for SignBlob # NOTE: This snippet has been automatically generated for illustrative purposes only. # It may require modifications to work in your environment. # To install the latest published package dependency, execute the following: # python3 -m pip install google-iam-credentials # [START iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync] from google.iam import credentials_v1 def sample_sign_blob(): """Snippet for sign_blob""" # Create a client client = credentials_v1.IAMCredentialsClient() # Initialize request argument(s) request = credentials_v1.SignBlobRequest( name="projects/{project}/serviceAccounts/{service_account}", payload=b'payload_blob', ) # Make the request response = client.sign_blob(request=request) # Handle response print(response) # [END iamcredentials_generated_credentials_v1_IAMCredentials_SignBlob_sync]
b3cdcb8ef497d5e18564d7b7f47262c537e111e3
b89ec2839b4a6bd4e2d774f64be9138f4b71a97e
/dataent/website/doctype/website_settings/website_settings.py
4fc4412d550622749995427444bce0dbc835241c
[ "MIT" ]
permissive
dataent/dataent
ec0e9a21d864bc0f7413ea39670584109c971855
c41bd5942ffe5513f4d921c4c0595c84bbc422b4
refs/heads/master
2022-12-14T08:33:48.008587
2019-07-09T18:49:21
2019-07-09T18:49:21
195,729,981
0
0
MIT
2022-12-09T17:23:49
2019-07-08T03:26:28
Python
UTF-8
Python
false
false
4,869
py
# Copyright (c) 2015, Dataent Technologies Pvt. Ltd. and Contributors # MIT License. See license.txt from __future__ import unicode_literals import dataent from dataent import _ from dataent.utils import get_request_site_address, encode from dataent.model.document import Document from six.moves.urllib.parse import quote from dataent.website.router import resolve_route from dataent.website.doctype.website_theme.website_theme import add_website_theme class WebsiteSettings(Document): def validate(self): self.validate_top_bar_items() self.validate_footer_items() self.validate_home_page() def validate_home_page(self): if dataent.flags.in_install: return if self.home_page and not resolve_route(self.home_page): dataent.msgprint(_("Invalid Home Page") + " (Standard pages - index, login, products, blog, about, contact)") self.home_page = '' def validate_top_bar_items(self): """validate url in top bar items""" for top_bar_item in self.get("top_bar_items"): if top_bar_item.parent_label: parent_label_item = self.get("top_bar_items", {"label": top_bar_item.parent_label}) if not parent_label_item: # invalid item dataent.throw(_("{0} does not exist in row {1}").format(top_bar_item.parent_label, top_bar_item.idx)) elif not parent_label_item[0] or parent_label_item[0].url: # parent cannot have url dataent.throw(_("{0} in row {1} cannot have both URL and child items").format(top_bar_item.parent_label, top_bar_item.idx)) def validate_footer_items(self): """validate url in top bar items""" for footer_item in self.get("footer_items"): if footer_item.parent_label: parent_label_item = self.get("footer_items", {"label": footer_item.parent_label}) if not parent_label_item: # invalid item dataent.throw(_("{0} does not exist in row {1}").format(footer_item.parent_label, footer_item.idx)) elif not parent_label_item[0] or parent_label_item[0].url: # parent cannot have url dataent.throw(_("{0} in row {1} cannot have both URL and child items").format(footer_item.parent_label, footer_item.idx)) 
def on_update(self): self.clear_cache() def clear_cache(self): # make js and css # clear web cache (for menus!) dataent.clear_cache(user = 'Guest') from dataent.website.render import clear_cache clear_cache() # clears role based home pages dataent.clear_cache() def get_website_settings(): hooks = dataent.get_hooks() context = dataent._dict({ 'top_bar_items': get_items('top_bar_items'), 'footer_items': get_items('footer_items'), "post_login": [ {"label": _("My Account"), "url": "/me"}, # {"class": "divider"}, {"label": _("Logout"), "url": "/?cmd=web_logout"} ] }) settings = dataent.get_single("Website Settings") for k in ["banner_html", "brand_html", "copyright", "twitter_share_via", "facebook_share", "google_plus_one", "twitter_share", "linked_in_share", "disable_signup", "hide_footer_signup", "head_html", "title_prefix", "navbar_search"]: if hasattr(settings, k): context[k] = settings.get(k) if settings.address: context["footer_address"] = settings.address for k in ["facebook_share", "google_plus_one", "twitter_share", "linked_in_share", "disable_signup"]: context[k] = int(context.get(k) or 0) if dataent.request: context.url = quote(str(get_request_site_address(full_address=True)), safe="/:") context.encoded_title = quote(encode(context.title or ""), str("")) for update_website_context in hooks.update_website_context or []: dataent.get_attr(update_website_context)(context) context.web_include_js = hooks.web_include_js or [] context.web_include_css = hooks.web_include_css or [] via_hooks = dataent.get_hooks("website_context") for key in via_hooks: context[key] = via_hooks[key] if key not in ("top_bar_items", "footer_items", "post_login") \ and isinstance(context[key], (list, tuple)): context[key] = context[key][-1] add_website_theme(context) if not context.get("favicon"): context["favicon"] = "/assets/dataent/images/favicon.png" if settings.favicon and settings.favicon != "attach_files:": context["favicon"] = settings.favicon return context def 
get_items(parentfield): all_top_items = dataent.db.sql("""\ select * from `tabTop Bar Item` where parent='Website Settings' and parentfield= %s order by idx asc""", parentfield, as_dict=1) top_items = [d for d in all_top_items if not d['parent_label']] # attach child items to top bar for d in all_top_items: if d['parent_label']: for t in top_items: if t['label']==d['parent_label']: if not 'child_items' in t: t['child_items'] = [] t['child_items'].append(d) break return top_items @dataent.whitelist(allow_guest=True) def is_chat_enabled(): return bool(dataent.db.get_single_value('Website Settings', 'chat_enable'))
b1561a5a09375df8219b095e33b192ffafb03de1
eb755b42aa2ec9e6ab63001a6293d5e225837086
/Other_web_spider/Phantomjs/Phantomjs_id_location.py
f4b494cfe8dbfb44c591643255e8da05cbfcbc6d
[]
no_license
paramedick/python-web-crawlers
7c493cbc51c4189d0dabaeae6cfba84123f7401d
5deea2073583bbb8d229c6404680e543ebcdbc5b
refs/heads/master
2022-01-19T21:25:29.058709
2019-08-13T14:43:22
2019-08-13T14:43:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
209
py
# coding=utf-8 from selenium import webdriver browser = webdriver.Firefox() browser.get("http://www.baidu.com/") browser.find_element_by_id("kw").send_keys("python") browser.implicitly_wait(60) browser.quit()
38e66487b8f3e6080d36fa5c19b8a95bc793311f
0e9f73d2ef1239b22e049ef6338362da7dbfb122
/source/web/Django/FatQuantsDjango/FatQuantsDjango/ticker/migrations/0065_auto_20190209_2232.py
da99dbc20030cad9a96f28f1f13228b4442183bd
[]
no_license
Andy-Mason/FatQuants
3c4bfafc29834af76b0be40e93b0e210e0ef5056
edd0e98f4599ef91adbdf4179164769ddd66c62a
refs/heads/master
2023-01-11T10:57:50.563742
2021-08-11T19:04:59
2021-08-11T19:04:59
73,127,295
0
0
null
null
null
null
UTF-8
Python
false
false
661
py
# Generated by Django 2.1.3 on 2019-02-09 22:32 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('ticker', '0064_auto_20190209_2231'), ] operations = [ migrations.AddField( model_name='tickereoddata', name='close_value', field=models.FloatField(blank=True, db_column='close_value', null=True, verbose_name='Close'), ), migrations.AddField( model_name='tickereoddata', name='volume', field=models.FloatField(blank=True, db_column='volume', null=True, verbose_name='Volume'), ), ]
2752c83cd011675845e421f06b6105a534c335c1
0b490626a19dae9d2b9da68cebad27ee43c3dec5
/python/seqdiff/__main__.py
3b756498b413ec01837a11212d0b346101f4a8cd
[ "Apache-2.0" ]
permissive
tamuhey/seqdiff
1c8e4bb8bafaad069472282835a245a7d2b94ffa
4bd79979fbce25989d55644d6e3ca2efbc15edd8
refs/heads/master
2023-06-15T19:23:41.763124
2021-07-13T05:08:16
2021-07-13T05:08:16
273,475,966
5
0
null
2021-07-13T05:07:25
2020-06-19T11:19:56
Rust
UTF-8
Python
false
false
96
py
from seqdiff import print_diff if __name__ == "__main__": print_diff([1, 2, 3], [1, 3, 4])
7ac53b30a3b57016edf0f73d53b71a70649b08bc
410a0fac2282d867aa7e531c40c7289e4484510c
/venv/bin/django-admin.py
61ab7e2731c49b3611f2427a7932908cf764ede8
[]
no_license
NickVazovsky/parser_go
922a46ecca1c3def1adcfe3e029c82641247804b
a8dc3c082d246e4325d13e7ef1863df7238694cc
refs/heads/master
2020-07-29T12:50:55.579542
2019-09-20T14:31:02
2019-09-20T14:31:02
209,806,760
0
0
null
2019-09-20T14:26:48
2019-09-20T14:13:45
Python
UTF-8
Python
false
false
164
py
#!/home/nick/PycharmProjects/new_project/venv/bin/python3 from django.core import management if __name__ == "__main__": management.execute_from_command_line()
9b065ad3bc64f06b7fcaff92d27fb2ee90ecfe6e
fc58366ed416de97380df7040453c9990deb7faa
/tools/dockerize/webportal/usr/lib/python2.7/site-packages/oslo_db/tests/old_import_api/utils.py
44eb1aeb24eb802ae554e3fcfda13866332912a7
[ "Apache-2.0" ]
permissive
foruy/openflow-multiopenstack
eb51e37b2892074234ebdd5b501b24aa1f72fb86
74140b041ac25ed83898ff3998e8dcbed35572bb
refs/heads/master
2016-09-13T08:24:09.713883
2016-05-19T01:16:58
2016-05-19T01:16:58
58,977,485
1
0
null
null
null
null
UTF-8
Python
false
false
1,310
py
# Copyright 2010-2011 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import contextlib from oslo.config import cfg from oslotest import base as test_base from oslotest import moxstubout import six if six.PY3: @contextlib.contextmanager def nested(*contexts): with contextlib.ExitStack() as stack: yield [stack.enter_context(c) for c in contexts] else: nested = contextlib.nested class BaseTestCase(test_base.BaseTestCase): def setUp(self, conf=cfg.CONF): super(BaseTestCase, self).setUp() moxfixture = self.useFixture(moxstubout.MoxStubout()) self.mox = moxfixture.mox self.stubs = moxfixture.stubs self.conf = conf self.addCleanup(self.conf.reset)
f44c79dba52af15a4c324b94646a2e32d5a6143e
ac5e52a3fc52dde58d208746cddabef2e378119e
/exps-gsn-edf/gsn-edf_ut=2.5_rd=0.65_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=90/sched.py
73271d93d6d30c676753279395b2f3b6ba2f57c3
[]
no_license
ricardobtxr/experiment-scripts
1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1
7bcebff7ac2f2822423f211f1162cd017a18babb
refs/heads/master
2023-04-09T02:37:41.466794
2021-04-25T03:27:16
2021-04-25T03:27:16
358,926,457
0
0
null
null
null
null
UTF-8
Python
false
false
259
py
-X FMLP -Q 0 -L 3 93 400 -X FMLP -Q 0 -L 3 80 400 -X FMLP -Q 1 -L 2 79 250 -X FMLP -Q 1 -L 2 57 175 -X FMLP -Q 2 -L 1 54 200 -X FMLP -Q 2 -L 1 39 400 -X FMLP -Q 3 -L 1 37 125 -X FMLP -Q 3 -L 1 33 200 29 150 18 150 16 150 11 125 11 125
503cdbd13ac9e95d89d2847aabb527de1c810769
369b7f114f9bd9b45dd5fef77a070cb73abb68d1
/handle/itl/h20180123/insertFundInvestLog.py
0754801c39b9a7088aaa3f77d47ee88123974bf7
[]
no_license
lyjloveabc/thor_handle
d790ee25317f724825c94a6b346a034ec0ae6e3d
8b9eda97ec873f3bf1732a428898a04d6a55c0af
refs/heads/master
2021-12-27T10:15:16.668264
2021-08-16T13:45:34
2021-08-16T13:45:34
84,824,162
0
0
null
null
null
null
UTF-8
Python
false
false
571
py
import json import requests class InsertFundInvestLog: def __init__(self): # self.ENV = 'http://127.0.0.1:7001' self.ENV = 'http://127.0.0.1:7001' self.ENV_PROD = 'http://121.43.166.200:7001' def handle(self): with open('fundInvestLog_20180301.json', 'r') as f: data = json.load(f) for row in data: response = requests.post(self.ENV_PROD + '/fundInvestLog/fundInvestLog', data=row) print(response.text) if __name__ == '__main__': ifil = InsertFundInvestLog() ifil.handle()
38986d4a704fb788926d73c8dcd2ed3bad07d847
45de3aa97525713e3a452c18dcabe61ac9cf0877
/src/primaires/objet/types/indefini.py
72fd243688f6d99b9e1d5c92bccd62e605901e8d
[ "BSD-3-Clause" ]
permissive
stormi/tsunami
95a6da188eadea3620c70f7028f32806ee2ec0d1
bdc853229834b52b2ee8ed54a3161a1a3133d926
refs/heads/master
2020-12-26T04:27:13.578652
2015-11-17T21:32:38
2015-11-17T21:32:38
25,606,146
0
0
null
null
null
null
UTF-8
Python
false
false
1,744
py
# -*-coding:Utf-8 -* # Copyright (c) 2010 LE GOFF Vincent # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # * Neither the name of the copyright holder nor the names of its contributors # may be used to endorse or promote products derived from this software # without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE # ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE # LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR # CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT # OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS # INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN # CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) # ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. """Fichier contenant le type Indefini.""" from .base import BaseType class Indefini(BaseType): """Type d'objet: indéfini. """ nom_type = "indéfini"
e20e7f340b0e719fa019c02d2a227a6589f4cc4f
9743d5fd24822f79c156ad112229e25adb9ed6f6
/xai/brain/wordbase/nouns/_baccalaureates.py
b6b3d264696b8bd5b102cbd3f2ddde23ad54b79c
[ "MIT" ]
permissive
cash2one/xai
de7adad1758f50dd6786bf0111e71a903f039b64
e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6
refs/heads/master
2021-01-19T12:33:54.964379
2017-01-28T02:00:50
2017-01-28T02:00:50
null
0
0
null
null
null
null
UTF-8
Python
false
false
287
py
from xai.brain.wordbase.nouns._baccalaureate import _BACCALAUREATE #calss header class _BACCALAUREATES(_BACCALAUREATE, ): def __init__(self,): _BACCALAUREATE.__init__(self) self.name = "BACCALAUREATES" self.specie = 'nouns' self.basic = "baccalaureate" self.jsondata = {}
e73cfc2cdec009c867b3c766a6a035d38f33dfd6
b3ab2979dd8638b244abdb2dcf8da26d45d7b730
/cloudcheckr_cmx_client/models/azure_csp_authorization_request_model.py
e76c907e4fd2a4a72db3231108f61a129ace91ae
[]
no_license
CU-CommunityApps/ct-cloudcheckr-cmx-client
4b3d9b82c5dfdaf24f8f443526868e971d8d1b15
18ac9fd4d6c4ae799c0d21745eaecd783da68c0c
refs/heads/main
2023-03-03T19:53:57.685925
2021-02-09T13:05:07
2021-02-09T13:05:07
329,308,757
0
1
null
null
null
null
UTF-8
Python
false
false
5,134
py
# coding: utf-8 """ CloudCheckr API CloudCheckr API # noqa: E501 OpenAPI spec version: v1 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six class AzureCspAuthorizationRequestModel(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'purchase_model': 'str', 'region_group': 'str' } attribute_map = { 'purchase_model': 'purchaseModel', 'region_group': 'regionGroup' } def __init__(self, purchase_model=None, region_group=None): # noqa: E501 """AzureCspAuthorizationRequestModel - a model defined in Swagger""" # noqa: E501 self._purchase_model = None self._region_group = None self.discriminator = None if purchase_model is not None: self.purchase_model = purchase_model if region_group is not None: self.region_group = region_group @property def purchase_model(self): """Gets the purchase_model of this AzureCspAuthorizationRequestModel. # noqa: E501 The account's purchase model. # noqa: E501 :return: The purchase_model of this AzureCspAuthorizationRequestModel. # noqa: E501 :rtype: str """ return self._purchase_model @purchase_model.setter def purchase_model(self, purchase_model): """Sets the purchase_model of this AzureCspAuthorizationRequestModel. The account's purchase model. # noqa: E501 :param purchase_model: The purchase_model of this AzureCspAuthorizationRequestModel. 
# noqa: E501 :type: str """ allowed_values = ["AzurePlan", "Classic"] # noqa: E501 if purchase_model not in allowed_values: raise ValueError( "Invalid value for `purchase_model` ({0}), must be one of {1}" # noqa: E501 .format(purchase_model, allowed_values) ) self._purchase_model = purchase_model @property def region_group(self): """Gets the region_group of this AzureCspAuthorizationRequestModel. # noqa: E501 The account's region group (i.e. the unique data center group that is being used, e.g. commercial, gov, etc). # noqa: E501 :return: The region_group of this AzureCspAuthorizationRequestModel. # noqa: E501 :rtype: str """ return self._region_group @region_group.setter def region_group(self, region_group): """Sets the region_group of this AzureCspAuthorizationRequestModel. The account's region group (i.e. the unique data center group that is being used, e.g. commercial, gov, etc). # noqa: E501 :param region_group: The region_group of this AzureCspAuthorizationRequestModel. # noqa: E501 :type: str """ allowed_values = ["Commercial", "UsGov", "Germany"] # noqa: E501 if region_group not in allowed_values: raise ValueError( "Invalid value for `region_group` ({0}), must be one of {1}" # noqa: E501 .format(region_group, allowed_values) ) self._region_group = region_group def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value if issubclass(AzureCspAuthorizationRequestModel, dict): for key, value in self.items(): result[key] = value return result def to_str(self): """Returns the string representation of the model""" 
return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, AzureCspAuthorizationRequestModel): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
2e512f27daf59d99f6040b185763a00b8e07ea3a
8a84375dac5e6b33215d20e12e0c197aeaa6e83d
/docs/conf.py
9ff817f5b2af6580eaed06b752ba69a80d83d411
[ "Apache-2.0" ]
permissive
michaeljoseph/pymoji
5579af089cabf1784c656e7fddf9d20f9e6f5d6a
4bf26babc7b968d9a753907d4db5402cfd5c6d63
refs/heads/master
2021-01-01T18:12:37.805141
2013-12-09T10:42:24
2013-12-09T10:42:24
null
0
0
null
null
null
null
UTF-8
Python
false
false
8,403
py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # complexity documentation build configuration file, created by # sphinx-quickstart on Tue Jul 9 22:26:36 2013. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os import sphinx_bootstrap_theme # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # Get the project root dir, which is the parent dir of this cwd = os.getcwd() project_root = os.path.dirname(cwd) # Insert the project root dir as the first element in the PYTHONPATH. # This lets us ensure that the source package is imported, and that its # version is used. sys.path.insert(0, project_root) import pymoji # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'pymoji' copyright = u', ' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. 
# # The short X.Y version. version = pymoji.__version__ # The full version, including alpha/beta/rc tags. release = pymoji.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". 
#html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). 
#html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'pymojidoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'pymoji.tex', u'pymoji Documentation', u'', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'pymoji', u'pymoji Documentation', [u''], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'pymoji', u'pymoji Documentation', u'', 'pymoji', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. 
#texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False html_theme = 'bootstrap' html_theme_path = sphinx_bootstrap_theme.get_html_theme_path()
d2b41f94eec0c608bee7477753703dafae3dccc8
98ca37f5dd2751efaa060cca19e0b83f871d7765
/sdk/metricsadvisor/azure-ai-metricsadvisor/tests/base_testcase_aad.py
a40496c6753e169137d95e6df644d3887e5da9a1
[ "LicenseRef-scancode-generic-cla", "LGPL-2.1-or-later", "MIT" ]
permissive
jayhebe/azure-sdk-for-python
5ea99732ebb9929d3f6f77c08cc640d5915970b1
f4455f85d9fe747fa4de2fdc691b975c07bfeea5
refs/heads/main
2023-06-24T01:22:06.602194
2021-07-28T02:12:25
2021-07-28T02:12:25
390,290,984
1
0
MIT
2021-07-28T09:23:46
2021-07-28T09:23:46
null
UTF-8
Python
false
false
20,507
py
# coding=utf-8 # ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- import datetime from devtools_testutils import AzureTestCase from azure_devtools.scenario_tests import ( ReplayableTest, create_random_name ) from azure.ai.metricsadvisor import ( MetricsAdvisorKeyCredential, MetricsAdvisorAdministrationClient, MetricsAdvisorClient, ) from azure.ai.metricsadvisor.models import ( SqlServerDataFeedSource, DataFeedSchema, DataFeedMetric, DataFeedDimension, DataFeedGranularity, DataFeedIngestionSettings, DataFeedMissingDataPointFillSettings, DataFeedRollupSettings, MetricAlertConfiguration, MetricAnomalyAlertScope, MetricAnomalyAlertConditions, MetricBoundaryCondition, TopNGroupScope, SeverityCondition, MetricDetectionCondition, MetricSeriesGroupDetectionCondition, MetricSingleSeriesDetectionCondition, SmartDetectionCondition, SuppressCondition, ChangeThresholdCondition, HardThresholdCondition, EmailNotificationHook, WebNotificationHook, ) from azure.identity import DefaultAzureCredential class MockCredential(): def get_token(self, *scopes, **kwargs): from azure.core.credentials import AccessToken return AccessToken("fake-token", 0) class TestMetricsAdvisorAdministrationClientBase(AzureTestCase): FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['Ocp-Apim-Subscription-Key', 'x-api-key'] def __init__(self, method_name): super(TestMetricsAdvisorAdministrationClientBase, self).__init__(method_name) self.vcr.match_on = ["path", "method", "query"] if self.is_live: service_endpoint = self.get_settings_value("METRICS_ADVISOR_ENDPOINT") self.sql_server_connection_string = self.get_settings_value("METRICS_ADVISOR_SQL_SERVER_CONNECTION_STRING") self.azure_table_connection_string = 
self.get_settings_value("METRICS_ADVISOR_AZURE_TABLE_CONNECTION_STRING") self.azure_blob_connection_string = self.get_settings_value("METRICS_ADVISOR_AZURE_BLOB_CONNECTION_STRING") self.azure_cosmosdb_connection_string = self.get_settings_value("METRICS_ADVISOR_COSMOS_DB_CONNECTION_STRING") self.application_insights_api_key = self.get_settings_value("METRICS_ADVISOR_APPLICATION_INSIGHTS_API_KEY") self.azure_data_explorer_connection_string = self.get_settings_value("METRICS_ADVISOR_AZURE_DATA_EXPLORER_CONNECTION_STRING") self.influxdb_connection_string = self.get_settings_value("METRICS_ADVISOR_INFLUX_DB_CONNECTION_STRING") self.influxdb_password = self.get_settings_value("METRICS_ADVISOR_INFLUX_DB_PASSWORD") self.azure_datalake_account_key = self.get_settings_value("METRICS_ADVISOR_AZURE_DATALAKE_ACCOUNT_KEY") self.mongodb_connection_string = self.get_settings_value("METRICS_ADVISOR_AZURE_MONGO_DB_CONNECTION_STRING") self.mysql_connection_string = self.get_settings_value("METRICS_ADVISOR_MYSQL_CONNECTION_STRING") self.postgresql_connection_string = self.get_settings_value("METRICS_ADVISOR_POSTGRESQL_CONNECTION_STRING") self.anomaly_detection_configuration_id = self.get_settings_value("METRICS_ADVISOR_ANOMALY_DETECTION_CONFIGURATION_ID") self.data_feed_id = self.get_settings_value("METRICS_ADVISOR_DATA_FEED_ID") self.metric_id = self.get_settings_value("METRICS_ADVISOR_METRIC_ID") credential = DefaultAzureCredential() self.scrubber.register_name_pair( self.sql_server_connection_string, "connectionstring" ) self.scrubber.register_name_pair( self.azure_table_connection_string, "connectionstring" ) self.scrubber.register_name_pair( self.azure_blob_connection_string, "connectionstring" ) self.scrubber.register_name_pair( self.azure_cosmosdb_connection_string, "connectionstring" ) self.scrubber.register_name_pair( self.application_insights_api_key, "connectionstring" ) self.scrubber.register_name_pair( self.azure_data_explorer_connection_string, "connectionstring" ) 
self.scrubber.register_name_pair( self.influxdb_connection_string, "connectionstring" ) self.scrubber.register_name_pair( self.influxdb_password, "connectionstring" ) self.scrubber.register_name_pair( self.azure_datalake_account_key, "connectionstring" ) self.scrubber.register_name_pair( self.mongodb_connection_string, "connectionstring" ) self.scrubber.register_name_pair( self.mysql_connection_string, "connectionstring" ) self.scrubber.register_name_pair( self.postgresql_connection_string, "connectionstring" ) self.scrubber.register_name_pair( self.metric_id, "metric_id" ) self.scrubber.register_name_pair( self.data_feed_id, "data_feed_id" ) self.scrubber.register_name_pair( self.anomaly_detection_configuration_id, "anomaly_detection_configuration_id" ) else: service_endpoint = "https://endpointname.cognitiveservices.azure.com" self.sql_server_connection_string = "SQL_SERVER_CONNECTION_STRING" self.azure_table_connection_string = "AZURE_TABLE_CONNECTION_STRING" self.azure_blob_connection_string = "AZURE_BLOB_CONNECTION_STRING" self.azure_cosmosdb_connection_string = "COSMOS_DB_CONNECTION_STRING" self.application_insights_api_key = "METRICS_ADVISOR_APPLICATION_INSIGHTS_API_KEY" self.azure_data_explorer_connection_string = "METRICS_ADVISOR_AZURE_DATA_EXPLORER_CONNECTION_STRING" self.influxdb_connection_string = "METRICS_ADVISOR_INFLUXDB_CONNECTION_STRING" self.influxdb_password = "METRICS_ADVISOR_INFLUXDB_PASSWORD" self.azure_datalake_account_key = "METRICS_ADVISOR_AZURE_DATALAKE_ACCOUNT_KEY" self.mongodb_connection_string = "METRICS_ADVISOR_AZURE_MONGODB_CONNECTION_STRING" self.mysql_connection_string = "METRICS_ADVISOR_MYSQL_CONNECTION_STRING" self.postgresql_connection_string = "METRICS_ADVISOR_POSTGRESQL_CONNECTION_STRING" self.anomaly_detection_configuration_id = "anomaly_detection_configuration_id" self.metric_id = "metric_id" self.data_feed_id = "data_feed_id" credential = MockCredential() self.admin_client = 
MetricsAdvisorAdministrationClient(service_endpoint, credential) def _create_data_feed(self, name): name = create_random_name(name) return self.admin_client.create_data_feed( name=name, source=SqlServerDataFeedSource( connection_string=self.sql_server_connection_string, query="select * from adsample2 where Timestamp = @StartTime" ), granularity="Daily", schema=DataFeedSchema( metrics=[ DataFeedMetric(name="cost"), DataFeedMetric(name="revenue") ], dimensions=[ DataFeedDimension(name="category"), DataFeedDimension(name="city") ], ), ingestion_settings="2019-10-01T00:00:00Z", ) def _create_data_feed_and_detection_config(self, name): try: data_feed = self._create_data_feed(name) detection_config_name = create_random_name(name) detection_config = self.admin_client.create_detection_configuration( name=detection_config_name, metric_id=data_feed.metric_ids['cost'], description="testing", whole_series_detection_condition=MetricDetectionCondition( smart_detection_condition=SmartDetectionCondition( sensitivity=50, anomaly_detector_direction="Both", suppress_condition=SuppressCondition( min_number=5, min_ratio=5 ) ) ) ) return detection_config, data_feed except Exception as e: self.admin_client.delete_data_feed(data_feed.id) raise e def _create_data_feed_for_update(self, name): data_feed_name = create_random_name(name) return self.admin_client.create_data_feed( name=data_feed_name, source=SqlServerDataFeedSource( connection_string=self.sql_server_connection_string, query=u"select * from adsample2 where Timestamp = @StartTime" ), granularity=DataFeedGranularity( granularity_type="Daily", ), schema=DataFeedSchema( metrics=[ DataFeedMetric(name="cost", display_name="display cost", description="the cost"), DataFeedMetric(name="revenue", display_name="display revenue", description="the revenue") ], dimensions=[ DataFeedDimension(name="category", display_name="display category"), DataFeedDimension(name="city", display_name="display city") ], timestamp_column="Timestamp" ), 
ingestion_settings=DataFeedIngestionSettings( ingestion_begin_time=datetime.datetime(2019, 10, 1), data_source_request_concurrency=0, ingestion_retry_delay=-1, ingestion_start_offset=-1, stop_retry_after=-1, ), admins=["[email protected]"], data_feed_description="my first data feed", missing_data_point_fill_settings=DataFeedMissingDataPointFillSettings( fill_type="SmartFilling" ), rollup_settings=DataFeedRollupSettings( rollup_type="NoRollup", rollup_method="None", ), viewers=["viewers"], access_mode="Private", action_link_template="action link template" ) def _create_alert_config_for_update(self, name): try: detection_config, data_feed = self._create_data_feed_and_detection_config(name) alert_config_name = create_random_name(name) alert_config = self.admin_client.create_alert_configuration( name=alert_config_name, cross_metrics_operator="AND", metric_alert_configurations=[ MetricAlertConfiguration( detection_configuration_id=detection_config.id, alert_scope=MetricAnomalyAlertScope( scope_type="TopN", top_n_group_in_scope=TopNGroupScope( top=5, period=10, min_top_count=9 ) ), alert_conditions=MetricAnomalyAlertConditions( metric_boundary_condition=MetricBoundaryCondition( direction="Both", companion_metric_id=data_feed.metric_ids['cost'], lower=1.0, upper=5.0 ) ) ), MetricAlertConfiguration( detection_configuration_id=detection_config.id, alert_scope=MetricAnomalyAlertScope( scope_type="SeriesGroup", series_group_in_scope={'city': 'Shenzhen'} ), alert_conditions=MetricAnomalyAlertConditions( severity_condition=SeverityCondition( min_alert_severity="Low", max_alert_severity="High" ) ) ), MetricAlertConfiguration( detection_configuration_id=detection_config.id, alert_scope=MetricAnomalyAlertScope( scope_type="WholeSeries" ), alert_conditions=MetricAnomalyAlertConditions( severity_condition=SeverityCondition( min_alert_severity="Low", max_alert_severity="High" ) ) ) ], hook_ids=[] ) return alert_config, data_feed, detection_config except Exception as e: 
self.admin_client.delete_data_feed(data_feed.id) raise e def _create_detection_config_for_update(self, name): try: data_feed = self._create_data_feed(name) detection_config_name = create_random_name("testupdated") detection_config = self.admin_client.create_detection_configuration( name=detection_config_name, metric_id=data_feed.metric_ids['cost'], description="My test metric anomaly detection configuration", whole_series_detection_condition=MetricDetectionCondition( condition_operator="AND", smart_detection_condition=SmartDetectionCondition( sensitivity=50, anomaly_detector_direction="Both", suppress_condition=SuppressCondition( min_number=5, min_ratio=5 ) ), hard_threshold_condition=HardThresholdCondition( anomaly_detector_direction="Both", suppress_condition=SuppressCondition( min_number=5, min_ratio=5 ), lower_bound=0, upper_bound=100 ), change_threshold_condition=ChangeThresholdCondition( change_percentage=50, shift_point=30, within_range=True, anomaly_detector_direction="Both", suppress_condition=SuppressCondition( min_number=2, min_ratio=2 ) ) ), series_detection_conditions=[MetricSingleSeriesDetectionCondition( series_key={"city": "Shenzhen", "category": "Jewelry"}, smart_detection_condition=SmartDetectionCondition( anomaly_detector_direction="Both", sensitivity=63, suppress_condition=SuppressCondition( min_number=1, min_ratio=100 ) ) )], series_group_detection_conditions=[MetricSeriesGroupDetectionCondition( series_group_key={"city": "Sao Paulo"}, smart_detection_condition=SmartDetectionCondition( anomaly_detector_direction="Both", sensitivity=63, suppress_condition=SuppressCondition( min_number=1, min_ratio=100 ) ) )] ) return detection_config, data_feed except Exception as e: self.admin_client.delete_data_feed(data_feed.id) raise e def _create_email_hook_for_update(self, name): return self.admin_client.create_hook( hook=EmailNotificationHook( name=name, emails_to_alert=["[email protected]"], description="my email hook", external_link="external link" ) ) 
def _create_web_hook_for_update(self, name): return self.admin_client.create_hook( hook=WebNotificationHook( name=name, endpoint="https://httpbin.org/post", description="my web hook", external_link="external link", username="krista", password="123" ) ) class TestMetricsAdvisorClientBase(AzureTestCase): FILTER_HEADERS = ReplayableTest.FILTER_HEADERS + ['Ocp-Apim-Subscription-Key', 'x-api-key'] def __init__(self, method_name): super(TestMetricsAdvisorClientBase, self).__init__(method_name) self.vcr.match_on = ["path", "method", "query"] if self.is_live: service_endpoint = self.get_settings_value("METRICS_ADVISOR_ENDPOINT") self.anomaly_detection_configuration_id = self.get_settings_value("METRICS_ADVISOR_ANOMALY_DETECTION_CONFIGURATION_ID") self.anomaly_alert_configuration_id = self.get_settings_value("METRICS_ADVISOR_ANOMALY_ALERT_CONFIGURATION_ID") self.metric_id = self.get_settings_value("METRICS_ADVISOR_METRIC_ID") self.incident_id = self.get_settings_value("METRICS_ADVISOR_INCIDENT_ID") self.dimension_name = self.get_settings_value("METRICS_ADVISOR_DIMENSION_NAME") self.feedback_id = self.get_settings_value("METRICS_ADVISOR_FEEDBACK_ID") self.alert_id = self.get_settings_value("METRICS_ADVISOR_ALERT_ID") credential = DefaultAzureCredential() self.scrubber.register_name_pair( self.anomaly_detection_configuration_id, "anomaly_detection_configuration_id" ) self.scrubber.register_name_pair( self.anomaly_alert_configuration_id, "anomaly_alert_configuration_id" ) self.scrubber.register_name_pair( self.metric_id, "metric_id" ) self.scrubber.register_name_pair( self.incident_id, "incident_id" ) self.scrubber.register_name_pair( self.dimension_name, "dimension_name" ) self.scrubber.register_name_pair( self.feedback_id, "feedback_id" ) self.scrubber.register_name_pair( self.alert_id, "alert_id" ) else: service_endpoint = "https://endpointname.cognitiveservices.azure.com" self.anomaly_detection_configuration_id = "anomaly_detection_configuration_id" 
self.anomaly_alert_configuration_id = "anomaly_alert_configuration_id" self.metric_id = "metric_id" self.incident_id = "incident_id" self.dimension_name = "dimension_name" self.feedback_id = "feedback_id" self.alert_id = "alert_id" credential = MockCredential() self.client = MetricsAdvisorClient(service_endpoint, credential)
933b59e302f98e982ead78cbe8328132cfbe6402
6f04a6ef99c581ed2f0519c897f254a7b63fb61d
/rastervision/utils/zxy2geotiff.py
80210424cc55d2fa376a4eef16bfa35762587c46
[ "LicenseRef-scancode-generic-cla", "Apache-2.0" ]
permissive
dgketchum/raster-vision
18030c9a8bfe99386aa95adbf8e3ec51d204947f
fe74bef30daa5821023946576b00c584ddc56de8
refs/heads/master
2020-08-30T13:56:08.598240
2019-11-03T17:38:33
2019-11-03T17:38:33
218,400,435
3
1
NOASSERTION
2019-10-29T23:09:57
2019-10-29T23:09:57
null
UTF-8
Python
false
false
7,481
py
import tempfile from PIL import Image import numpy as np import click import mercantile import rasterio from rasterio.windows import Window import pyproj from rastervision.utils.files import (download_if_needed, get_local_path, upload_or_copy) from rastervision.command.aux.cogify_command import create_cog def lnglat2merc(lng, lat): """Convert lng, lat point to x/y Web Mercator tuple.""" return pyproj.transform( pyproj.Proj(init='epsg:4326'), pyproj.Proj(init='epsg:3857'), lng, lat) def merc2lnglat(x, y): """Convert x, y Web Mercator point to lng/lat tuple.""" return pyproj.transform( pyproj.Proj(init='epsg:3857'), pyproj.Proj(init='epsg:4326'), x, y) def merc2pixel(tile_x, tile_y, zoom, merc_x, merc_y, tile_sz=256): """Convert Web Mercator point to pixel coordinates. This is within the coordinate frame of a single ZXY tile. Args: tile_x: (int) x coordinate of ZXY tile tile_y: (int) y coordinate of ZXY tile zoom: (int) zoom level of ZXY tile merc_x: (float) Web Mercator x axis of point merc_y: (float) Web Mercator y axis of point tile_sz: (int) size of ZXY tile """ tile_merc_bounds = mercantile.xy_bounds(tile_x, tile_y, zoom) pix_y = int( round(tile_sz * ((tile_merc_bounds.top - merc_y) / (tile_merc_bounds.top - tile_merc_bounds.bottom)))) pix_x = int( round(tile_sz * ((merc_x - tile_merc_bounds.left) / (tile_merc_bounds.right - tile_merc_bounds.left)))) return (pix_x, pix_y) def _zxy2geotiff(tile_schema, zoom, bounds, output_uri, make_cog=False): """Generates a GeoTIFF of a bounded region from a ZXY tile server. Args: tile_schema: (str) the URI schema for zxy tiles (ie. a slippy map tile server) of the form /tileserver-uri/{z}/{x}/{y}.png. If {-y} is used, the tiles are assumed to be indexed using TMS coordinates, where the y axis starts at the southernmost point. The URI can be for http, S3, or the local file system. 
zoom: (int) the zoom level to use when retrieving tiles bounds: (list) a list of length 4 containing min_lat, min_lng, max_lat, max_lng output_uri: (str) where to save the GeoTIFF. The URI can be for http, S3, or the local file system """ min_lat, min_lng, max_lat, max_lng = bounds if min_lat >= max_lat: raise ValueError('min_lat must be < max_lat') if min_lng >= max_lng: raise ValueError('min_lng must be < max_lng') is_tms = False if '{-y}' in tile_schema: tile_schema = tile_schema.replace('{-y}', '{y}') is_tms = True tmp_dir_obj = tempfile.TemporaryDirectory() tmp_dir = tmp_dir_obj.name # Get range of tiles that cover bounds. output_path = get_local_path(output_uri, tmp_dir) tile_sz = 256 t = mercantile.tile(min_lng, max_lat, zoom) xmin, ymin = t.x, t.y t = mercantile.tile(max_lng, min_lat, zoom) xmax, ymax = t.x, t.y # The supplied bounds are contained within the "tile bounds" -- ie. the # bounds of the set of tiles that covers the supplied bounds. Therefore, # we need to crop out the imagery that lies within the supplied bounds. # We do this by computing a top, bottom, left, and right offset in pixel # units of the supplied bounds against the tile bounds. Getting the offsets # in pixel units involves converting lng/lat to web mercator units since we # assume that is the CRS of the tiles. These offsets are then used to crop # individual tiles and place them correctly into the output raster. 
nw_merc_x, nw_merc_y = lnglat2merc(min_lng, max_lat) left_pix_offset, top_pix_offset = merc2pixel(xmin, ymin, zoom, nw_merc_x, nw_merc_y) se_merc_x, se_merc_y = lnglat2merc(max_lng, min_lat) se_left_pix_offset, se_top_pix_offset = merc2pixel(xmax, ymax, zoom, se_merc_x, se_merc_y) right_pix_offset = tile_sz - se_left_pix_offset bottom_pix_offset = tile_sz - se_top_pix_offset uncropped_height = tile_sz * (ymax - ymin + 1) uncropped_width = tile_sz * (xmax - xmin + 1) height = uncropped_height - top_pix_offset - bottom_pix_offset width = uncropped_width - left_pix_offset - right_pix_offset transform = rasterio.transform.from_bounds(nw_merc_x, se_merc_y, se_merc_x, nw_merc_y, width, height) with rasterio.open( output_path, 'w', driver='GTiff', height=height, width=width, count=3, crs='epsg:3857', transform=transform, dtype=rasterio.uint8) as dataset: out_x = 0 for xi, x in enumerate(range(xmin, xmax + 1)): tile_xmin, tile_xmax = 0, tile_sz - 1 if x == xmin: tile_xmin += left_pix_offset if x == xmax: tile_xmax -= right_pix_offset window_width = tile_xmax - tile_xmin + 1 out_y = 0 for yi, y in enumerate(range(ymin, ymax + 1)): tile_ymin, tile_ymax = 0, tile_sz - 1 if y == ymin: tile_ymin += top_pix_offset if y == ymax: tile_ymax -= bottom_pix_offset window_height = tile_ymax - tile_ymin + 1 # Convert from xyz to tms if needed. 
# https://gist.github.com/tmcw/4954720 if is_tms: y = (2**zoom) - y - 1 tile_uri = tile_schema.format(x=x, y=y, z=zoom) tile_path = download_if_needed(tile_uri, tmp_dir) img = np.array(Image.open(tile_path)) img = img[tile_ymin:tile_ymax + 1, tile_xmin:tile_xmax + 1, :] window = Window(out_x, out_y, window_width, window_height) dataset.write( np.transpose(img[:, :, 0:3], (2, 0, 1)), window=window) out_y += window_height out_x += window_width if make_cog: create_cog(output_path, output_uri, tmp_dir) else: upload_or_copy(output_path, output_uri) @click.command() @click.argument('tile_schema') @click.argument('zoom') @click.argument('bounds') @click.argument('output_uri') @click.option('--make-cog', is_flag=True, default=False) def zxy2geotiff(tile_schema, zoom, bounds, output_uri, make_cog): """Generates a GeoTIFF of a bounded region from a ZXY tile server. TILE_SCHEMA: the URI schema for zxy tiles (ie. a slippy map tile server) of the form /tileserver-uri/{z}/{x}/{y}.png. If {-y} is used, the tiles are assumed to be indexed using TMS coordinates, where the y axis starts at the southernmost point. The URI can be for http, S3, or the local file system. ZOOM: the zoom level to use when retrieving tiles BOUNDS: a space-separated string containing min_lat, min_lng, max_lat, max_lng OUTPUT_URI: where to save the GeoTIFF. The URI can be for http, S3, or the local file system. """ bounds = [float(x) for x in bounds.split(' ')] _zxy2geotiff(tile_schema, int(zoom), bounds, output_uri, make_cog=make_cog) if __name__ == '__main__': zxy2geotiff()
0a3ca56c977088ca6e0697402dafb8661ab78d31
d08d1d0fd863e3121e27080ac5892bd39f0b11b8
/vlan-fabric/python/vlan_fabric/tenant.py
83582986a258d51be34d84e463ae50a8caff55a5
[ "MIT" ]
permissive
rrojasc/sandbox-nso
be4211dbedc3d87d8830616db593dac71c051e75
b44dce57904b916a570d0fe272c64cfe1f4c7575
refs/heads/master
2023-03-16T13:04:25.013628
2019-12-16T13:26:12
2019-12-16T13:26:12
null
0
0
null
null
null
null
UTF-8
Python
false
false
20,403
py
# -*- mode: python; python-indent: 4 -*- import ncs from ncs.application import Service # import resource_manager.id_allocator as id_allocator import ipaddress # ------------------------ # SERVICE CALLBACK EXAMPLE # ------------------------ class ServiceCallbacks(Service): # The create() callback is invoked inside NCS FASTMAP and # must always exist. @Service.create def cb_create(self, tctx, root, service, proplist): self.log.info("Service create(service=", service._path, ")") vars = ncs.template.Variables() vars.add("DUMMY", "127.0.0.1") template = ncs.template.Template(service) disable_trunk_negotiation = False # PHASE - Get Fabric Member Devices # primary_ip_address = root.devices.device[pair.primary].config.interface.mgmt["0"].ip.address.ipaddr switch_pairs = root.vlan_fabric[service.fabric].switch_pair switches = root.vlan_fabric[service.fabric].switch # Initialize with NONE border_pair = None self.log.info("Switches for Fabric {} are: ".format(service.fabric)) for pair in switch_pairs: self.log.info( "Pair: {} Primary {} Secondary {}".format( pair.name, pair.primary, pair.secondary ) ) if pair.layer3: border_pair = pair self.log.info( "Layer 3 Switch Pair is {} Primary {} Secondary {}".format( pair.name, pair.primary, pair.secondary ) ) self.log.info( "Switches for Fabric {} are {}".format(service.fabric, switches.__dict__) ) for switch in switches: self.log.info("Switch: {}".format(switch.device)) # PHASE - Get Fabric Interconnect Resources for Fabric fabric_interconnects = root.vlan_fabric[service.fabric].fabric_interconnect self.log.info("Fabric Interconnects for Fabric {} are:".format(service.fabric)) for fabric_interconnect in fabric_interconnects: self.log.info("FI: {}".format(fabric_interconnect.device)) # PHASE - Get VMwar DVS Resources for Fabric vswitches = root.vlan_fabric[service.fabric].vmware_dvs self.log.info( "VMware Distributed vSwitches for Fabric {} are:".format(service.fabric) ) for vswitch in vswitches: self.log.info( "vCenter {} 
Datacenter {} dVS {}".format( vswitch.vcenter, vswitch.datacenter, vswitch.dvs ) ) # PHASE - Configure Static Routes if configured if border_pair: routing_vars = ncs.template.Variables() routing_vars.add("VRFNAME", service.name) # PHASE - Static routes for route in service.static_routes: # self.log.info("Setting up static route for {} to {} in VRF {}".format(route.network, route.gateway, service.name)) routing_vars.add("STATIC_ROUTE_NETWORK", route.network) routing_vars.add("STATIC_ROUTE_GATEWAY", route.gateway) # PRIMARY self.log.info( "Setting up static route for {} to {} in VRF {} on switch_pair {} Primary Device {}".format( route.network, route.gateway, service.name, border_pair, border_pair.primary, ) ) routing_vars.add("DEVICE_NAME", border_pair.primary) self.log.info("routing_vars={}".format(routing_vars)) template.apply("vrf-static-routes", routing_vars) # Secondary if border_pair.secondary: self.log.info( "Setting up static route for {} to {} in VRF {} on switch_pair {} Primary Device {}".format( route.network, route.gateway, service.name, border_pair, border_pair.secondary, ) ) routing_vars.add("DEVICE_NAME", border_pair.secondary) template.apply("vrf-static-routes", routing_vars) else: self.log.info( "Note: Fabric {} has NO Layer 3 Border Pair.".format(service.fabric) ) # PHASE Process Each Network in Service for network in service.network: # PHASE - Add VLANS to all Fabric Switches self.log.info( "Adding VLAN {} for Network {}".format(network.name, network.vlanid) ) network_vars = ncs.template.Variables() network_vars.add("VLAN_ID", network.vlanid) network_vars.add("VLAN_NAME", network.name) for pair in switch_pairs: self.log.info( "Adding VLAN for Pair: {} Primary {} Secondary {}".format( pair.name, pair.primary, pair.secondary ) ) # PRIMARY network_vars.add("DEVICE_NAME", pair.primary) template.apply("vlan-new", network_vars) if pair.secondary: # Secondary network_vars.add("DEVICE_NAME", pair.secondary) template.apply("vlan-new", network_vars) for 
switch in switches: self.log.info("Adding VLAN for Switch: {}".format(switch.device)) network_vars.add("DEVICE_NAME", switch.device) template.apply("vlan-new", network_vars) # PHASE - Configure Layer 3 For Network # Check if layer3-on-fabric is configured for network if network.layer3_on_fabric: self.log.info( "Configuring Layer 3 for {} IP Network {} ".format( network.name, network.network ) ) ipnet = ipaddress.ip_network(network.network) hsrp_ipv4 = ipnet.network_address + 1 primary_ipv4 = ipnet.network_address + 2 secondary_ipv4 = ipnet.network_address + 3 network_vars.add("VRFNAME", service.name) network_vars.add("HSRP_GROUP", 1) network_vars.add("HSRP_IPV4", hsrp_ipv4) if network.build_route_neighbors: network_vars.add("BUILD_ROUTING_NEIGHBOR", "True") else: network_vars.add("BUILD_ROUTING_NEIGHBOR", "") # PRIMARY network_vars.add("DEVICE_NAME", border_pair.primary) network_vars.add( "SVI_IPV4", "{}/{}".format(primary_ipv4, ipnet.prefixlen) ) network_vars.add("HSRP_PRIORITY", 110) template.apply("vlan-layer3", network_vars) if network.dhcp_relay_address: network_vars.add("DHCP_RELAY_ADDRESS", network.dhcp_relay_address) self.log.info("Configuring DHCP Relay address {} for {} IP Network {} ".format( network.dhcp_relay_address, network.name, network.network ) ) template.apply("vlan-layer3-dhcp-relay", network_vars) if border_pair.secondary: # Secondary network_vars.add("DEVICE_NAME", border_pair.secondary) network_vars.add( "SVI_IPV4", "{}/{}".format(secondary_ipv4, ipnet.prefixlen) ) network_vars.add("HSRP_PRIORITY", 90) template.apply("vlan-layer3", network_vars) if network.dhcp_relay_address: network_vars.add("DHCP_RELAY_ADDRESS", network.dhcp_relay_address) self.log.info("Configuring DHCP Relay address {} for {} IP Network {} ".format( network.dhcp_relay_address, network.name, network.network ) ) template.apply("vlan-layer3-dhcp-relay", network_vars) else: self.log.info( "Skipping Layer 3 configuration in fabric for {} IP Network {} ".format( network.name, 
network.network ) ) # PHASE Process Connections for Network # PHASE Switch Connections for switch in network.connections.switch: self.log.info( "Adding Connections for Network {} on Switch {}".format( network.name, switch.device ) ) network_vars.add("DEVICE_NAME", switch.device) switch_platform = {} switch_platform["name"] = root.devices.device[ switch.device ].platform.name switch_platform["version"] = root.devices.device[ switch.device ].platform.version switch_platform["model"] = root.devices.device[ switch.device ].platform.model self.log.info("Switch Platform Info: {}".format(switch_platform)) # For old IOS that supported DTP, need to disable negotiation if ( switch_platform["model"] != "NETSIM" and switch_platform["name"] == "ios" and int(switch_platform["version"][0:2]) < 16 ): disable_trunk_negotiation = True else: disable_trunk_negotiation = False network_vars.add("DISABLE_TRUNK_NEGOTIATION", disable_trunk_negotiation) network_vars.add("MTU_SIZE", "9216") # PHASE Interfaces for interface in switch.interface: self.log.info( "Configuring Intereface {} for Network {} on Switch {}".format( interface.interface, network.name, switch.device ) ) network_vars.add("INTERFACE_ID", interface.interface) network_vars.add("DESCRIPTION", interface.description) network_vars.add("MODE", interface.mode) network_vars.add("MTU_SIZE", "9216") self.log.info("network_vars=", network_vars) template.apply("tenant_network_interface", network_vars) # PHASE Port-Channels for port_channel in switch.port_channel: self.log.info( "Configuring PortChannel {} for Network {} on Switch {}".format( port_channel.portchannel_id, network.name, switch.device ) ) network_vars.add("PORTCHANNEL_ID", port_channel.portchannel_id) network_vars.add("DESCRIPTION", port_channel.description) network_vars.add("MODE", port_channel.mode) network_vars.add("VPC", "") self.log.info("network_vars=", network_vars) template.apply("portchannel-interface", network_vars) # PHASE Port-Channel Member Interfaces for 
interface in port_channel.interface: self.log.info( "Adding Interface {} to Port-Channel {} on Network {} on Switch {}.".format( interface.interface, port_channel.portchannel_id, network.name, switch.device, ) ) network_vars.add("INTERFACE_ID", interface.interface) self.log.info("network_vars=", network_vars) template.apply("portchannel-member-interface", network_vars) # PHASE Switch Pair connections for pair in network.connections.switch_pair: self.log.info( "Adding Connections for Network {} on Switch Pair {}".format( network.name, pair.name ) ) # Lookup Pair from Fabric # switch_pairs = root.vlan_fabric[service.fabric].switch_pair this_pair = root.vlan_fabric[service.fabric].switch_pair[pair.name] self.log.info( "Primary {} Secondary {}".format( this_pair.primary, this_pair.secondary ) ) # Nexus Leaf Pairs Always False disable_trunk_negotiation = False network_vars.add("DISABLE_TRUNK_NEGOTIATION", disable_trunk_negotiation) # PHASE Interfaces for interface in pair.interface: self.log.info( "Configuring Intereface {} for Network {} on Pair {}".format( interface.interface, network.name, this_pair.name ) ) network_vars.add("INTERFACE_ID", interface.interface) network_vars.add("DESCRIPTION", interface.description) network_vars.add("MODE", interface.mode) network_vars.add("MTU_SIZE", "9216") # Primary network_vars.add("DEVICE_NAME", this_pair.primary) self.log.info("network_vars=", network_vars) template.apply("tenant_network_interface", network_vars) if this_pair.secondary: # Secondary network_vars.add("DEVICE_NAME", this_pair.secondary) self.log.info("network_vars=", network_vars) template.apply("tenant_network_interface", network_vars) # PHASE Port-Channels for port_channel in pair.port_channel: self.log.info( "Configuring Port-Channel {} for Network {} on Pair {}".format( port_channel.portchannel_id, network.name, this_pair.name ) ) network_vars.add("PORTCHANNEL_ID", port_channel.portchannel_id) network_vars.add("DESCRIPTION", port_channel.description) 
network_vars.add("MODE", port_channel.mode) network_vars.add("MTU_SIZE", "9216") network_vars.add("VPC", True) # Primary network_vars.add("DEVICE_NAME", this_pair.primary) self.log.info("network_vars=", network_vars) template.apply("portchannel-interface", network_vars) # Secondary network_vars.add("DEVICE_NAME", this_pair.secondary) self.log.info("network_vars=", network_vars) template.apply("portchannel-interface", network_vars) # PHASE Port-Channel Member Interfaces for interface in port_channel.interface: self.log.info( "Adding Interface {} to Port-Channel {} on Network {} on Pair {}.".format( interface.interface, port_channel.portchannel_id, network.name, this_pair.name, ) ) network_vars.add("INTERFACE_ID", interface.interface) # Primary network_vars.add("DEVICE_NAME", this_pair.primary) self.log.info("network_vars=", network_vars) template.apply("portchannel-member-interface", network_vars) # Secondary network_vars.add("DEVICE_NAME", this_pair.secondary) self.log.info("network_vars=", network_vars) template.apply("portchannel-member-interface", network_vars) # PHASE Fabric Interconnects for fabric_interconnect in fabric_interconnects: self.log.info( "Configuring Network {} on Fabric Interconnect {}".format( network.name, fabric_interconnect.device ) ) ucs_vars = ncs.template.Variables() ucs_vars.add("DEVICE_NAME", fabric_interconnect.device) ucs_vars.add("VLAN_NAME", network.name) ucs_vars.add("VLAN_ID", network.vlanid) # PHASE - Add VLAN to Configuration self.log.info( "Adding VLAN {} ({}) on Fabric Interconnect {}".format( network.name, network.vlanid, fabric_interconnect.device ) ) self.log.info("ucs_vars=", ucs_vars) template.apply("ucs-vlan-setup", ucs_vars) # PHASE - Update vnic-templates for vnic_template in fabric_interconnect.vnic_template_trunks: ucs_vars.add("UCS_ORG", vnic_template.org) ucs_vars.add("UCS_VNIC_TEMPLATE", vnic_template.vnic_template) self.log.info( "Adding VLAN {} ({}) to vnic-template {}/{} on Fabric Interconnect {}".format( 
network.name, network.vlanid, vnic_template.org, vnic_template.vnic_template, fabric_interconnect.device, ) ) self.log.info("ucs_vars=", ucs_vars) template.apply("ucs-vnic-template-vlan-setup", ucs_vars) # PHASE - VMwar Distributed Virtual Switch for vswitch in vswitches: self.log.info( "Configuring Network {} on DVS: {}/{}/{}".format( network.name, vswitch.vcenter, vswitch.datacenter, vswitch.dvs ) ) dvs_vars = ncs.template.Variables() dvs_vars.add("DEVICE_NAME", vswitch.vcenter) dvs_vars.add("VLAN_NAME", network.name) dvs_vars.add("VLAN_ID", network.vlanid) dvs_vars.add("VMWARE_DATACENTER", vswitch.datacenter) dvs_vars.add("VMWARE_DVS", vswitch.dvs) self.log.info("dvs_vars=", dvs_vars) template.apply("vmware-dvs-portprofile-setup", dvs_vars) # The pre_modification() and post_modification() callbacks are optional, # and are invoked outside FASTMAP. pre_modification() is invoked before # create, update, or delete of the service, as indicated by the enum # ncs_service_operation op parameter. Conversely # post_modification() is invoked after create, update, or delete # of the service. These functions can be useful e.g. for # allocations that should be stored and existing also when the # service instance is removed. # @Service.pre_lock_create # def cb_pre_lock_create(self, tctx, root, service, proplist): # self.log.info('Service plcreate(service=', service._path, ')') # @Service.pre_modification # def cb_pre_modification(self, tctx, op, kp, root, proplist): # self.log.info('Service premod(service=', kp, ')') # @Service.post_modification # def cb_post_modification(self, tctx, op, kp, root, proplist): # self.log.info('Service premod(service=', kp, ')')
30459cc5e6a093410d325a173ea9cba76452b99a
3b2940c38412e5216527e35093396470060cca2f
/top/api/rest/HotelSoldOrdersIncrementGetRequest.py
08229a2b1227b49f5b2a06b967fb59b0da52b1e9
[]
no_license
akingthink/goods
842eb09daddc2611868b01ebd6e330e5dd7d50be
ffdb5868a8df5c2935fc6142edcdf4c661c84dca
refs/heads/master
2021-01-10T14:22:54.061570
2016-03-04T09:48:24
2016-03-04T09:48:24
45,093,302
0
0
null
null
null
null
UTF-8
Python
false
false
523
py
'''
Created by auto_sdk on 2015-01-20 12:44:31
'''
from top.api.base import RestApi


class HotelSoldOrdersIncrementGetRequest(RestApi):
    """Request object for the ``taobao.hotel.sold.orders.increment.get`` TOP API."""

    def __init__(self, domain='gw.api.taobao.com', port=80):
        """Bind the gateway endpoint and initialize every optional
        query parameter to None (unset)."""
        RestApi.__init__(self, domain, port)
        # Optional request parameters; RestApi serializes only the ones
        # that are later assigned non-None values.
        for field in ('end_modified', 'need_guest', 'need_message',
                      'page_no', 'page_size', 'start_modified',
                      'status', 'use_has_next'):
            setattr(self, field, None)

    def getapiname(self):
        """Return the TOP API method name this request maps to."""
        return 'taobao.hotel.sold.orders.increment.get'
24bce9adfd9986c448487e74e16658ad17c265dd
786de89be635eb21295070a6a3452f3a7fe6712c
/poster/tags/V00-00-01/SConscript
b190c174d8a3bdc4f9abefb1be153557a06627e1
[]
no_license
connectthefuture/psdmrepo
85267cfe8d54564f99e17035efe931077c8f7a37
f32870a987a7493e7bf0f0a5c1712a5a030ef199
refs/heads/master
2021-01-13T03:26:35.494026
2015-09-03T22:22:11
2015-09-03T22:22:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,985
#------------------------------------------------------------------------ # File and Version Information: # $Id$ # # Description: # SConscript file for package poster #------------------------------------------------------------------------ # Do not delete following line, it must be present in # SConscript file for any SIT project Import('*') import os from SConsTools.standardExternalPackage import standardExternalPackage # # For the standard external packages which contain includes, libraries, # and applications it is usually sufficient to call standardExternalPackage() # giving few keyword arguments. Here is a complete list of arguments: # # PREFIX - top directory of the external package # INCDIR - include directory, absolute or relative to PREFIX # INCLUDES - include files to copy (space-separated list of patterns) # PYDIR - Python src directory, absolute or relative to PREFIX # LINKPY - Python files to link (patterns), or all files if not present # PYDIRSEP - if present and evaluates to True installs python code to a # separate directory arch/$SIT_ARCH/python/<package> # LIBDIR - libraries directory, absolute or relative to PREFIX # COPYLIBS - library names to copy # LINKLIBS - library names to link, or all libs if LINKLIBS and COPYLIBS are empty # BINDIR - binaries directory, absolute or relative to PREFIX # LINKBINS - binary names to link, or all binaries if not present # PKGLIBS - names of libraries that have to be linked for this package # DEPS - names of other packages that we depend upon # PKGINFO - package information, such as RPM package name # here is an example setting up a fictional package pkg = "poster" pkg_ver = "0.8.1" PREFIX = os.path.join('$SIT_EXTERNAL_SW', pkg, pkg_ver) PYDIR = os.path.join("lib", '$PYTHON', "site-packages", pkg) PYDIRSEP = True PKGINFO = (pkg, pkg_ver, '$PYTHON', '$SIT_ARCH.found') standardExternalPackage ( pkg, **locals() )
[ "[email protected]@b967ad99-d558-0410-b138-e0f6c56caec7" ]
[email protected]@b967ad99-d558-0410-b138-e0f6c56caec7
9217751689c20a44cbffa776fd1f9c8aabb36593
5a396f14b3689273aaf1a6e20dcb0853d78a9f04
/GetSharedWithDomainTeamDriveACLs.py
0c114d9b0daa023ea4ef045d01a424197485f1cf
[]
no_license
NosIreland/GAM-Scripts3
642b4dd827189352afd8357a41b576d6acf159bc
de3ee3007e6906c5b6d28fef8aea27827646db00
refs/heads/master
2023-03-04T21:58:44.594405
2021-02-18T14:39:20
2021-02-18T14:39:20
null
0
0
null
null
null
null
UTF-8
Python
false
false
5,744
py
#!/usr/bin/env python3
"""
# Purpose: For a Google Drive User(s), delete all drive file ACLs for Team Drive files shared with a list of specified domains
# Note: This script requires Advanced GAM:
#       https://github.com/taers232c/GAMADV-XTD3
# Customize: Set DOMAIN_LIST and DESIRED_ALLOWFILEDISCOVERY
# Python: Use python or python3 below as appropriate to your system; verify that you have version 3
#  $ python -V   or   python3 -V
#  Python 3.x.y
# Usage:
# For all Team Drives, start at step 1; For Team Drives selected by user/group/OU, start at step 7
# All Team Drives
# 1: Get all Team Drives.
#  $ gam redirect csv ./TeamDrives.csv print teamdrives fields id,name
# 2: Get ACLs for all Team Drives
#  $ gam redirect csv ./TeamDriveACLs.csv multiprocess csv TeamDrives.csv gam print drivefileacls ~id fields emailaddress,role,type
# 3: Customize GetTeamDriveOrganizers.py for this task:
#    Set DOMAIN_LIST as required
#    Set ONE_ORGANIZER = True
#    Set SHOW_GROUP_ORGANIZERS = False
#    Set SHOW_USER_ORGANIZERS = True
# 4: From that list of ACLs, output a CSV file with headers "id,name,organizers"
#    that shows the organizers for each Team Drive
#  $ python3 GetTeamDriveOrganizers.py TeamDriveACLs.csv TeamDrives.csv TeamDriveOrganizers.csv
# 5: Get ACLs for all team drive files; you can use permission matching to narrow the number of files listed; add to the end of the command line
#    DESIRED_ALLOWFILEDISCOVERY = 'Any' - pm type domain em
#    DESIRED_ALLOWFILEDISCOVERY = 'True' - pm type domain allowfilediscovery true em
#    DESIRED_ALLOWFILEDISCOVERY = 'False' - pm type domain allowfilediscovery false em
#  $ gam redirect csv ./filelistperms.csv multiprocess csv TeamDriveOrganizers.csv gam user ~organizers print filelist select teamdriveid ~id fields teamdriveid,id,title,permissions pm type domain em
# 6: Go to step 11
# Selected Team Drives
# 7: If want Team Drives for a specific set of organizers, replace <UserTypeEntity> with your user selection in the command below
#  $ gam redirect csv ./AllTeamDrives.csv <UserTypeEntity> print teamdrives role organizer fields id,name
# 8: Customize DeleteDuplicateRows.py for this task:
#    Set ID_FIELD = 'id'
# 9: Delete duplicate Team Drives (some may have multiple organizers).
#  $ python3 DeleteDuplicateRows.py ./AllTeamDrives.csv ./TeamDrives.csv
# 10: Get ACLs for all team drive files; you can use permission matching to narrow the number of files listed; add to the end of the command line
#    DESIRED_ALLOWFILEDISCOVERY = 'Any' - pm type domain em
#    DESIRED_ALLOWFILEDISCOVERY = 'True' - pm type domain allowfilediscovery true em
#    DESIRED_ALLOWFILEDISCOVERY = 'False' - pm type domain allowfilediscovery false em
#  $ gam redirect csv ./filelistperms.csv multiprocess csv TeamDrives.csv gam user ~User print filelist select teamdriveid ~id fields teamdriveid,id,title,permissions pm type domain em
# Common code
# 11: From that list of ACLs, output a CSV file with headers "Owner,driveFileId,driveFileTitle,permissionId,role,domain,allowFileDiscovery"
#    that lists the driveFileIds and permissionIds for all ACLs shared with the selected domains.
#    (n.b., driveFileTitle, role, domain and allowFileDiscovery are not used in the next step, they are included for documentation purposes)
#  $ python3 GetSharedWithDomainTeamDriveACLs.py filelistperms.csv deleteperms.csv
# 12: Inspect deleteperms.csv, verify that it makes sense and then proceed
# 13: Delete the ACLs
#  $ gam csv deleteperms.csv gam user "~Owner" delete drivefileacl "~driveFileId" "~permissionId"
"""

import csv
import re
import sys

# Column names used for the file title; older GAM exports use 'title'.
FILE_NAME = 'name'
ALT_FILE_NAME = 'title'

# If you want to limit finding ACLS for a specific list of domains, use the list below, e.g., DOMAIN_LIST = ['domain.com',]
DOMAIN_LIST = ['domain1.com', 'domain2.com',]
# NOTE: this second assignment deliberately overrides the example above;
# an empty list means "match any domain". Edit one of the two as needed.
DOMAIN_LIST = []

# Specify desired value of allowFileDiscovery field: True, False, Any (matches True and False)
DESIRED_ALLOWFILEDISCOVERY = 'Any'

QUOTE_CHAR = '"'  # Adjust as needed
LINE_TERMINATOR = '\n'  # On Windows, you probably want '\r\n'

# Matches flattened permission columns such as 'permissions.3.type',
# capturing the permission index.
PERMISSIONS_N_TYPE = re.compile(r"permissions.(\d+).type")

# Output: second argv item, or stdout when absent / '-'.
if (len(sys.argv) > 2) and (sys.argv[2] != '-'):
  outputFile = open(sys.argv[2], 'w', encoding='utf-8', newline='')
else:
  outputFile = sys.stdout
outputCSV = csv.DictWriter(outputFile,
                           ['Owner', 'driveFileId', 'driveFileTitle', 'permissionId',
                            'role', 'domain', 'allowFileDiscovery'],
                           lineterminator=LINE_TERMINATOR, quotechar=QUOTE_CHAR)
outputCSV.writeheader()

# Input: first argv item, or stdin when absent / '-'.
if (len(sys.argv) > 1) and (sys.argv[1] != '-'):
  inputFile = open(sys.argv[1], 'r', encoding='utf-8')
else:
  inputFile = sys.stdin

# Scan every row; each 'permissions.N.type' == 'domain' column marks one
# domain-shared ACL whose sibling columns hold its details.
for row in csv.DictReader(inputFile, quotechar=QUOTE_CHAR):
  for k, v in iter(row.items()):
    mg = PERMISSIONS_N_TYPE.match(k)
    if mg and v == 'domain':
      permissions_N = mg.group(1)
      domain = row[f'permissions.{permissions_N}.domain']
      # Older exports expose 'withLink' instead of 'allowFileDiscovery';
      # withLink == 'False' is equivalent to allowFileDiscovery == 'True'.
      allowFileDiscovery = row.get(f'permissions.{permissions_N}.allowFileDiscovery',
                                   str(row.get(f'permissions.{permissions_N}.withLink') == 'False'))
      if (not DOMAIN_LIST or domain in DOMAIN_LIST) and (DESIRED_ALLOWFILEDISCOVERY in ('Any', allowFileDiscovery)):
        outputCSV.writerow({'Owner': row['Owner'],
                            'driveFileId': row['id'],
                            'driveFileTitle': row.get(FILE_NAME, row.get(ALT_FILE_NAME, 'Unknown')),
                            'permissionId': f'id:{row[f"permissions.{permissions_N}.id"]}',
                            'role': row[f'permissions.{permissions_N}.role'],
                            'domain': domain,
                            'allowFileDiscovery': allowFileDiscovery})

if inputFile != sys.stdin:
  inputFile.close()
if outputFile != sys.stdout:
  outputFile.close()
dc5f47e41dd896ee44f05aa76d5189db027ffe70
d2c4151eff768af64946ababc2e41c13d8973cd3
/ABC146/a.py
c10b5d53d83522345cefe135c52ff627ef03099c
[]
no_license
Intel-out-side/AtCoder
2de19b71981247135432aed2d6d9c2a16c3ab7f0
0c419d2df15fff02032432cb1b1323612484e16e
refs/heads/master
2022-06-23T04:21:12.886072
2022-06-13T14:39:07
2022-06-13T14:39:07
235,240,853
0
0
null
null
null
null
UTF-8
Python
false
false
181
py
# AtCoder ABC146 A: given a day abbreviation, print how many days remain
# until the next Sunday (a full 7 when today is Sunday itself).
WEEKDAYS = ["MON", "TUE", "WED", "THU", "FRI", "SAT", "SUN"]

day = str(input())

# Sunday wraps to a whole week; any other day counts forward to Sunday.
answer = 7 if day == "SUN" else WEEKDAYS.index("SUN") - WEEKDAYS.index(day)
print(answer)
cf6a0c4833d16887ee9ee3e5afefb8ed33431c13
eacff46eda2c6b509449979a16002b96d4645d8e
/Collections-a-installer/community-general-2.4.0/tests/integration/targets/launchd/files/ansible_test_service.py
87a23fc47d816bb4b2deacd93a3bcfb45fbf1a9f
[ "MIT", "GPL-3.0-only", "GPL-3.0-or-later" ]
permissive
d-amien-b/simple-getwordpress
5e6d4d15d5f87124ab591e46b63fec552998fdc3
da90d515a0aa837b633d50db4d91d22b031c04a2
refs/heads/master
2023-04-08T22:13:37.347545
2021-04-06T09:25:51
2021-04-06T09:25:51
351,698,069
0
0
MIT
2021-03-31T16:16:45
2021-03-26T07:30:00
HTML
UTF-8
Python
false
false
594
py
#!/usr/bin/env python from __future__ import absolute_import, division, print_function __metaclass__ = type import sys if __name__ == '__main__': if sys.version_info[0] >= 3: import http.server import socketserver PORT = int(sys.argv[1]) Handler = http.server.SimpleHTTPRequestHandler httpd = socketserver.TCPServer(("", PORT), Handler) httpd.serve_forever() else: import mimetypes mimetypes.init() mimetypes.add_type('application/json', '.json') import SimpleHTTPServer SimpleHTTPServer.test()
4cbf6e3fcafd24fc240850a479e41ddfe6d770ac
d5b339d5b71c2d103b186ed98167b0c9488cff09
/marvin/cloudstackAPI/deleteCondition.py
e6c1d13261e1a92194f4e5a345cf1351557e1bd8
[ "Apache-2.0" ]
permissive
maduhu/marvin
3e5f9b6f797004bcb8ad1d16c7d9c9e26a5e63cc
211205ae1da4e3f18f9a1763f0f8f4a16093ddb0
refs/heads/master
2020-12-02T17:45:35.685447
2017-04-03T11:32:11
2017-04-03T11:32:11
null
0
0
null
null
null
null
UTF-8
Python
false
false
705
py
"""Removes a condition""" from baseCmd import * from baseResponse import * class deleteConditionCmd (baseCmd): typeInfo = {} def __init__(self): self.isAsync = "true" """the ID of the condition.""" """Required""" self.id = None self.typeInfo['id'] = 'uuid' self.required = ["id", ] class deleteConditionResponse (baseResponse): typeInfo = {} def __init__(self): """any text associated with the success or failure""" self.displaytext = None self.typeInfo['displaytext'] = 'string' """true if operation is executed successfully""" self.success = None self.typeInfo['success'] = 'boolean'
9fe7a328b27380a9afc1f19106fa9edd8aa1033c
21208873652ce9a35035801cea488004e337b07b
/data_loader/__init__.py
784c4dc41287ec0e8680637c3b93983f20eae44f
[ "Apache-2.0" ]
permissive
zlszhonglongshen/crnn.pytorch
55321a6764a6143be7ab9d2c6b3bcafcdd9470e7
bf7a7c62376eee93943ca7c68e88e3d563c09aa8
refs/heads/master
2022-11-07T22:57:28.983335
2020-06-19T03:01:35
2020-06-19T03:01:35
null
0
0
null
null
null
null
UTF-8
Python
false
false
2,398
py
# -*- coding: utf-8 -*-
# @Time    : 18-11-16 下午5:46
# @Author  : zhoujun
import copy

from torch.utils.data import DataLoader
from torchvision import transforms


def get_dataset(data_path, module_name, transform, dataset_args):
    """Build a training dataset.

    :param data_path: list of annotation files; each line is stored as
        'path/to/img\\tlabel'
    :param module_name: name of the custom dataset class in
        ``data_loader.dataset`` (e.g. ``ImageDataset``)
    :param transform: transforms applied to each sample (may be None)
    :param dataset_args: extra keyword arguments forwarded to the dataset class
    :return: the constructed Dataset object
    """
    from . import dataset
    s_dataset = getattr(dataset, module_name)(transform=transform, data_path=data_path,
                                              **dataset_args)
    return s_dataset


def get_transforms(transforms_config):
    """Build a ``transforms.Compose`` from a list of
    ``{'type': <name>, 'args': {...}}`` entries; 'args' is optional."""
    tr_list = []
    for item in transforms_config:
        args = item['args'] if 'args' in item else {}
        cls = getattr(transforms, item['type'])(**args)
        tr_list.append(cls)
    return transforms.Compose(tr_list)


def get_dataloader(module_config, num_label):
    """Build the training data loader from a config dict.

    :param module_config: dict with a 'dataset' section ({'type', 'args'})
        and a 'loader' section of DataLoader kwargs, or None to skip
    :param num_label: number of labels, injected into the dataset args
    :return: a DataLoader (with ``dataset_len`` attribute) or a
        ``Batch_Balanced_Dataset`` when several datasets with matching
        ratios are configured; None when ``module_config`` is None
    """
    if module_config is None:
        return None
    # Deep-copy so popping keys below does not mutate the caller's config.
    config = copy.deepcopy(module_config)
    dataset_args = config['dataset']['args']
    dataset_args['num_label'] = num_label
    if 'transforms' in dataset_args:
        img_transfroms = get_transforms(dataset_args.pop('transforms'))
    else:
        img_transfroms = None
    # Build one dataset per annotation file.
    dataset_name = config['dataset']['type']
    data_path_list = dataset_args.pop('data_path')
    if 'data_ratio' in dataset_args:
        data_ratio = dataset_args.pop('data_ratio')
    else:
        data_ratio = [1.0]

    _dataset_list = []
    for data_path in data_path_list:
        _dataset_list.append(get_dataset(data_path=data_path, module_name=dataset_name,
                                         dataset_args=dataset_args, transform=img_transfroms))
    # BUGFIX: 'data_ratio' was popped from dataset_args above, so the original
    # condition read dataset_args['data_ratio'] and raised KeyError whenever
    # more than one ratio was configured; compare the popped list directly.
    if len(data_ratio) > 1 and len(data_ratio) == len(_dataset_list):
        from . import dataset
        loader = dataset.Batch_Balanced_Dataset(dataset_list=_dataset_list, ratio_list=data_ratio,
                                                loader_args=config['loader'])
    else:
        _dataset = _dataset_list[0]
        loader = DataLoader(dataset=_dataset, **config['loader'])
        loader.dataset_len = len(_dataset)
    return loader
b8954b6cea35abb939ed06c8276b23e8b81f83d3
b2e340f22a7f613dc33ea361ba87a393d65b723c
/LogicAnalyzer/config/config.py
f19d2b4e3d649ece283274df9b734d2dc8094f99
[ "MIT" ]
permissive
CospanDesign/logic-analyzer
6369cfc423f3fae050f9ab784a6ae94003422654
284ea339c001b4845a46fcb0672511487271c9c3
refs/heads/master
2021-01-20T18:58:53.477152
2016-06-24T02:22:04
2016-06-24T02:22:04
61,488,220
1
1
null
null
null
null
UTF-8
Python
false
false
2,335
py
import logging
import json
import sys

# Capability keys a logic-analyzer device may support.
TRIGGER = "trigger"
TRIGGER_MASK = "trigger_mask"
TRIGGER_EDGE = "trigger_edge"
TRIGGER_BOTH_EDGE = "both_edges"
TRIGGER_REPEAT = "repeat"
TRIGGER_AFTER = "trigger_after"

CAPABILITY_NAMES = [
    TRIGGER,
    TRIGGER_MASK,
    TRIGGER_EDGE,
    TRIGGER_BOTH_EDGE,
    TRIGGER_REPEAT,
    TRIGGER_AFTER
]

# Callback hook names the controller may register on a config.
CALLBACK_START = "start"
CALLBACK_STOP = "stop"
CALLBACK_FORCE = "force"
CALLBACK_UPDATE = "update"
CALLBACK_GET_SIZE = "get_size"
CALLBACK_CLOSE = "close"

CALLBACK_NAMES = [
    CALLBACK_START,
    CALLBACK_STOP,
    CALLBACK_FORCE,
    CALLBACK_UPDATE,
    CALLBACK_GET_SIZE,
    CALLBACK_CLOSE
]


class Config(object):
    """Base configuration interface for a logic-analyzer device.

    Subclasses are expected to override get_name(), ready() and captured(),
    and to populate self.caps / self.channels for their hardware.
    """

    @staticmethod
    def get_name():
        """Human-readable device name; subclasses must override."""
        return "Invalid Config, make your own!!"

    def __init__(self):
        self.log = logging.getLogger("LAX")
        # Capability name -> value; None means "not supported".
        self.caps = {}
        # Callback name -> callable registered by the controller.
        self.callbacks = {}
        # Channel descriptions (see get_channel_dict).
        self.channels = []
        for name in CAPABILITY_NAMES:
            self.caps[name] = None
        for name in CALLBACK_NAMES:
            self.callbacks[name] = None

    def get_channel_dict(self):
        """
        Return the channel name-mapping structure.

        NOTE(review): despite the name this returns ``self.channels``,
        which is initialized as a list, not a dict.
        """
        return self.channels

    def get_capabilities(self):
        """
        Return a list of capabilities (strings) that this device supports
        (i.e. those whose value is not None).
        """
        names = []
        for name in self.caps:
            if self.caps[name] is not None:
                names.append(name)
        return names

    def has_capability(self, name):
        """
        Return True if the device supports the named capability.

        Raises KeyError if *name* is not a known capability at all.
        """
        return self.caps[name] is not None

    def get_value(self, name):
        "Get the value of a capability"
        if not self.has_capability(name):
            raise AssertionError("LAX Does not have capability")
        else:
            return self.caps[name]

    def set_callback(self, name, func):
        """Register *func* under the callback slot *name*."""
        self.log.debug("Setting callback for: %s" % name)
        self.callbacks[name] = func

    def ready(self):
        """The controller tells the config interface it's ready"""
        # BUGFIX: sys was referenced here but never imported, so this raised
        # NameError instead of the intended AssertionError; 'import sys' added.
        raise AssertionError("%s not implemented" % sys._getframe().f_code.co_name)

    def captured(self):
        """callback when capture occurs"""
        raise AssertionError("%s not implemented" % sys._getframe().f_code.co_name)
1935bfa537c5f257092b4e5689d56e2394be68bb
a09c10c29478fed167c94d83d5dff9371f9a1680
/Client.py
ec5149235aa6288eed9ea16cd6590f770fc45567
[]
no_license
batra98/Distributed-Web-Cache
83e208689b18b95724dd0ba657b4ef89e9054d2a
7e08dfe4dd6739c779c59da3ab7301f3cb33af6a
refs/heads/master
2022-11-28T05:21:33.220922
2020-08-07T10:15:32
2020-08-07T10:15:32
285,793,260
2
0
null
2020-08-07T09:41:56
2020-08-07T09:41:56
null
UTF-8
Python
false
false
1,157
py
import socket
import sys


def send(ip, port, message):
    """Open a TCP connection to (ip, port), send *message* as ASCII,
    print the reply and return it split into at most two tokens."""
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as conn:
        conn.connect((ip, port))
        conn.sendall(bytes(message, 'ascii'))
        response = str(conn.recv(1024), 'ascii')
    print("Received: {}".format(response))
    return response.split(None, 1)


def get(ip, port, key):
    """Fetch the value stored under *key*."""
    return send(ip, port, f"get {key}")


def add(ip, port, key, data):
    """Store *data* under *key*."""
    return send(ip, port, f"add {key} {data}")


def add_node(ip, port, key):
    """Add a cache node identified by *key* to the ring."""
    return send(ip, port, f"addnode {key}")


def rm_node(ip, port, key):
    """Remove the cache node identified by *key* from the ring."""
    return send(ip, port, f"rmnode {key}")


def stats(ip, port):
    """Request server statistics."""
    return send(ip, port, "stats")


def performance(ip, port):
    """Request performance metrics."""
    return send(ip, port, "performance")


def test_load_balancing(ip, port, num_node, num_data):
    """Run the server-side load-balancing test with the given sizes."""
    return send(ip, port, f"test {num_node} {num_data}")


def clean(ip, port):
    """Clear the cache on the server."""
    return send(ip, port, "clean")


if __name__ == "__main__":
    # Interactive mode: forward each typed command verbatim to the server.
    ip, port = sys.argv[1], int(sys.argv[2])
    while True:
        command = input("> ")
        send(ip, port, command)
02745dd02ec7954ea531da8ddfb292e43a976771
8a102033a266d39128e4b64aa0780cf67055e196
/1330.py
3fe9a718323d5727aeb4c2c1501dafb25b860ada
[]
no_license
yuseungwoo/baekjoon
4dec0798b8689b9378121b9d178713c9cf14a53f
099031e2c4401e27edcdc05bd6c9e6a558b09bb9
refs/heads/master
2020-09-03T15:25:40.764723
2018-10-08T02:35:27
2018-10-08T02:35:27
null
0
0
null
null
null
null
UTF-8
Python
false
false
129
py
# coding: utf-8
# BOJ 1330: read two integers and print their ordering as '>', '<' or '=='.
a, b = map(int, input().split())

if a > b:
    print('>')
elif a < b:
    print('<')
else:
    print('==')
2fea730fbc2ed8ead8cdf20b0fe1527890efd6c7
eee6dd18897d3118f41cb5e6f93f830e06fbfe2f
/venv/lib/python3.6/site-packages/scipy/sparse/bsr.py
1627132c92f71edcadf2178965702a5a2e4adba9
[]
no_license
georgeosodo/ml
2148ecd192ce3d9750951715c9f2bfe041df056a
48fba92263e9295e9e14697ec00dca35c94d0af0
refs/heads/master
2020-03-14T11:39:58.475364
2018-04-30T13:13:01
2018-04-30T13:13:01
131,595,044
0
0
null
null
null
null
UTF-8
Python
false
false
23,024
py
"""Compressed Block Sparse Row matrix format""" __docformat__ = "restructuredtext en" __all__ = ['bsr_matrix', 'isspmatrix_bsr'] from warnings import warn import numpy as np from .data import _data_matrix, _minmax_mixin from .compressed import _cs_matrix from .base import isspmatrix, _formats, spmatrix from .sputils import isshape, getdtype, to_native, upcast, get_index_dtype from . import _sparsetools from ._sparsetools import (bsr_matvec, bsr_matvecs, csr_matmat_pass1, bsr_matmat_pass2, bsr_transpose, bsr_sort_indices) class bsr_matrix(_cs_matrix, _minmax_mixin): """Block Sparse Row matrix This can be instantiated in several ways: bsr_matrix(D, [blocksize=(R,C)]) where D is a dense matrix or 2-D ndarray. bsr_matrix(S, [blocksize=(R,C)]) with another sparse matrix S (equivalent to S.tobsr()) bsr_matrix((M, N), [blocksize=(R,C), dtype]) to construct an empty matrix with shape (M, N) dtype is optional, defaulting to dtype='d'. bsr_matrix((data, ij), [blocksize=(R,C), shape=(M, N)]) where ``data`` and ``ij`` satisfy ``a[ij[0, k], ij[1, k]] = data[k]`` bsr_matrix((data, indices, indptr), [shape=(M, N)]) is the standard BSR representation where the block column indices for row i are stored in ``indices[indptr[i]:indptr[i+1]]`` and their corresponding block values are stored in ``data[ indptr[i]: indptr[i+1] ]``. If the shape parameter is not supplied, the matrix dimensions are inferred from the index arrays. Attributes ---------- dtype : dtype Data type of the matrix shape : 2-tuple Shape of the matrix ndim : int Number of dimensions (this is always 2) nnz Number of nonzero elements data Data array of the matrix indices BSR format index array indptr BSR format index pointer array blocksize Block size of the matrix has_sorted_indices Whether indices are sorted Notes ----- Sparse matrices can be used in arithmetic operations: they support addition, subtraction, multiplication, division, and matrix power. 
**Summary of BSR format** The Block Compressed Row (BSR) format is very similar to the Compressed Sparse Row (CSR) format. BSR is appropriate for sparse matrices with dense sub matrices like the last example below. Block matrices often arise in vector-valued finite element discretizations. In such cases, BSR is considerably more efficient than CSR and CSC for many sparse arithmetic operations. **Blocksize** The blocksize (R,C) must evenly divide the shape of the matrix (M,N). That is, R and C must satisfy the relationship ``M % R = 0`` and ``N % C = 0``. If no blocksize is specified, a simple heuristic is applied to determine an appropriate blocksize. Examples -------- >>> from scipy.sparse import bsr_matrix >>> bsr_matrix((3, 4), dtype=np.int8).toarray() array([[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]], dtype=int8) >>> row = np.array([0, 0, 1, 2, 2, 2]) >>> col = np.array([0, 2, 2, 0, 1, 2]) >>> data = np.array([1, 2, 3 ,4, 5, 6]) >>> bsr_matrix((data, (row, col)), shape=(3, 3)).toarray() array([[1, 0, 2], [0, 0, 3], [4, 5, 6]]) >>> indptr = np.array([0, 2, 3, 6]) >>> indices = np.array([0, 2, 2, 0, 1, 2]) >>> data = np.array([1, 2, 3, 4, 5, 6]).repeat(4).reshape(6, 2, 2) >>> bsr_matrix((data,indices,indptr), shape=(6, 6)).toarray() array([[1, 1, 0, 0, 2, 2], [1, 1, 0, 0, 2, 2], [0, 0, 0, 0, 3, 3], [0, 0, 0, 0, 3, 3], [4, 4, 5, 5, 6, 6], [4, 4, 5, 5, 6, 6]]) """ format = 'bsr' def __init__(self, arg1, shape=None, dtype=None, copy=False, blocksize=None): _data_matrix.__init__(self) if isspmatrix(arg1): if isspmatrix_bsr(arg1) and copy: arg1 = arg1.copy() else: arg1 = arg1.tobsr(blocksize=blocksize) self._set_self(arg1) elif isinstance(arg1,tuple): if isshape(arg1): # it's a tuple of matrix dimensions (M,N) self.shape = arg1 M,N = self.shape # process blocksize if blocksize is None: blocksize = (1,1) else: if not isshape(blocksize): raise ValueError('invalid blocksize=%s' % blocksize) blocksize = tuple(blocksize) self.data = np.zeros((0,) + blocksize, 
getdtype(dtype, default=float)) R,C = blocksize if (M % R) != 0 or (N % C) != 0: raise ValueError('shape must be multiple of blocksize') # Select index dtype large enough to pass array and # scalar parameters to sparsetools idx_dtype = get_index_dtype(maxval=max(M//R, N//C, R, C)) self.indices = np.zeros(0, dtype=idx_dtype) self.indptr = np.zeros(M//R + 1, dtype=idx_dtype) elif len(arg1) == 2: # (data,(row,col)) format from .coo import coo_matrix self._set_self(coo_matrix(arg1, dtype=dtype).tobsr(blocksize=blocksize)) elif len(arg1) == 3: # (data,indices,indptr) format (data, indices, indptr) = arg1 # Select index dtype large enough to pass array and # scalar parameters to sparsetools maxval = 1 if shape is not None: maxval = max(shape) if blocksize is not None: maxval = max(maxval, max(blocksize)) idx_dtype = get_index_dtype((indices, indptr), maxval=maxval, check_contents=True) self.indices = np.array(indices, copy=copy, dtype=idx_dtype) self.indptr = np.array(indptr, copy=copy, dtype=idx_dtype) self.data = np.array(data, copy=copy, dtype=getdtype(dtype, data)) else: raise ValueError('unrecognized bsr_matrix constructor usage') else: # must be dense try: arg1 = np.asarray(arg1) except: raise ValueError("unrecognized form for" " %s_matrix constructor" % self.format) from .coo import coo_matrix arg1 = coo_matrix(arg1, dtype=dtype).tobsr(blocksize=blocksize) self._set_self(arg1) if shape is not None: self.shape = shape # spmatrix will check for errors else: if self.shape is None: # shape not already set, try to infer dimensions try: M = len(self.indptr) - 1 N = self.indices.max() + 1 except: raise ValueError('unable to infer matrix dimensions') else: R,C = self.blocksize self.shape = (M*R,N*C) if self.shape is None: if shape is None: # TODO infer shape here raise ValueError('need to infer shape') else: self.shape = shape if dtype is not None: self.data = self.data.astype(dtype) self.check_format(full_check=False) def check_format(self, full_check=True): """check 
whether the matrix format is valid *Parameters*: full_check: True - rigorous check, O(N) operations : default False - basic check, O(1) operations """ M,N = self.shape R,C = self.blocksize # index arrays should have integer data types if self.indptr.dtype.kind != 'i': warn("indptr array has non-integer dtype (%s)" % self.indptr.dtype.name) if self.indices.dtype.kind != 'i': warn("indices array has non-integer dtype (%s)" % self.indices.dtype.name) idx_dtype = get_index_dtype((self.indices, self.indptr)) self.indptr = np.asarray(self.indptr, dtype=idx_dtype) self.indices = np.asarray(self.indices, dtype=idx_dtype) self.data = to_native(self.data) # check array shapes if self.indices.ndim != 1 or self.indptr.ndim != 1: raise ValueError("indices, and indptr should be 1-D") if self.data.ndim != 3: raise ValueError("data should be 3-D") # check index pointer if (len(self.indptr) != M//R + 1): raise ValueError("index pointer size (%d) should be (%d)" % (len(self.indptr), M//R + 1)) if (self.indptr[0] != 0): raise ValueError("index pointer should start with 0") # check index and data arrays if (len(self.indices) != len(self.data)): raise ValueError("indices and data should have the same size") if (self.indptr[-1] > len(self.indices)): raise ValueError("Last value of index pointer should be less than " "the size of index and data arrays") self.prune() if full_check: # check format validity (more expensive) if self.nnz > 0: if self.indices.max() >= N//C: raise ValueError("column index values must be < %d (now max %d)" % (N//C, self.indices.max())) if self.indices.min() < 0: raise ValueError("column index values must be >= 0") if np.diff(self.indptr).min() < 0: raise ValueError("index pointer values must form a " "non-decreasing sequence") # if not self.has_sorted_indices(): # warn('Indices were not in sorted order. 
Sorting indices.') # self.sort_indices(check_first=False) def _get_blocksize(self): return self.data.shape[1:] blocksize = property(fget=_get_blocksize) def getnnz(self, axis=None): if axis is not None: raise NotImplementedError("getnnz over an axis is not implemented " "for BSR format") R,C = self.blocksize return int(self.indptr[-1] * R * C) getnnz.__doc__ = spmatrix.getnnz.__doc__ def __repr__(self): format = _formats[self.getformat()][1] return ("<%dx%d sparse matrix of type '%s'\n" "\twith %d stored elements (blocksize = %dx%d) in %s format>" % (self.shape + (self.dtype.type, self.nnz) + self.blocksize + (format,))) def diagonal(self): """Returns the main diagonal of the matrix """ M,N = self.shape R,C = self.blocksize y = np.empty(min(M,N), dtype=upcast(self.dtype)) _sparsetools.bsr_diagonal(M//R, N//C, R, C, self.indptr, self.indices, np.ravel(self.data), y) return y ########################## # NotImplemented methods # ########################## def getdata(self,ind): raise NotImplementedError def __getitem__(self,key): raise NotImplementedError def __setitem__(self,key,val): raise NotImplementedError ###################### # Arithmetic methods # ###################### def matvec(self, other): return self * other def matmat(self, other): return self * other def _mul_vector(self, other): M,N = self.shape R,C = self.blocksize result = np.zeros(self.shape[0], dtype=upcast(self.dtype, other.dtype)) bsr_matvec(M//R, N//C, R, C, self.indptr, self.indices, self.data.ravel(), other, result) return result def _mul_multivector(self,other): R,C = self.blocksize M,N = self.shape n_vecs = other.shape[1] # number of column vectors result = np.zeros((M,n_vecs), dtype=upcast(self.dtype,other.dtype)) bsr_matvecs(M//R, N//C, n_vecs, R, C, self.indptr, self.indices, self.data.ravel(), other.ravel(), result.ravel()) return result def _mul_sparse_matrix(self, other): M, K1 = self.shape K2, N = other.shape R,n = self.blocksize # convert to this format if isspmatrix_bsr(other): C 
= other.blocksize[1] else: C = 1 from .csr import isspmatrix_csr if isspmatrix_csr(other) and n == 1: other = other.tobsr(blocksize=(n,C), copy=False) # lightweight conversion else: other = other.tobsr(blocksize=(n,C)) idx_dtype = get_index_dtype((self.indptr, self.indices, other.indptr, other.indices), maxval=(M//R)*(N//C)) indptr = np.empty(self.indptr.shape, dtype=idx_dtype) csr_matmat_pass1(M//R, N//C, self.indptr.astype(idx_dtype), self.indices.astype(idx_dtype), other.indptr.astype(idx_dtype), other.indices.astype(idx_dtype), indptr) bnnz = indptr[-1] idx_dtype = get_index_dtype((self.indptr, self.indices, other.indptr, other.indices), maxval=bnnz) indptr = indptr.astype(idx_dtype) indices = np.empty(bnnz, dtype=idx_dtype) data = np.empty(R*C*bnnz, dtype=upcast(self.dtype,other.dtype)) bsr_matmat_pass2(M//R, N//C, R, C, n, self.indptr.astype(idx_dtype), self.indices.astype(idx_dtype), np.ravel(self.data), other.indptr.astype(idx_dtype), other.indices.astype(idx_dtype), np.ravel(other.data), indptr, indices, data) data = data.reshape(-1,R,C) # TODO eliminate zeros return bsr_matrix((data,indices,indptr),shape=(M,N),blocksize=(R,C)) ###################### # Conversion methods # ###################### def tobsr(self, blocksize=None, copy=False): """Convert this matrix into Block Sparse Row Format. With copy=False, the data/indices may be shared between this matrix and the resultant bsr_matrix. If blocksize=(R, C) is provided, it will be used for determining block size of the bsr_matrix. """ if blocksize not in [None, self.blocksize]: return self.tocsr().tobsr(blocksize=blocksize) if copy: return self.copy() else: return self def tocsr(self, copy=False): return self.tocoo(copy=False).tocsr(copy=copy) # TODO make this more efficient tocsr.__doc__ = spmatrix.tocsr.__doc__ def tocsc(self, copy=False): return self.tocoo(copy=False).tocsc(copy=copy) tocsc.__doc__ = spmatrix.tocsc.__doc__ def tocoo(self, copy=True): """Convert this matrix to COOrdinate format. 
When copy=False the data array will be shared between this matrix and the resultant coo_matrix. """ M,N = self.shape R,C = self.blocksize indptr_diff = np.diff(self.indptr) if indptr_diff.dtype.itemsize > np.dtype(np.intp).itemsize: # Check for potential overflow indptr_diff_limited = indptr_diff.astype(np.intp) if np.any(indptr_diff_limited != indptr_diff): raise ValueError("Matrix too big to convert") indptr_diff = indptr_diff_limited row = (R * np.arange(M//R)).repeat(indptr_diff) row = row.repeat(R*C).reshape(-1,R,C) row += np.tile(np.arange(R).reshape(-1,1), (1,C)) row = row.reshape(-1) col = (C * self.indices).repeat(R*C).reshape(-1,R,C) col += np.tile(np.arange(C), (R,1)) col = col.reshape(-1) data = self.data.reshape(-1) if copy: data = data.copy() from .coo import coo_matrix return coo_matrix((data,(row,col)), shape=self.shape) def transpose(self, axes=None, copy=False): if axes is not None: raise ValueError(("Sparse matrices do not support " "an 'axes' parameter because swapping " "dimensions is the only logical permutation.")) R, C = self.blocksize M, N = self.shape NBLK = self.nnz//(R*C) if self.nnz == 0: return bsr_matrix((N, M), blocksize=(C, R), dtype=self.dtype, copy=copy) indptr = np.empty(N//C + 1, dtype=self.indptr.dtype) indices = np.empty(NBLK, dtype=self.indices.dtype) data = np.empty((NBLK, C, R), dtype=self.data.dtype) bsr_transpose(M//R, N//C, R, C, self.indptr, self.indices, self.data.ravel(), indptr, indices, data.ravel()) return bsr_matrix((data, indices, indptr), shape=(N, M), copy=copy) transpose.__doc__ = spmatrix.transpose.__doc__ ############################################################## # methods that examine or modify the internal data structure # ############################################################## def eliminate_zeros(self): R,C = self.blocksize M,N = self.shape mask = (self.data != 0).reshape(-1,R*C).sum(axis=1) # nonzero blocks nonzero_blocks = mask.nonzero()[0] if len(nonzero_blocks) == 0: return # nothing to do 
self.data[:len(nonzero_blocks)] = self.data[nonzero_blocks] # modifies self.indptr and self.indices *in place* _sparsetools.csr_eliminate_zeros(M//R, N//C, self.indptr, self.indices, mask) self.prune() def sum_duplicates(self): """Eliminate duplicate matrix entries by adding them together The is an *in place* operation """ if self.has_canonical_format: return self.sort_indices() R, C = self.blocksize M, N = self.shape # port of _sparsetools.csr_sum_duplicates n_row = M // R nnz = 0 row_end = 0 for i in range(n_row): jj = row_end row_end = self.indptr[i+1] while jj < row_end: j = self.indices[jj] x = self.data[jj] jj += 1 while jj < row_end and self.indices[jj] == j: x += self.data[jj] jj += 1 self.indices[nnz] = j self.data[nnz] = x nnz += 1 self.indptr[i+1] = nnz self.prune() # nnz may have changed self.has_canonical_format = True def sort_indices(self): """Sort the indices of this matrix *in place* """ if self.has_sorted_indices: return R,C = self.blocksize M,N = self.shape bsr_sort_indices(M//R, N//C, R, C, self.indptr, self.indices, self.data.ravel()) self.has_sorted_indices = True def prune(self): """ Remove empty space after all non-zero elements. """ R,C = self.blocksize M,N = self.shape if len(self.indptr) != M//R + 1: raise ValueError("index pointer has invalid length") bnnz = self.indptr[-1] if len(self.indices) < bnnz: raise ValueError("indices array has too few elements") if len(self.data) < bnnz: raise ValueError("data array has too few elements") self.data = self.data[:bnnz] self.indices = self.indices[:bnnz] # utility functions def _binopt(self, other, op, in_shape=None, out_shape=None): """Apply the binary operation fn to two sparse matrices.""" # Ideally we'd take the GCDs of the blocksize dimensions # and explode self and other to match. other = self.__class__(other, blocksize=self.blocksize) # e.g. bsr_plus_bsr, etc. 
fn = getattr(_sparsetools, self.format + op + self.format) R,C = self.blocksize max_bnnz = len(self.data) + len(other.data) idx_dtype = get_index_dtype((self.indptr, self.indices, other.indptr, other.indices), maxval=max_bnnz) indptr = np.empty(self.indptr.shape, dtype=idx_dtype) indices = np.empty(max_bnnz, dtype=idx_dtype) bool_ops = ['_ne_', '_lt_', '_gt_', '_le_', '_ge_'] if op in bool_ops: data = np.empty(R*C*max_bnnz, dtype=np.bool_) else: data = np.empty(R*C*max_bnnz, dtype=upcast(self.dtype,other.dtype)) fn(self.shape[0]//R, self.shape[1]//C, R, C, self.indptr.astype(idx_dtype), self.indices.astype(idx_dtype), self.data, other.indptr.astype(idx_dtype), other.indices.astype(idx_dtype), np.ravel(other.data), indptr, indices, data) actual_bnnz = indptr[-1] indices = indices[:actual_bnnz] data = data[:R*C*actual_bnnz] if actual_bnnz < max_bnnz/2: indices = indices.copy() data = data.copy() data = data.reshape(-1,R,C) return self.__class__((data, indices, indptr), shape=self.shape) # needed by _data_matrix def _with_data(self,data,copy=True): """Returns a matrix with the same sparsity structure as self, but with different data. By default the structure arrays (i.e. .indptr and .indices) are copied. """ if copy: return self.__class__((data,self.indices.copy(),self.indptr.copy()), shape=self.shape,dtype=data.dtype) else: return self.__class__((data,self.indices,self.indptr), shape=self.shape,dtype=data.dtype) # # these functions are used by the parent class # # to remove redudancy between bsc_matrix and bsr_matrix # def _swap(self,x): # """swap the members of x if this is a column-oriented matrix # """ # return (x[0],x[1]) def isspmatrix_bsr(x): return isinstance(x, bsr_matrix)
bf91de8bc79c94d76bf93ea0cc534b567dc2e161
4d9bd7874fc5a4f2ec56bb172f4e93a9601c4c83
/main.py
4864dd4bbecc043b09c96f4fb427a06e03a0c031
[]
no_license
liziniu/Model-Uncertainty-in-Neural-Networks
ff65009b3c165c4fd82efb9759cb26d41f914a2e
67c6042c52dd7e7a918ab42d34764bbb9a88c8a2
refs/heads/master
2020-05-04T00:26:47.315086
2019-04-06T03:19:47
2019-04-06T03:19:47
178,884,785
0
0
null
null
null
null
UTF-8
Python
false
false
1,506
py
"""Entry point: parse CLI flags, build the model, then train or evaluate."""
from model1.default import get_config
from model1.model import Model
from utli import load_data, get_session, update_para
import argparse


def arg_parse():
    """Collect hyper-parameters and run options from the command line."""
    parser = argparse.ArgumentParser()
    # (flag, type, default) triples, registered in this exact order.
    for flag, kind, default in [
        ("--model", int, 1),
        ("--epochs", int, 100),
        ("--batch_size", int, 128),
        ("--lr", float, 1e-3),
        ("--num_units", int, 100),
        ("--pi", float, 0.25),
        ("--mu1", float, 0.0),
        ("--std1", float, 0.5),
        ("--mu2", float, 0.0),
        ("--std2", float, 1.5),
    ]:
        parser.add_argument(flag, type=kind, default=default)
    parser.add_argument("--train", action="store_true", default=False)
    parser.add_argument("--load_path", type=str, default="logs/model1/")
    return parser.parse_args()


def main(args):
    """Train a fresh model, or load a checkpoint and evaluate it on the test set."""
    session = get_session()
    params = update_para(get_config(), args)
    net = Model(session, params)

    x_train, x_test, y_train, y_test = load_data()
    # Hold out the last 5000 training examples as the validation split.
    cut = -5000
    if args.train:
        net.train(x_train[:cut], y_train[:cut], x_train[cut:], y_train[cut:])
    else:
        net.load(args.load_path)
        net.test(x_test, y_test)


if __name__ == "__main__":
    main(arg_parse())
014a6b6fc7c93c425ce7da5ad70dfce4b7273ee8
52b5773617a1b972a905de4d692540d26ff74926
/.history/largestTime_20200903122053.py
697fb00998e96926352a6433e5a6da6d088d57dd
[]
no_license
MaryanneNjeri/pythonModules
56f54bf098ae58ea069bf33f11ae94fa8eedcabc
f4e56b1e4dda2349267af634a46f6b9df6686020
refs/heads/master
2022-12-16T02:59:19.896129
2020-09-11T12:05:22
2020-09-11T12:05:22
null
0
0
null
null
null
null
UTF-8
Python
false
false
552
py
from itertools import permutations


def Time(A):
    """Return the largest valid 24-hour time "HH:MM" that can be formed by
    using each of the four digits in A exactly once, or "" when no
    arrangement is a valid clock reading.

    Approach (as sketched in the original draft): try every permutation of
    the digits and keep the largest one falling in 00:00 .. 23:59.

    Note: the original body was a non-compiling work-in-progress
    (`i =<2359` and an empty `if`); this completes it.
    """
    best = ""
    for h1, h2, m1, m2 in permutations(A):
        # Valid clock reading: hours in 0-23, minutes in 0-59.
        if h1 * 10 + h2 < 24 and m1 * 10 + m2 < 60:
            candidate = "{}{}:{}{}".format(h1, h2, m1, m2)
            # All candidates are 5 chars, so string order == time order.
            if candidate > best:
                best = candidate
    return best


Time([1, 2, 3, 4])
47213a723487f5382748a8d76a7546ee674ea1f5
a26d91163fe40924c7c4f9d94fcd973989b68983
/watchlist_app/migrations/0003_alter_movie_description.py
bed775bad8d30539b2b34f84c48a3511902e2b22
[]
no_license
rcoffie/Django-Rest-Tut
a840ecb838098ed2d525c1b5321f042e0d29c5fb
9925bfb11b92a49aa6973e3929b2d05d9528ee27
refs/heads/master
2023-08-25T06:43:41.019767
2021-10-27T15:27:06
2021-10-27T15:27:06
409,567,488
0
0
null
null
null
null
UTF-8
Python
false
false
384
py
# Generated by Django 3.2.5 on 2021-09-23 11:15 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('watchlist_app', '0002_rename_movies_movie'), ] operations = [ migrations.AlterField( model_name='movie', name='description', field=models.TextField(), ), ]
9207f63b377b4990be34f2882127edcd256361e6
70e1d7c3e375ecff09df36e5a4ceda5691221968
/tmp.py
1936e8cb7425e44fc01199bc2937e82f0e03ce0a
[ "Apache-2.0" ]
permissive
doublechenching/ship_detection
5b91aa4a7fbe6eb5a88389d1a517641a53740890
1ba4926e0d28043863df05ae8afc3d5b336b350d
refs/heads/master
2020-04-06T17:53:23.855070
2018-11-15T08:47:02
2018-11-15T08:47:02
157,676,999
8
0
null
null
null
null
UTF-8
Python
false
false
1,213
py
Nucleoplasm 12885 0.25 Cytosol 8228 0.16 Plasma membrane 3777 0.07 Nucleoli 3621 0.07 Mitochondria 2965 0.06 Golgi apparatus 2822 0.06 Nuclear bodies 2513 0.05 Nuclear speckles 1858 0.04 Nucleoli fibrillar center 1561 0.03 Centrosome 1482 0.03 Nuclear membrane 1254 0.02 Intermediate filaments 1093 0.02 Microtubules 1066 0.02 Endoplasmic reticulum 1008 0.02 Microtubule organizing center 902 0.02 Cell junctions 802 0.02 Actin filaments 688 0.01 Focal adhesion sites 537 0.01 Cytokinetic bridge 530 0.01 Cytoplasmic bodies 328 0.01 Aggresome 322 0.01 Mitotic spindle 210 0.00 Lipid droplets 172 0.00 Peroxisomes 53 0.00 Endosomes 45 0.00 Lysosomes 28 0.00 Microtubule ends 21 0.00 Rods & rings 11 0.00
49125a103d0ef8ad23344162256cf34b29c740c5
5c0506e42fc7f0325728994223f1b0be4f1187fc
/summa_py_textrank.py
2fd1d59fa66724ab7ba0f6a9607be02ff57006a6
[]
no_license
Trevahok/summarizer
602d492385c3130c6c9f11dd82e71177541ede73
cfd134e79ec5dfac3530081c6863421ab667207d
refs/heads/master
2020-03-19T20:36:21.680650
2018-06-12T06:54:36
2018-06-12T06:54:36
136,908,134
0
0
null
null
null
null
UTF-8
Python
false
false
1,124
py
"""CLI text summarizer: summarize a URL's paragraphs, a PDF, or a text file."""
from urllib.request import urlopen
from summa.summarizer import summarize
from sys import argv
from bs4 import BeautifulSoup as bs
import PyPDF2


def from_link():
    """Fetch argv[1] as a web page and print a summary of its <p> text."""
    # Close the HTTP response deterministically (the original leaked it).
    with urlopen(argv[1]) as page:
        soup = bs(page, 'lxml')
    paragraphs = soup.find_all('p')
    text = '\n'.join(p.text for p in paragraphs)
    print(summarize(text, ratio=0.2))


def from_pdf():
    """Print a per-page summary of the PDF named by argv[1]."""
    # The handle must stay open while pages are extracted: PdfFileReader
    # reads from the file lazily.
    with open(argv[1], 'rb') as pdfdoc:
        pdfReader = PyPDF2.PdfFileReader(pdfdoc)
        for i in range(pdfReader.numPages):
            page = pdfReader.getPage(i)
            print('Page Number: ', i, '\n')
            print(summarize(page.extractText(), ratio=0.2))
            print('\n\n')


def from_txt():
    """Print a summary of the plain-text file named by argv[1]."""
    # with-block closes the file; also avoids shadowing the `file` builtin.
    with open(argv[1], 'r') as fh:
        text = fh.read()
    print(summarize(text, ratio=0.2))


if __name__ == "__main__":
    try:
        filetype = argv[2]
        if filetype == 'url':
            from_link()
        elif filetype == 'pdf':
            from_pdf()
        else:
            from_txt()
    except IndexError:
        # Missing argv entries (from here or inside the handlers) end up here.
        print("\nUsage:\n \tsummarize 'http:// url.to.summarize' url \n or \n \tsummarize 'path/to/file/file.pdf' pdf \n or \n \tsummarize 'path/to/file/file.txt' txt ")
c9708fe103af2012e13994b656c45ba4a852077c
abad82a1f487c5ff2fb6a84059a665aa178275cb
/Codewars/8kyu/8kyu-interpreters-hq9-plus/Python/solution1.py
bdd53cce40278d9d04a75b8b2e61e0cc09d79511
[ "MIT" ]
permissive
RevansChen/online-judge
8ae55f136739a54f9c9640a967ec931425379507
ad1b07fee7bd3c49418becccda904e17505f3018
refs/heads/master
2021-01-19T23:02:58.273081
2019-07-05T09:42:40
2019-07-05T09:42:40
88,911,035
9
0
null
null
null
null
UTF-8
Python
false
false
487
py
# Python - 3.6.0


def gets(n):
    """Plural suffix: '' for exactly one bottle, 's' otherwise (incl. zero)."""
    return '' if n == 1 else 's'


def _verse(i):
    """Two newline-joined lines of the song for *i* bottles remaining."""
    left = i - 1 if i > 1 else 'no more'
    first = f'{i} bottle{gets(i)} of beer on the wall, {i} bottle{gets(i)} of beer.'
    second = f'Take one down and pass it around, {left} bottle{gets(i - 1)} of beer on the wall.'
    return first + '\n' + second


_song_parts = [_verse(i) for i in range(99, 0, -1)]
_song_parts.append(
    'No more bottles of beer on the wall, no more bottles of beer.\n'
    'Go to the store and buy some more, 99 bottles of beer on the wall.'
)

# HQ9('H'), HQ9('Q'), HQ9('9'); any other key yields None (dict.get).
HQ9 = {'H': 'Hello World!', 'Q': 'Q', '9': '\n'.join(_song_parts)}.get
703a8e40bd746970ed7d5c2e13f250617fe1a660
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02574/s746022410.py
9331bad8a322a0b5502729d4fc4e2aa050191d05
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
1,135
py
import math,itertools,fractions,heapq,collections,bisect,sys,queue,copy
sys.setrecursionlimit(10**7)
# Competitive-programming boilerplate: big sentinel, common modulus, and
# 4-/8-neighbour grid offsets (the grid offsets are unused by main() below).
inf=10**20
mod=10**9+7
dd=[(-1,0),(0,1),(1,0),(0,-1)]
ddn=[(-1,0),(-1,1),(0,1),(1,1),(1,0),(1,-1),(0,-1),(-1,-1)]
# One-line stdin readers: int list, single int/float, token list, raw line.
def LI(): return [int(x) for x in sys.stdin.readline().split()]
# def LF(): return [float(x) for x in sys.stdin.readline().split()]
def I(): return int(sys.stdin.readline())
def F(): return float(sys.stdin.readline())
def LS(): return sys.stdin.readline().split()
def S(): return input()

def main():
    # Classify A as 'pairwise coprime', 'setwise coprime' or 'not coprime'
    # (ABC177-E style).  Reads N, then the N integers, from stdin.
    N=I()
    A=LI()
    # Overall gcd > 1 => every element shares a factor: not coprime at all.
    g=0
    for x in A:
        g=math.gcd(g,x)
    if g>1:
        return 'not coprime'
    # sosu[d] counts how many elements have d (>1) as a divisor.  Each element
    # contributes each of its divisors at most once (x itself, then distinct
    # pairs y and z=x//y below), so any count reaching 2 means two DISTINCT
    # elements share a divisor => only setwise coprime.  The table size
    # assumes values stay below ~10**6 (presumably the problem constraint —
    # TODO confirm against the task statement).
    sosu=[0]*1000100
    for x in A:
        if x==1:
            continue
        # Count x itself; this covers prime factors larger than sqrt(x).
        sosu[x]+=1
        if sosu[x]>1:
            return 'setwise coprime'
        # Enumerate every divisor pair (y, x//y) with 2 <= y <= sqrt(x).
        for y in range(2,int(math.sqrt(x))+1):
            if x%y!=0:
                continue
            z=x//y
            if y==z:
                # Perfect square: y and z are the same divisor — count once.
                sosu[y]+=1
                if sosu[y]>1:
                    return 'setwise coprime'
            else:
                sosu[y]+=1
                if sosu[y]>1:
                    return 'setwise coprime'
                sosu[z]+=1
                if sosu[z]>1:
                    return 'setwise coprime'
    # No divisor shared by two elements: every pair is coprime.
    return 'pairwise coprime'

# main()
print(main())
b2432c7ce576836fc769e1c9a990bb2a1b00d91c
ef243d91a1826b490e935fa3f3e6c29c3cc547d0
/cv2/cv2/MergeExposures.py
7d68ec4c5e8da4d27c6ad8ddb544c23ea3973a7e
[]
no_license
VentiFang/Python_local_module
6b3d0b22399e817057dfd15d647a14bb1e41980e
c44f55379eca2818b29732c2815480ee755ae3fb
refs/heads/master
2020-11-29T11:24:54.932967
2019-12-25T12:57:14
2019-12-25T12:57:14
230,101,875
0
0
null
null
null
null
UTF-8
Python
false
false
1,989
py
# encoding: utf-8
# module cv2.cv2
# from F:\Python\Python36\lib\site-packages\cv2\cv2.cp36-win_amd64.pyd
# by generator 1.147
""" Python wrapper for OpenCV. """

# imports
# NOTE(review): the stub generator emitted "import cv2.cv2 as" with the alias
# swallowed by the trailing path comment — a SyntaxError.  The dangling "as"
# has been removed; everything else is kept as generated.
import cv2.cv2  # F:\Python\Python36\lib\site-packages\cv2\cv2.cp36-win_amd64.pyd
import cv2.Error as Error  # <module 'cv2.Error'>
import cv2.cuda as cuda  # <module 'cv2.cuda'>
import cv2.detail as detail  # <module 'cv2.detail'>
import cv2.dnn as dnn  # <module 'cv2.dnn'>
import cv2.fisheye as fisheye  # <module 'cv2.fisheye'>
import cv2.flann as flann  # <module 'cv2.flann'>
import cv2.instr as instr  # <module 'cv2.instr'>
import cv2.ipp as ipp  # <module 'cv2.ipp'>
import cv2.ml as ml  # <module 'cv2.ml'>
import cv2.ocl as ocl  # <module 'cv2.ocl'>
import cv2.ogl as ogl  # <module 'cv2.ogl'>
import cv2.samples as samples  # <module 'cv2.samples'>
import cv2.utils as utils  # <module 'cv2.utils'>
import cv2.videoio_registry as videoio_registry  # <module 'cv2.videoio_registry'>

import cv2 as __cv2


class MergeExposures(__cv2.Algorithm):
    # Auto-generated IDE stub for the native class: bodies are placeholders.
    def process(self, src, times, response, dst=None):  # real signature unknown; restored from __doc__
        """
        process(src, times, response[, dst]) -> dst
        .   @brief Merges images.
        .
        .   @param src vector of input images
        .   @param dst result image
        .   @param times vector of exposure time values for each image
        .   @param response 256x1 matrix with inverse camera response function for each pixel value, it should
        .   have the same number of channels as images.
        """
        pass

    def __init__(self, *args, **kwargs):  # real signature unknown
        pass

    @staticmethod  # known case of __new__
    def __new__(*args, **kwargs):  # real signature unknown
        """ Create and return a new object.  See help(type) for accurate signature. """
        pass

    def __repr__(self, *args, **kwargs):  # real signature unknown
        """ Return repr(self). """
        pass
d40d4c0886ebeb7c5e6c46de7f421799756c92b7
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_318/ch23_2019_03_27_15_00_44_973644.py
e268beaac58b3819e4295d3aa5d048c89b2d4156
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
223
py
def verifica_idade(x):
    """Print the drinking-age status for age *x* and return *x* unchanged.

    Over 20: allowed in both the USA and Brazil; 18-20: Brazil only;
    under 18: not allowed.
    """
    # The three original guards partition the real line; an if/elif/else
    # chain expresses the same decision.
    if x > 20:
        print("Liberado EUA e BRASIL")
    elif x > 17:
        print("Liberado BRASIL")
    else:
        print("Nao esta liberado")
    return x
3f982e8a36a779567542f4c382cd555febeef961
ed10dc841d5b4f6a038e8f24f603750992d9fae9
/lldb/test/API/lang/objc/foundation/TestFoundationDisassembly.py
bf9a40fc8da9b49c77e740cb835ab78aef313bfc
[ "NCSA", "Apache-2.0", "LLVM-exception" ]
permissive
WYK15/swift-Ollvm10
90c2f0ade099a1cc545183eba5c5a69765320401
ea68224ab23470963b68dfcc28b5ac769a070ea3
refs/heads/main
2023-03-30T20:02:58.305792
2021-04-07T02:41:01
2021-04-07T02:41:01
355,189,226
5
0
null
null
null
null
UTF-8
Python
false
false
5,449
py
""" Test the lldb disassemble command on foundation framework. """ import unittest2 import os import lldb from lldbsuite.test.decorators import * from lldbsuite.test.lldbtest import * from lldbsuite.test import lldbutil @skipUnlessDarwin class FoundationDisassembleTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) NO_DEBUG_INFO_TESTCASE = True @expectedFailureDarwin('rdar://problem/54977700') @skipIfAsan def test_foundation_disasm(self): """Do 'disassemble -n func' on each and every 'Code' symbol entry from the Foundation.framework.""" self.build() # Enable synchronous mode self.dbg.SetAsync(False) # Create a target by the debugger. target = self.dbg.CreateTarget(self.getBuildArtifact("a.out")) self.assertTrue(target, VALID_TARGET) # Now launch the process, and do not stop at entry point. process = target.LaunchSimple( None, None, self.get_process_working_directory()) self.assertTrue(process, PROCESS_IS_VALID) foundation_framework = None for module in target.modules: if module.file.basename == "Foundation": foundation_framework = module.file.fullpath break self.assertTrue( foundation_framework is not None, "Foundation.framework path located") self.runCmd("image dump symtab '%s'" % foundation_framework) raw_output = self.res.GetOutput() # Now, grab every 'Code' symbol and feed it into the command: # 'disassemble -n func'. # # The symbol name is on the last column and trails the flag column which # looks like '0xhhhhhhhh', i.e., 8 hexadecimal digits. codeRE = re.compile(r""" \ Code\ {9} # ' Code' followed by 9 SPCs, .* # the wildcard chars, 0x[0-9a-f]{8} # the flag column, and \ (.+)$ # finally the function symbol. """, re.VERBOSE) for line in raw_output.split(os.linesep): match = codeRE.search(line) if match: func = match.group(1) self.runCmd('image lookup -s "%s"' % func) self.runCmd('disassemble -n "%s"' % func) @skipIfAsan def test_simple_disasm(self): """Test the lldb 'disassemble' command""" self.build() # Create a target by the debugger. 
target = self.dbg.CreateTarget(self.getBuildArtifact("a.out")) self.assertTrue(target, VALID_TARGET) # Stop at +[NSString stringWithFormat:]. symbol_name = "+[NSString stringWithFormat:]" break_results = lldbutil.run_break_set_command( self, "_regexp-break %s" % (symbol_name)) lldbutil.check_breakpoint_result( self, break_results, symbol_name=symbol_name, num_locations=1) # Stop at -[MyString initWithNSString:]. lldbutil.run_break_set_by_symbol( self, '-[MyString initWithNSString:]', num_expected_locations=1, sym_exact=True) # Stop at the "description" selector. lldbutil.run_break_set_by_selector( self, 'description', num_expected_locations=1, module_name='a.out') # Stop at -[NSAutoreleasePool release]. break_results = lldbutil.run_break_set_command( self, "_regexp-break -[NSAutoreleasePool release]") lldbutil.check_breakpoint_result( self, break_results, symbol_name='-[NSAutoreleasePool release]', num_locations=1) self.runCmd("run", RUN_SUCCEEDED) # First stop is +[NSString stringWithFormat:]. self.expect( "thread backtrace", "Stop at +[NSString stringWithFormat:]", substrs=["Foundation`+[NSString stringWithFormat:]"]) # Do the disassemble for the currently stopped function. self.runCmd("disassemble -f") self.runCmd("process continue") # Skip another breakpoint for +[NSString stringWithFormat:]. self.runCmd("process continue") # Followed by a.out`-[MyString initWithNSString:]. self.expect( "thread backtrace", "Stop at a.out`-[MyString initWithNSString:]", substrs=["a.out`-[MyString initWithNSString:]"]) # Do the disassemble for the currently stopped function. self.runCmd("disassemble -f") self.runCmd("process continue") # Followed by -[MyString description]. self.expect("thread backtrace", "Stop at -[MyString description]", substrs=["a.out`-[MyString description]"]) # Do the disassemble for the currently stopped function. self.runCmd("disassemble -f") self.runCmd("process continue") # Skip another breakpoint for -[MyString description]. 
self.runCmd("process continue") # Followed by -[NSAutoreleasePool release]. self.expect("thread backtrace", "Stop at -[NSAutoreleasePool release]", substrs=["Foundation`-[NSAutoreleasePool release]"]) # Do the disassemble for the currently stopped function. self.runCmd("disassemble -f")
c46b8a2458a636eea1bde0cc9df07da0126d1e1c
0c13891448e6c3136e2f651c776d1d11edee2577
/src/template_method.py
91d30a13953d467cfa30df4a7500ae59f97997f2
[ "MIT" ]
permissive
MrRezoo/design-patterns-python
31cb7b73ae05c5bd361eb3455df234c20529f465
8f8e2501ad8e05f1a75ce5be659d926c0ec99698
refs/heads/master
2023-08-01T22:01:01.186910
2021-10-02T07:57:49
2021-10-02T07:57:49
349,936,987
8
1
MIT
2021-04-07T14:55:10
2021-03-21T08:13:44
Python
UTF-8
Python
false
false
1,178
py
""" Behavioral pattern: Template method Example: when we have static job between several classes use one ABC class """ from abc import ABC, abstractmethod class Top(ABC): def template_method(self): self.first_common() self.second_common() self.third_require() self.fourth_require() self.hook() def first_common(self): print('I am first common...') def second_common(self): print('I am second common') @abstractmethod def third_require(self): pass @abstractmethod def fourth_require(self): pass def hook(self): pass class One(Top): def third_require(self): print('This is Third require from One...') def fourth_require(self): print('This is Fourth require from One...') def hook(self): print('This is Hook from One') class Two(Top): def third_require(self): print('This is Third require from Two...') def fourth_require(self): print('This is Fourth require from Two...') def client(class_): class_.template_method() if __name__ == '__main__': client(Two())
56fbf2a47fa9865416f2c8ff06113e4b3ebbf002
934f170481a5f3807b14823f9e704fd877044d30
/SAGAN.py
5af51b32adb641eff5c37319b7332b3957d40e21
[]
no_license
JustinLion83/Anime-GAN-tensorflow
c234bd28e197a801460683d07aa35d2d80cb96f9
e3a5fd726aeaf08d01445d8176468d84cd3295f4
refs/heads/master
2020-07-03T06:36:25.765500
2019-08-17T19:37:04
2019-08-17T19:37:04
201,822,987
0
0
null
2019-08-11T22:45:42
2019-08-11T22:45:42
null
UTF-8
Python
false
false
12,068
py
from layers import * import numpy as np import time from utils import util import os class SAGAN_model(object): def __init__(self, args): self.args = args self.d_loss_log = [] self.g_loss_log = [] self.layer_num = int(np.log2(self.args.img_size[0])) - 3 # inputs self.is_training = tf.placeholder_with_default(False, (), name='is_training') self.inputs = tf.placeholder(tf.float32, [None, self.args.img_size[0], self.args.img_size[1], self.args.img_size[2]], name='inputs') self.z = tf.placeholder(tf.float32, [None, 1, 1, self.args.z_dim], name='z') # noise # output of D for real images real_logits = self.discriminator(self.inputs) # output of D for fake images self.fake_images = self.generator(self.z) fake_logits = self.discriminator(self.fake_images, reuse=True) # get loss for discriminator self.d_loss = self.discriminator_loss(d_logits_real=real_logits, d_logits_fake=fake_logits) # get loss for generator self.g_loss = self.generator_loss(d_logits_fake=fake_logits) # divide trainable variables into a group for D and a group for G t_vars = tf.trainable_variables() d_vars = [var for var in t_vars if 'discriminator' in var.name] g_vars = [var for var in t_vars if 'generator' in var.name] # global step self.global_step = tf.get_variable('global_step', initializer=tf.constant(0), trainable=False) self.add_step = self.global_step.assign(self.global_step + 1) # optimizers self.d_lr = tf.train.exponential_decay(self.args.d_lr, tf.maximum(self.global_step - self.args.decay_start_steps, 0), self.args.decay_steps, self.args.decay_rate) self.g_lr = tf.train.exponential_decay(self.args.g_lr, tf.maximum(self.global_step - self.args.decay_start_steps, 0), self.args.decay_steps, self.args.decay_rate) update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) with tf.control_dependencies(update_ops): d_grads = tf.gradients(self.d_loss, d_vars) d_opt = tf.train.AdamOptimizer(self.d_lr, beta1=self.args.beta1, beta2=self.args.beta2) self.train_d = d_opt.apply_gradients(zip(d_grads, d_vars)) 
g_grads = tf.gradients(self.g_loss, g_vars) g_opt = tf.train.AdamOptimizer(self.g_lr, beta1=self.args.beta1, beta2=self.args.beta2) self.train_g = g_opt.apply_gradients(zip(g_grads, g_vars)) # EMA for generator with tf.variable_scope("EMA_Weights"): if self.args.ema_decay is not None: self.var_ema = tf.train.ExponentialMovingAverage(self.args.ema_decay, num_updates=self.global_step) with tf.control_dependencies([self.train_g]): self.ema_train_g = self.var_ema.apply(tf.trainable_variables(scope='generator')) # assign ema weights self.assign_vars = [] for var in tf.trainable_variables(scope='generator'): v = self.var_ema.average(var) if v is not None: self.assign_vars.append(tf.assign(var, v)) def discriminator_loss(self, d_logits_real, d_logits_fake): real_loss = tf.reduce_mean(tf.nn.relu(1.0 - d_logits_real)) fake_loss = tf.reduce_mean(tf.nn.relu(1.0 + d_logits_fake)) loss = real_loss + fake_loss return loss def generator_loss(self, d_logits_fake): loss = -tf.reduce_mean(d_logits_fake) return loss def generator(self, z, reuse=False): with tf.variable_scope("generator", reuse=reuse): ch = self.args.g_filters x = spectral_deconv2d(z, filters=ch, kernel_size=4, stride=1, is_training=self.is_training, padding='VALID', use_bias=False, scope='deconv2d') x = batch_norm(x, self.is_training, scope='batch_norm') x = tf.nn.leaky_relu(x, alpha=0.2) for i in range(self.layer_num // 2): with tf.variable_scope('layer' + str(i)): if self.args.up_sample: x = up_sample(x, scale_factor=2) x = spectral_conv2d(x, filters=ch // 2, kernel_size=3, stride=1, is_training=self.is_training, padding='SAME', scope='up_conv2d_' + str(i)) else: x = spectral_deconv2d(x, filters=ch // 2, kernel_size=4, stride=2, is_training=self.is_training, use_bias=False, scope='deconv2d_' + str(i)) x = batch_norm(x, self.is_training, scope='batch_norm_' + str(i)) x = tf.nn.leaky_relu(x, alpha=0.2) ch = ch // 2 # Self Attention x = attention(x, ch, is_training=self.is_training, scope="attention", reuse=reuse) for 
i in range(self.layer_num // 2, self.layer_num): with tf.variable_scope('layer' + str(i)): if self.args.up_sample: x = up_sample(x, scale_factor=2) x = spectral_conv2d(x, filters=ch // 2, kernel_size=3, stride=1, is_training=self.is_training, padding='SAME', scope='up_conv2d_' + str(i)) else: x = spectral_deconv2d(x, filters=ch // 2, kernel_size=4, stride=2, is_training=self.is_training, use_bias=False, scope='deconv2d_' + str(i)) x = batch_norm(x, self.is_training, scope='batch_norm_' + str(i)) x = tf.nn.leaky_relu(x, alpha=0.2) ch = ch // 2 if self.args.up_sample: x = up_sample(x, scale_factor=2) x = spectral_conv2d(x, filters=self.args.img_size[2], kernel_size=3, stride=1, is_training=self.is_training, padding='SAME', scope='G_conv_logit') else: x = spectral_deconv2d(x, filters=self.args.img_size[2], kernel_size=4, stride=2, is_training=self.is_training, use_bias=False, scope='G_deconv_logit') x = tf.nn.tanh(x) return x def discriminator(self, x, reuse=False): with tf.variable_scope("discriminator", reuse=reuse): ch = self.args.d_filters x = spectral_conv2d(x, filters=ch, kernel_size=4, stride=2, is_training=self.is_training, padding='SAME', use_bias=False, scope='conv2d') x = tf.nn.leaky_relu(x, alpha=0.2) for i in range(self.layer_num // 2): x = spectral_conv2d(x, filters=ch * 2, kernel_size=4, stride=2, is_training=self.is_training, padding='SAME', use_bias=False, scope='conv2d_' + str(i)) x = batch_norm(x, self.is_training, scope='batch_norm' + str(i)) x = tf.nn.leaky_relu(x, alpha=0.2) ch = ch * 2 # Self Attention x = attention(x, ch, is_training=self.is_training, scope="attention", reuse=reuse) for i in range(self.layer_num // 2, self.layer_num): x = spectral_conv2d(x, filters=ch * 2, kernel_size=4, stride=2, is_training=self.is_training, padding='SAME', use_bias=False, scope='conv2d_' + str(i)) x = batch_norm(x, self.is_training, scope='batch_norm' + str(i)) x = tf.nn.leaky_relu(x, alpha=0.2) ch = ch * 2 x = spectral_conv2d(x, filters=1, kernel_size=4, 
padding='VALID', stride=1, is_training=self.is_training, use_bias=False, scope='D_logit') x = tf.squeeze(x, axis=[1, 2]) return x def preprocess(self, x): x = x / 127.5 - 1 return x def train_epoch(self, sess, saver, train_next_element, i_epoch, n_batch, truncated_norm, z_fix=None): t_start = None global_step = 0 for i_batch in range(n_batch): if i_batch == 1: t_start = time.time() batch_imgs = sess.run(train_next_element) batch_imgs = self.preprocess(batch_imgs) batch_z = truncated_norm.rvs([self.args.batch_size, 1, 1, self.args.z_dim]) feed_dict_ = {self.inputs: batch_imgs, self.z: batch_z, self.is_training: True} # update D network _, d_loss, d_lr, g_lr = sess.run([self.train_d, self.d_loss, self.d_lr, self.g_lr], feed_dict=feed_dict_) self.d_loss_log.append(d_loss) # update G network g_loss = None if i_batch % self.args.n_critic == 0: if self.args.ema_decay is not None: _, g_loss, _, global_step = sess.run( [self.ema_train_g, self.g_loss, self.add_step, self.global_step], feed_dict=feed_dict_) else: _, g_loss, _, global_step = sess.run([self.train_g, self.g_loss, self.add_step, self.global_step], feed_dict=feed_dict_) self.g_loss_log.append(g_loss) last_train_str = "[epoch:%d/%d, global_step:%d] -d_loss:%.3f - g_loss:%.3f -d_lr:%.e -g_lr:%.e" % ( i_epoch + 1, int(self.args.epochs), global_step, d_loss, g_loss, d_lr, g_lr) if i_batch > 0: last_train_str += (' -ETA:%ds' % util.cal_ETA(t_start, i_batch, n_batch)) if (i_batch + 1) % 20 == 0 or i_batch == 0: tf.logging.info(last_train_str) # show fake_imgs if global_step % self.args.show_steps == 0: tf.logging.info('generating fake imgs in steps %d...' 
% global_step) # do ema if self.args.ema_decay is not None: # save temp weights for generator saver.save(sess, os.path.join(self.args.checkpoint_dir, 'temp_model.ckpt')) sess.run(self.assign_vars, feed_dict={self.inputs: batch_imgs, self.z:batch_z, self.is_training: False}) tf.logging.info('After EMA...') if z_fix is not None: show_z = z_fix else: show_z = truncated_norm.rvs([self.args.batch_size, 1, 1, self.args.z_dim]) fake_imgs = sess.run(self.fake_images, feed_dict={self.z: show_z}) manifold_h = int(np.floor(np.sqrt(self.args.sample_num))) util.save_images(fake_imgs, [manifold_h, manifold_h], image_path=os.path.join(self.args.result_dir, 'fake_steps_' + str(global_step) + '.jpg')) if self.args.ema_decay is not None: # restore temp weights for generator saver.restore(sess, os.path.join(self.args.checkpoint_dir, 'temp_model.ckpt')) tf.logging.info('Recover weights over...') return global_step, self.d_loss_log, self.g_loss_log
c2d75f8bbadf428b1d890435ae40bd179a74edc5
1d1a21b37e1591c5b825299de338d18917715fec
/ML,DL, RL/Machine Learning/ml/m42_xgb_qpu.py
477dafeb04fe720c755d988a9fb2f21ae8325e6c
[]
no_license
brunoleej/study_git
46279c3521f090ebf63ee0e1852aa0b6bed11b01
0c5c9e490140144caf1149e2e1d9fe5f68cf6294
refs/heads/main
2023-08-19T01:07:42.236110
2021-08-29T16:20:59
2021-08-29T16:20:59
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,122
py
# XGBoost # tree_method = 'gpu_hist' : cPU 대신, 실행을 시켰을 때 전체 GPU는 활동을 안하는데 CUDA만 활동 # predictor='gpu_predictor' : GPU로 예측 수행 # predictor='cpu_predictor' : CPU로 예측 수행 # gpu_id=0 : GPU 선택하여 처리 from xgboost import XGBClassifier, XGBRegressor from sklearn.datasets import load_boston from sklearn.model_selection import train_test_split import numpy as np from sklearn.metrics import r2_score datasets = load_boston() x = datasets.data y = datasets.target x_train, x_test, y_train, y_test = train_test_split(x, y, train_size=0.8, random_state=66) model = XGBRegressor(n_estimators=100000, learning_rate=0.01, tree_method = 'gpu_hist', # predictor='gpu_predictor' predictor='cpu_predictor', gpu_id=0 ) model.fit(x_train, y_train, verbose=1, eval_metric=['rmse'], eval_set =[(x_train, y_train), (x_test, y_test)], early_stopping_rounds=10000 ) aaa = model.score(x_test, y_test) print("model.score : ",aaa) # model.score : 0.9254888275792001
47b74b1775ebe7c948754a92b962e1cee4c592e8
4d327de5447519d3c00e6572f74362380783006f
/source/res/scripts/client/gui/Scaleform/daapi/view/lobby/rankedBattles/RankedBattlesCalendarPopover.py
1597d3315544c1d1c8513bc4b036cfea883af256
[]
no_license
XFreyaX/WorldOfTanks-Decompiled
706ac55d919b766aa89f90c97a75672bf2142611
5025466edd0dd3e5e50a6c60feb02ae793f6adac
refs/heads/master
2021-09-21T15:10:32.655452
2018-08-28T07:34:00
2018-08-28T07:34:00
null
0
0
null
null
null
null
UTF-8
Python
false
false
6,129
py
# Python bytecode 2.7 (decompiled from Python 2.7) # Embedded file name: scripts/client/gui/Scaleform/daapi/view/lobby/rankedBattles/RankedBattlesCalendarPopover.py from datetime import datetime import BigWorld from gui.Scaleform.locale.COMMON import COMMON from gui.Scaleform.daapi.settings.views import VIEW_ALIAS from gui.Scaleform.genConsts.TOOLTIPS_CONSTANTS import TOOLTIPS_CONSTANTS from gui.Scaleform.managers.UtilsManager import UtilsManager from gui.ranked_battles.ranked_models import CYCLE_STATUS from helpers import i18n, dependency from gui.Scaleform.daapi.view.meta.RankedBattlesCalendarPopoverMeta import RankedBattlesCalendarPopoverMeta from gui.Scaleform.locale.RANKED_BATTLES import RANKED_BATTLES from gui.shared.formatters import text_styles from helpers import time_utils from skeletons.gui.game_control import IRankedBattlesController from skeletons.connection_mgr import IConnectionManager ARROW_LEFT = 3 class RankedBattlesCalendarPopover(RankedBattlesCalendarPopoverMeta): rankedController = dependency.descriptor(IRankedBattlesController) connectionMgr = dependency.descriptor(IConnectionManager) arrowDirection = ARROW_LEFT def __init__(self, ctx=None): super(RankedBattlesCalendarPopover, self).__init__() self.__seasonInfo = self.rankedController.getCurrentSeason() self.__currentCycle = self.__seasonInfo.getNumber() self.__selectedDate = time_utils.getCurrentLocalServerTimestamp() self.__weekDays = self._createUtilsManager().getWeekDayNames(full=True, isLower=False, isUpper=False, useRegionSettings=False) data = ctx.get('data', None) if data is not None: self.arrowDirection = data.arrowDirection return def _createUtilsManager(self): return UtilsManager() def _populate(self): super(RankedBattlesCalendarPopover, self)._populate() self.as_setDataS({'rawDate': self.__selectedDate, 'arrowDirection': self.arrowDirection, 'statusText': self.__getCurrnetCycleString(), 'statusTooltip': TOOLTIPS_CONSTANTS.RANKED_CALENDAR_STEPS_INFO}) 
self.onDaySelect(time_utils.getCurrentTimestamp()) calendar = self.__getCalendar() if calendar is not None: calendar.as_setMinAvailableDateS(self.__seasonInfo.getStartDate()) calendar.as_setMaxAvailableDateS(self.__seasonInfo.getEndDate()) calendar.as_openMonthS(self.__selectedDate) calendar.as_selectDateS(self.__selectedDate) calendar.as_setHighlightedDaysS([self.__seasonInfo.getCycleStartDate(), self.__seasonInfo.getCycleEndDate()]) calendar.as_setDayTooltipTypeS(TOOLTIPS_CONSTANTS.RANKED_CALENDAR_DAY_INFO) return def onDaySelect(self, date): formattedDate = datetime.fromtimestamp(date) selectedDayOfWeek = self.__weekDays[formattedDate.weekday()] self.as_setDayDataS({'primeTimeGroupData': self.__constructPrimeTimes(date), 'dayText': text_styles.superPromoTitle(formattedDate.day), 'dayNameText': text_styles.middleTitle(selectedDayOfWeek)}) def __getCycleListString(self): key = RANKED_BATTLES.RANKEDBATTLEVIEW_STATUSBLOCK_CALENDARPOPOVER_CYCLEITEM cycles = self.__seasonInfo.getAllCycles() result = [] for cycle in sorted(cycles.values()): formatter = text_styles.main if cycle.status == CYCLE_STATUS.CURRENT else text_styles.standard startDate = time_utils.getTimeStructInLocal(cycle.startDate) endDate = time_utils.getTimeStructInLocal(cycle.endDate) result.append(formatter(i18n.makeString(key, cycleNumber=self.__currentCycle, day0='{:02d}'.format(startDate.tm_mday), month0='{:02d}'.format(startDate.tm_mon), day1='{:02d}'.format(endDate.tm_mday), month1='{:02d}'.format(endDate.tm_mon)))) def __constructPrimeTimes(self, selectedTime): items = [] serversPeriodsMapping = self.rankedController.getPrimeTimesForDay(selectedTime, groupIdentical=True) frmt = BigWorld.wg_getShortTimeFormat for serverName in sorted(serversPeriodsMapping.keys()): periodsStr = [] dayPeriods = serversPeriodsMapping[serverName] if dayPeriods: for periodStart, periodEnd in dayPeriods: periodsStr.append(i18n.makeString(RANKED_BATTLES.CALENDARDAY_TIME, start=frmt(periodStart), end=frmt(periodEnd))) 
else: periodsStr = i18n.makeString(COMMON.COMMON_DASH) if dayPeriods: items.append({'serverNameText': text_styles.highlightText(serverName), 'primeTimeText': '\n'.join(periodsStr)}) return items def __getCurrnetCycleString(self): key = RANKED_BATTLES.RANKEDBATTLEVIEW_STATUSBLOCK_CALENDARPOPOVER_CYCLEITEM cycles = self.__seasonInfo.getAllCycles() for cycle in sorted(cycles.values()): if cycle.status == CYCLE_STATUS.CURRENT: formatter = text_styles.main startDate = time_utils.getTimeStructInLocal(cycle.startDate) endDate = time_utils.getTimeStructInLocal(cycle.endDate) return formatter(i18n.makeString(key, cycleNumber=self.__currentCycle, day0='{:02d}'.format(startDate.tm_mday), month0='{:02d}'.format(startDate.tm_mon), day1='{:02d}'.format(endDate.tm_mday), month1='{:02d}'.format(endDate.tm_mon))) def __getAttentionText(self): key = RANKED_BATTLES.RANKEDBATTLEVIEW_STATUSBLOCK_CALENDARPOPOVER_ATTENTIONTEXT cycleNumber = self.__currentCycle timeDelta = time_utils.getTimeDeltaFromNow(self.__seasonInfo.getCycleEndDate()) endTimeStr = time_utils.getTillTimeString(timeDelta, RANKED_BATTLES.STATUS_TIMELEFT) if timeDelta <= time_utils.ONE_HOUR: formatter = text_styles.alert else: formatter = text_styles.neutral return formatter(i18n.makeString(key, cycleNumber=cycleNumber, timeLeft=endTimeStr)) def __getCalendar(self): return self.components.get(VIEW_ALIAS.CALENDAR)
f9a929408b32170e178231ad8907c38aa8647599
9cef4ef20efd0eec18846242e78be0b9be144c30
/teacher_cade/day19/14.greenlet.py
e2d537f9ff67e3f0659a5afb381d8128caa9ab71
[]
no_license
Vaild/python-learn
4e6511a62a40b6104b081e0f8fe30f7d829901f5
5d602daf3b4b7e42349b7d9251df1f4dd62c299c
refs/heads/master
2022-11-19T00:47:48.808384
2020-07-20T14:27:49
2020-07-20T14:27:49
279,044,379
0
0
null
null
null
null
UTF-8
Python
false
false
360
py
#!/usr/bin/python3 # coding=utf-8 from greenlet import greenlet import time def test1(): while True: print("---A--") gr2.switch() time.sleep(0.5) def test2(): while True: print("---B--") gr1.switch() time.sleep(0.5) gr1 = greenlet(test1) gr2 = greenlet(test2) # 切换到gr1中运行 gr1.switch()
5cae4351928b729521bafe551e04ae158fbbd2f3
d60acaac9e460c5693efe61449667b3c399c53c8
/diffeq/logisticbifurcation.py
392cc43dfa415350c9c23054e6d5784488977d9c
[]
no_license
HussainAther/mathematics
53ea7fb2470c88d674faa924405786ba3b860705
6849cc891bbb9ac69cb20dfb13fe6bb5bd77d8c5
refs/heads/master
2021-07-22T00:07:53.940786
2020-05-07T03:11:17
2020-05-07T03:11:17
157,749,226
2
0
null
null
null
null
UTF-8
Python
false
false
2,159
py
import matplotlib.pyplot as plt import numpy as np """ Logistic map bifurcation """ def logistic(r, x): """ Logistic map function for nonlinear systems """ return r * x * (1 - x) x = np.linspace(0, 1) fig, ax = plt.subplots(1, 1) ax.plot(x, logistic(2, x), "k") def plotsystem(r, x0, n, ax=None): """ Plot the function and the y=x diagonal line. """ t = np.linspace(0, 1) ax.plot(t, logistic(r, t), "k", lw=2) ax.plot([0, 1], [0, 1], "k", lw=2) # Recursively apply y=f(x) and plot two lines: # (x, x) -> (x, y) # (x, y) -> (y, y) x = x0 for i in range(n): y = logistic(r, x) # Plot the two lines. ax.plot([x, x], [x, y], "k", lw=1) ax.plot([x, y], [y, y], "k", lw=1) # Plot the positions with increasing # opacity. ax.plot([x], [y], "ok", ms=10, alpha=(i + 1) / n) x = y ax.set_xlim(0, 1) ax.set_ylim(0, 1) fig, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 6), sharey=True) plotsystem(2.5, .1, 10, ax=ax1) plotsystem(3.5, .1, 10, ax=ax2) n = 10000 r = np.linspace(2.5, 4.0, n) iterations = 1000 last = 100 x = 1e-5 * np.ones(n) # lyapunov = np.zeros(n) # fig, (ax1, ax2) = plt.subplots(2, 1, figsize=(8, 9), # sharex=True) for i in range(iterations): x = logistic(r, x) # We compute the partial sum of the # Lyapunov exponent. # lyapunov += np.log(abs(r - 2 * r * x)) # We display the bifurcation diagram. if i >= (iterations - last): ax1.plot(r, x, ",k", alpha=.25) ax1.set_xlim(2.5, 4) ax1.set_title("Bifurcation diagram") # Display the Lyapunov exponent. # Horizontal line. # ax2.axhline(0, color="k", lw=.5, alpha=.5) # Negative Lyapunov exponent. # ax2.plot(r[lyapunov < 0], # lyapunov[lyapunov < 0] / iterations, # ".k", alpha=.5, ms=.5) # Positive Lyapunov exponent. # ax2.plot(r[lyapunov >= 0], # lyapunov[lyapunov >= 0] / iterations, # ".r", alpha=.5, ms=.5) # ax2.set_xlim(2.5, 4) # ax2.set_ylim(-2, 1) # ax2.set_title("Lyapunov exponent") # plt.tight_layout()
9bc70906c5a573ba42746d4a2f4efbf81e0e86c1
98f730ec6a43d8be4a34b0f2a44a9d35989d2287
/pynifi_client/models/tenants_entity.py
b4af3df3c70dc03de0e1a0bfb4fb63eb26b9a058
[]
no_license
scottwr98/pynifi-client
9337a4f322536ee466d419a788b8b5948cdc62d7
013ac2ffa591284a0d6cbb9ed552681cc6f91165
refs/heads/master
2020-04-18T08:47:03.680749
2017-11-04T23:59:58
2017-11-04T23:59:58
null
0
0
null
null
null
null
UTF-8
Python
false
false
4,219
py
# coding: utf-8 """ NiFi Rest Api The Rest Api provides programmatic access to command and control a NiFi instance in real time. Start and stop processors, monitor queues, query provenance data, and more. Each endpoint below includes a description, definitions of the expected input and output, potential response codes, and the authorizations required to invoke each service. # noqa: E501 OpenAPI spec version: 1.4.0 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git """ import pprint import re # noqa: F401 import six from pynifi_client.models.tenant_entity import TenantEntity # noqa: F401,E501 class TenantsEntity(object): """NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. """ """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'users': 'list[TenantEntity]', 'user_groups': 'list[TenantEntity]' } attribute_map = { 'users': 'users', 'user_groups': 'userGroups' } def __init__(self, users=None, user_groups=None): # noqa: E501 """TenantsEntity - a model defined in Swagger""" # noqa: E501 self._users = None self._user_groups = None self.discriminator = None if users is not None: self.users = users if user_groups is not None: self.user_groups = user_groups @property def users(self): """Gets the users of this TenantsEntity. # noqa: E501 :return: The users of this TenantsEntity. # noqa: E501 :rtype: list[TenantEntity] """ return self._users @users.setter def users(self, users): """Sets the users of this TenantsEntity. :param users: The users of this TenantsEntity. # noqa: E501 :type: list[TenantEntity] """ self._users = users @property def user_groups(self): """Gets the user_groups of this TenantsEntity. # noqa: E501 :return: The user_groups of this TenantsEntity. 
# noqa: E501 :rtype: list[TenantEntity] """ return self._user_groups @user_groups.setter def user_groups(self, user_groups): """Sets the user_groups of this TenantsEntity. :param user_groups: The user_groups of this TenantsEntity. # noqa: E501 :type: list[TenantEntity] """ self._user_groups = user_groups def to_dict(self): """Returns the model properties as a dict""" result = {} for attr, _ in six.iteritems(self.swagger_types): value = getattr(self, attr) if isinstance(value, list): result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) elif hasattr(value, "to_dict"): result[attr] = value.to_dict() elif isinstance(value, dict): result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) else: result[attr] = value return result def to_str(self): """Returns the string representation of the model""" return pprint.pformat(self.to_dict()) def __repr__(self): """For `print` and `pprint`""" return self.to_str() def __eq__(self, other): """Returns true if both objects are equal""" if not isinstance(other, TenantsEntity): return False return self.__dict__ == other.__dict__ def __ne__(self, other): """Returns true if both objects are not equal""" return not self == other
d1af6798fa61b5eb64f308f4719273047da1e155
6189f34eff2831e3e727cd7c5e43bc5b591adffc
/WebMirror/management/rss_parser_funcs/feed_parse_extractSleepysmutCom.py
315dbd399ffd084df7ad20e203281fee61738490
[ "BSD-3-Clause" ]
permissive
fake-name/ReadableWebProxy
24603660b204a9e7965cfdd4a942ff62d7711e27
ca2e086818433abc08c014dd06bfd22d4985ea2a
refs/heads/master
2023-09-04T03:54:50.043051
2023-08-26T16:08:46
2023-08-26T16:08:46
39,611,770
207
20
BSD-3-Clause
2023-09-11T15:48:15
2015-07-24T04:30:43
Python
UTF-8
Python
false
false
541
py
def extractSleepysmutCom(item): ''' Parser for 'sleepysmut.com' ''' vol, chp, frag, postfix = extractVolChapterFragmentPostfix(item['title']) if not (chp or vol) or "preview" in item['title'].lower(): return None tagmap = [ ('PRC', 'PRC', 'translated'), ('Loiterous', 'Loiterous', 'oel'), ] for tagname, name, tl_type in tagmap: if tagname in item['tags']: return buildReleaseMessageWithType(item, name, vol, chp, frag=frag, postfix=postfix, tl_type=tl_type) return False
b0391f312af0eaea6305f39574b9a3f17f511b59
b1e7481f8b5bf40c2547c95b1863e25b11b8ef78
/Kai/crab/NANOv7_NoveCampaign/2018/crab_cfg_2018_ElMu_A.py
6670c230a950ef43e35c8eec67ec84cd44951904
[ "Apache-2.0" ]
permissive
NJManganelli/FourTopNAOD
3df39fd62c0546cdbb1886b23e35ebdc1d3598ad
c86181ae02b1933be59d563c94e76d39b83e0c52
refs/heads/master
2022-12-22T22:33:58.697162
2022-12-17T01:19:36
2022-12-17T01:19:36
143,607,743
1
1
Apache-2.0
2022-06-04T23:11:42
2018-08-05T11:40:42
Python
UTF-8
Python
false
false
1,524
py
import os from WMCore.Configuration import Configuration from CRABClient.UserUtilities import config, getUsernameFromCRIC config = Configuration() config.section_("General") config.General.requestName = '2018_ElMu_A' config.General.transferOutputs = True config.General.transferLogs = True config.section_("JobType") config.JobType.allowUndistributedCMSSW = True config.JobType.pluginName = 'Analysis' config.JobType.psetName = 'crab_PSet_2018_ElMu_A.py' config.JobType.maxMemoryMB = 3000 config.JobType.maxJobRuntimeMin = 1315 config.JobType.numCores = 1 config.JobType.scriptExe = 'crab_script_2018_ElMu_A.sh' config.JobType.inputFiles = ['crab_script_2018_ElMu_A.py', os.path.join(os.environ['CMSSW_BASE'],'src/PhysicsTools/NanoAODTools/scripts/haddnano.py'), ] config.JobType.outputFiles = [] #['hist.root'] config.JobType.sendPythonFolder = True config.section_("Data") config.Data.inputDataset = '/MuonEG/Run2018A-02Apr2020-v1/NANOAOD' config.Data.inputDBS = 'global' config.Data.splitting = 'FileBased' if config.Data.splitting == 'FileBased': config.Data.unitsPerJob = 1 # config.Data.totalUnits = $TOTAL_UNITS # config.Data.userInputFiles = [] # config.Data.outLFNDirBase = '/store/user/{user}/NoveCampaign'.format(user=getUsernameFromCRIC()) config.Data.outLFNDirBase = '/store/group/fourtop/NoveCampaign' config.Data.publication = True config.Data.outputDatasetTag = 'NoveCampaign' config.section_("Site") config.Site.storageSite = 'T2_BE_IIHE'
3fff1e8913ecade61c264485b8d0d2ab2e8f1eef
ff768174490619c119d166273365dcc480e7201c
/tuiuiu/tuiuiuimages/migrations/0008_image_created_at_index.py
dbcc329edff45d13de512a5f638dee64e8a53c0d
[ "BSD-3-Clause", "LicenseRef-scancode-public-domain" ]
permissive
caputomarcos/tuiuiu.io
15ea9323be09b69efb6b88490c2bb558ffb4cc55
d8fb57cf95487e7fe1454b2130ef18acc916da46
refs/heads/master
2022-03-02T12:56:43.889894
2017-09-23T22:53:51
2017-09-23T22:53:51
102,543,365
3
1
NOASSERTION
2022-02-02T10:46:32
2017-09-06T00:30:06
Python
UTF-8
Python
false
false
457
py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('tuiuiuimages', '0007_image_file_size'), ] operations = [ migrations.AlterField( model_name='image', name='created_at', field=models.DateTimeField(db_index=True, verbose_name='Created at', auto_now_add=True), ), ]
311abd6f195d36542f90a27ae366b5bbe1325dd5
ca7aa979e7059467e158830b76673f5b77a0f5a3
/Python_codes/p02586/s677278751.py
be1ac288360753caa4fa9d68ee573cb35be6d292
[]
no_license
Aasthaengg/IBMdataset
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
refs/heads/main
2023-04-22T10:22:44.763102
2021-05-13T17:27:22
2021-05-13T17:27:22
367,112,348
0
0
null
null
null
null
UTF-8
Python
false
false
879
py
import sys sys.setrecursionlimit(10**7) readline = sys.stdin.buffer.readline def readstr():return readline().rstrip().decode() def readstrs():return list(readline().decode().split()) def readint():return int(readline()) def readints():return list(map(int,readline().split())) def printrows(x):print('\n'.join(map(str,x))) def printline(x):print(' '.join(map(str,x))) r,c,k = readints() a = [[0]*(c+1) for i in range(r+1)] for i in range(k): R,C,V = readints() a[R][C] = V dp = [0]*(r+1)*(c+1)*4 for x in range((r+1)*(c+1)*4): i = x//((c+1)*4) l = x%((c+1)*4) j = l//4 l %= 4 if i==0 or j==0: continue if l==0: dp[x] = max(dp[i*(c+1)*4 + (j-1)*4], dp[(i-1)*(c+1)*4 + j*4 + 3]) else: dp[x] = max(dp[i*(c+1)*4 + (j-1)*4 + l], dp[(i-1)*(c+1)*4 + j*4 + 3]+a[i][j], dp[i*(c+1)*4 + (j-1)*4 + l-1]+a[i][j]) print(dp[-1])
14c287e40ef9f07fe3dd6944c53d3460a99de7cb
85b7487c00cabf70cbcf180c5015ac4886e78fb1
/test/support/__init__.py
bdbdb8f5f19075bc664a148dbdf532d577c3550c
[]
no_license
mkatsimpris/test_jpeg
7e686f27ac54db4128f4edbeb42b7cd284db0fa4
ee626d87e26a08d5ce80f73a883f00703ff34e70
refs/heads/master
2020-04-06T04:49:58.952565
2016-08-17T21:41:25
2016-08-17T21:41:25
49,828,665
3
2
null
2016-07-25T16:50:52
2016-01-17T17:58:21
Verilog
UTF-8
Python
false
false
254
py
from __future__ import absolute_import from .jpeg_prep_cosim import prep_cosim from .jpeg_v1_intf import JPEGEncV1 from .jpeg_v2_intf import JPEGEncV2 from .jpegenc_v1_top import convert as convertv1 from .utils import set_default_args, get_cli_args
19f97b46e444e83a2f72744a9002611efe7ccf0a
69e5676a801c5446ddec5e1cfd8daf527dbb3ab9
/stringcheese/wrangling/get_fficutout.py
917178ace1351e624d1dfa092c3a7e383136a123
[ "MIT" ]
permissive
lgbouma/stringcheese
96d8d48aaa8da9da92744401bba5498399758636
e7f5919335f18d54f331e67f4df1a48e4904526d
refs/heads/master
2020-07-07T10:11:29.697659
2020-03-29T19:05:52
2020-03-29T19:05:52
203,321,632
0
0
null
null
null
null
UTF-8
Python
false
false
632
py
import requests from astroquery.mast import Tesscut def get_fficutout(c_obj, cutoutdir=None, sector=None): # c_obj (SkyCoord): location of target star print('beginning download tesscut for {}'.format(repr(c_obj))) try: tab = Tesscut.download_cutouts(c_obj, size=20, sector=sector, path=cutoutdir) except (requests.exceptions.HTTPError, requests.exceptions.ConnectionError) as e: print('got {}, try again'.format(repr(e))) tab = Tesscut.download_cutouts(c_obj, size=20, sector=sector, path=cutoutdir)
0945e2340abb7961a09bf19356b325727714a0a7
b92b0e9ba2338ab311312dcbbeefcbb7c912fc2e
/build/shogun_lib/examples/undocumented/python_modular/kernel_spherical_modular.py
ef002d63c31f4dc1896ca111b2223acffcd201b9
[]
no_license
behollis/muViewBranch
384f8f97f67723b2a4019294854969d6fc1f53e8
1d80914f57e47b3ad565c4696861f7b3213675e0
refs/heads/master
2021-01-10T13:22:28.580069
2015-10-27T21:43:20
2015-10-27T21:43:20
45,059,082
1
0
null
null
null
null
UTF-8
Python
false
false
919
py
from tools.load import LoadMatrix from numpy import where lm=LoadMatrix() traindat = lm.load_numbers('../data/fm_train_real.dat') testdat = lm.load_numbers('../data/fm_test_real.dat') parameter_list=[[traindat,testdat, 1.0],[traindat,testdat, 5.0]] def kernel_spherical_modular (fm_train_real=traindat,fm_test_real=testdat, sigma=1.0): from shogun.Features import RealFeatures from shogun.Kernel import MultiquadricKernel from shogun.Distance import EuclidianDistance feats_train=RealFeatures(fm_train_real) feats_test=RealFeatures(fm_test_real) distance=EuclidianDistance(feats_train, feats_train) kernel=MultiquadricKernel(feats_train, feats_train, sigma, distance) km_train=kernel.get_kernel_matrix() kernel.init(feats_train, feats_test) km_test=kernel.get_kernel_matrix() return km_train,km_test,kernel if __name__=='__main__': print('Spherical') kernel_spherical_modular(*parameter_list[0])
[ "prosen@305cdda6-5ce1-45b3-a98d-dfc68c8b3305" ]
prosen@305cdda6-5ce1-45b3-a98d-dfc68c8b3305
e5606578ee246cb02c6785fd196d6805f1b96756
5537eec7f43098d216d2b550678c8d10b2a26f09
/venv/tower/lib/python2.7/site-packages/ldif.py
a96b5a03bce37713258634fe0021605318f206ef
[]
no_license
wipro-sdx/Automation
f0ae1512b8d9d491d7bacec94c8906d06d696407
a8c46217d0fbe51a71597b5db87cbe98ed19297a
refs/heads/master
2021-07-08T11:09:05.314435
2018-05-02T07:18:54
2018-05-02T07:18:54
131,812,982
0
1
null
2020-07-23T23:22:33
2018-05-02T07:15:28
Python
UTF-8
Python
false
false
18,540
py
""" ldif - generate and parse LDIF data (see RFC 2849) See http://www.python-ldap.org/ for details. $Id: ldif.py,v 1.101 2016/11/11 14:41:07 stroeder Exp $ Python compability note: Tested with Python 2.0+, but should work with Python 1.5.2+. """ __version__ = '2.4.28' __all__ = [ # constants 'ldif_pattern', # functions 'CreateLDIF','ParseLDIF', # classes 'LDIFWriter', 'LDIFParser', 'LDIFRecordList', 'LDIFCopy', ] import urlparse,urllib,base64,re,types try: from cStringIO import StringIO except ImportError: from StringIO import StringIO attrtype_pattern = r'[\w;.-]+(;[\w_-]+)*' attrvalue_pattern = r'(([^,]|\\,)+|".*?")' attrtypeandvalue_pattern = attrtype_pattern + r'[ ]*=[ ]*' + attrvalue_pattern rdn_pattern = attrtypeandvalue_pattern + r'([ ]*\+[ ]*' + attrtypeandvalue_pattern + r')*[ ]*' dn_pattern = rdn_pattern + r'([ ]*,[ ]*' + rdn_pattern + r')*[ ]*' dn_regex = re.compile('^%s$' % dn_pattern) ldif_pattern = '^((dn(:|::) %(dn_pattern)s)|(%(attrtype_pattern)s(:|::) .*)$)+' % vars() MOD_OP_INTEGER = { 'add' :0, # ldap.MOD_REPLACE 'delete' :1, # ldap.MOD_DELETE 'replace':2, # ldap.MOD_REPLACE } MOD_OP_STR = { 0:'add',1:'delete',2:'replace' } CHANGE_TYPES = ['add','delete','modify','modrdn'] valid_changetype_dict = {} for c in CHANGE_TYPES: valid_changetype_dict[c]=None def is_dn(s): """ returns 1 if s is a LDAP DN """ if s=='': return 1 rm = dn_regex.match(s) return rm!=None and rm.group(0)==s SAFE_STRING_PATTERN = '(^(\000|\n|\r| |:|<)|[\000\n\r\200-\377]+|[ ]+$)' safe_string_re = re.compile(SAFE_STRING_PATTERN) def list_dict(l): """ return a dictionary with all items of l being the keys of the dictionary """ return dict([(i,None) for i in l]) class LDIFWriter: """ Write LDIF entry or change records to file object Copy LDIF input to a file output object containing all data retrieved via URLs """ def __init__(self,output_file,base64_attrs=None,cols=76,line_sep='\n'): """ output_file file object for output base64_attrs list of attribute types to be base64-encoded 
in any case cols Specifies how many columns a line may have before it's folded into many lines. line_sep String used as line separator """ self._output_file = output_file self._base64_attrs = list_dict([a.lower() for a in (base64_attrs or [])]) self._cols = cols self._last_line_sep = line_sep self.records_written = 0 def _unfold_lines(self,line): """ Write string line as one or more folded lines """ # Check maximum line length line_len = len(line) if line_len<=self._cols: self._output_file.write(line) self._output_file.write(self._last_line_sep) else: # Fold line pos = self._cols self._output_file.write(line[0:min(line_len,self._cols)]) self._output_file.write(self._last_line_sep) while pos<line_len: self._output_file.write(' ') self._output_file.write(line[pos:min(line_len,pos+self._cols-1)]) self._output_file.write(self._last_line_sep) pos = pos+self._cols-1 return # _unfold_lines() def _needs_base64_encoding(self,attr_type,attr_value): """ returns 1 if attr_value has to be base-64 encoded because of special chars or because attr_type is in self._base64_attrs """ return self._base64_attrs.has_key(attr_type.lower()) or \ not safe_string_re.search(attr_value) is None def _unparseAttrTypeandValue(self,attr_type,attr_value): """ Write a single attribute type/value pair attr_type attribute type attr_value attribute value """ if self._needs_base64_encoding(attr_type,attr_value): # Encode with base64 self._unfold_lines(':: '.join([attr_type,base64.encodestring(attr_value).replace('\n','')])) else: self._unfold_lines(': '.join([attr_type,attr_value])) return # _unparseAttrTypeandValue() def _unparseEntryRecord(self,entry): """ entry dictionary holding an entry """ attr_types = entry.keys()[:] attr_types.sort() for attr_type in attr_types: for attr_value in entry[attr_type]: self._unparseAttrTypeandValue(attr_type,attr_value) def _unparseChangeRecord(self,modlist): """ modlist list of additions (2-tuple) or modifications (3-tuple) """ mod_len = len(modlist[0]) if 
mod_len==2: changetype = 'add' elif mod_len==3: changetype = 'modify' else: raise ValueError("modlist item of wrong length: %d" % (mod_len)) self._unparseAttrTypeandValue('changetype',changetype) for mod in modlist: if mod_len==2: mod_type,mod_vals = mod elif mod_len==3: mod_op,mod_type,mod_vals = mod self._unparseAttrTypeandValue(MOD_OP_STR[mod_op],mod_type) else: raise ValueError("Subsequent modlist item of wrong length") if mod_vals: for mod_val in mod_vals: self._unparseAttrTypeandValue(mod_type,mod_val) if mod_len==3: self._output_file.write('-'+self._last_line_sep) def unparse(self,dn,record): """ dn string-representation of distinguished name record Either a dictionary holding the LDAP entry {attrtype:record} or a list with a modify list like for LDAPObject.modify(). """ # Start with line containing the distinguished name self._unparseAttrTypeandValue('dn',dn) # Dispatch to record type specific writers if isinstance(record,types.DictType): self._unparseEntryRecord(record) elif isinstance(record,types.ListType): self._unparseChangeRecord(record) else: raise ValueError('Argument record must be dictionary or list instead of %s' % (repr(record))) # Write empty line separating the records self._output_file.write(self._last_line_sep) # Count records written self.records_written = self.records_written+1 return # unparse() def CreateLDIF(dn,record,base64_attrs=None,cols=76): """ Create LDIF single formatted record including trailing empty line. This is a compability function. Use is deprecated! dn string-representation of distinguished name record Either a dictionary holding the LDAP entry {attrtype:record} or a list with a modify list like for LDAPObject.modify(). base64_attrs list of attribute types to be base64-encoded in any case cols Specifies how many columns a line may have before it's folded into many lines. 
""" f = StringIO() ldif_writer = LDIFWriter(f,base64_attrs,cols,'\n') ldif_writer.unparse(dn,record) s = f.getvalue() f.close() return s class LDIFParser: """ Base class for a LDIF parser. Applications should sub-class this class and override method handle() to implement something meaningful. Public class attributes: records_read Counter for records processed so far """ def __init__( self, input_file, ignored_attr_types=None, max_entries=0, process_url_schemes=None, line_sep='\n' ): """ Parameters: input_file File-object to read the LDIF input from ignored_attr_types Attributes with these attribute type names will be ignored. max_entries If non-zero specifies the maximum number of entries to be read from f. process_url_schemes List containing strings with URLs schemes to process with urllib. An empty list turns off all URL processing and the attribute is ignored completely. line_sep String used as line separator """ self._input_file = input_file self._max_entries = max_entries self._process_url_schemes = list_dict([s.lower() for s in (process_url_schemes or [])]) self._ignored_attr_types = list_dict([a.lower() for a in (ignored_attr_types or [])]) self._last_line_sep = line_sep self.version = None # Initialize counters self.line_counter = 0 self.byte_counter = 0 self.records_read = 0 self.changetype_counter = {}.fromkeys(CHANGE_TYPES,0) # Store some symbols for better performance self._base64_decodestring = base64.decodestring # Read very first line try: self._last_line = self._readline() except EOFError: self._last_line = '' def handle(self,dn,entry): """ Process a single content LDIF record. This method should be implemented by applications using LDIFParser. 
""" pass def _readline(self): s = self._input_file.readline() self.line_counter = self.line_counter + 1 self.byte_counter = self.byte_counter + len(s) if not s: return None elif s[-2:]=='\r\n': return s[:-2] elif s[-1:]=='\n': return s[:-1] else: return s def _unfold_lines(self): """ Unfold several folded lines with trailing space into one line """ if self._last_line is None: raise EOFError('EOF reached after %d lines (%d bytes)' % ( self.line_counter, self.byte_counter, )) unfolded_lines = [ self._last_line ] next_line = self._readline() while next_line and next_line[0]==' ': unfolded_lines.append(next_line[1:]) next_line = self._readline() self._last_line = next_line return ''.join(unfolded_lines) def _next_key_and_value(self): """ Parse a single attribute type and value pair from one or more lines of LDIF data """ # Reading new attribute line unfolded_line = self._unfold_lines() # Ignore comments which can also be folded while unfolded_line and unfolded_line[0]=='#': unfolded_line = self._unfold_lines() if not unfolded_line: return None,None if unfolded_line=='-': return '-',None try: colon_pos = unfolded_line.index(':') except ValueError,e: raise ValueError('no value-spec in %s' % (repr(unfolded_line))) attr_type = unfolded_line[0:colon_pos] # if needed attribute value is BASE64 decoded value_spec = unfolded_line[colon_pos:colon_pos+2] if value_spec==': ': attr_value = unfolded_line[colon_pos+2:].lstrip() elif value_spec=='::': # attribute value needs base64-decoding attr_value = self._base64_decodestring(unfolded_line[colon_pos+2:]) elif value_spec==':<': # fetch attribute value from URL url = unfolded_line[colon_pos+2:].strip() attr_value = None if self._process_url_schemes: u = urlparse.urlparse(url) if self._process_url_schemes.has_key(u[0]): attr_value = urllib.urlopen(url).read() else: attr_value = unfolded_line[colon_pos+1:] return attr_type,attr_value def _consume_empty_lines(self): """ Consume empty lines until first non-empty line. 
Must only be used between full records! Returns non-empty key-value-tuple. """ # Local symbol for better performance next_key_and_value = self._next_key_and_value # Consume empty lines try: k,v = next_key_and_value() while k==v==None: k,v = next_key_and_value() except EOFError: k,v = None,None return k,v def parse_entry_records(self): """ Continously read and parse LDIF entry records """ # Local symbol for better performance next_key_and_value = self._next_key_and_value try: # Consume empty lines k,v = self._consume_empty_lines() # Consume 'version' line if k=='version': self.version = int(v) k,v = self._consume_empty_lines() except EOFError: return # Loop for processing whole records while k!=None and \ (not self._max_entries or self.records_read<self._max_entries): # Consume first line which must start with "dn: " if k!='dn': raise ValueError('Line %d: First line of record does not start with "dn:": %s' % (self.line_counter,repr(k))) if not is_dn(v): raise ValueError('Line %d: Not a valid string-representation for dn: %s.' % (self.line_counter,repr(v))) dn = v entry = {} # Consume second line of record k,v = next_key_and_value() # Loop for reading the attributes while k!=None: # Add the attribute to the entry if not ignored attribute if not k.lower() in self._ignored_attr_types: try: entry[k].append(v) except KeyError: entry[k]=[v] # Read the next line within the record try: k,v = next_key_and_value() except EOFError: k,v = None,None # handle record self.handle(dn,entry) self.records_read = self.records_read + 1 # Consume empty separator line(s) k,v = self._consume_empty_lines() return # parse_entry_records() def parse(self): """ Invokes LDIFParser.parse_entry_records() for backward compability """ return self.parse_entry_records() # parse() def handle_modify(self,dn,modops,controls=None): """ Process a single LDIF record representing a single modify operation. This method should be implemented by applications using LDIFParser. 
""" controls = [] or None pass def parse_change_records(self): # Local symbol for better performance next_key_and_value = self._next_key_and_value # Consume empty lines k,v = self._consume_empty_lines() # Consume 'version' line if k=='version': self.version = int(v) k,v = self._consume_empty_lines() # Loop for processing whole records while k!=None and \ (not self._max_entries or self.records_read<self._max_entries): # Consume first line which must start with "dn: " if k!='dn': raise ValueError('Line %d: First line of record does not start with "dn:": %s' % (self.line_counter,repr(k))) if not is_dn(v): raise ValueError('Line %d: Not a valid string-representation for dn: %s.' % (self.line_counter,repr(v))) dn = v # Consume second line of record k,v = next_key_and_value() # Read "control:" lines controls = [] while k!=None and k=='control': try: control_type,criticality,control_value = v.split(' ',2) except ValueError: control_value = None control_type,criticality = v.split(' ',1) controls.append((control_type,criticality,control_value)) k,v = next_key_and_value() # Determine changetype first changetype = None # Consume changetype line of record if k=='changetype': if not v in valid_changetype_dict: raise ValueError('Invalid changetype: %s' % repr(v)) changetype = v k,v = next_key_and_value() if changetype=='modify': # From here we assume a change record is read with changetype: modify modops = [] try: # Loop for reading the list of modifications while k!=None: # Extract attribute mod-operation (add, delete, replace) try: modop = MOD_OP_INTEGER[k] except KeyError: raise ValueError('Line %d: Invalid mod-op string: %s' % (self.line_counter,repr(k))) # we now have the attribute name to be modified modattr = v modvalues = [] try: k,v = next_key_and_value() except EOFError: k,v = None,None while k==modattr: modvalues.append(v) try: k,v = next_key_and_value() except EOFError: k,v = None,None modops.append((modop,modattr,modvalues or None)) k,v = next_key_and_value() if 
k=='-': # Consume next line k,v = next_key_and_value() except EOFError: k,v = None,None if modops: # append entry to result list self.handle_modify(dn,modops,controls) else: # Consume the unhandled change record while k!=None: k,v = next_key_and_value() # Consume empty separator line(s) k,v = self._consume_empty_lines() # Increment record counters try: self.changetype_counter[changetype] = self.changetype_counter[changetype] + 1 except KeyError: self.changetype_counter[changetype] = 1 self.records_read = self.records_read + 1 return # parse_change_records() class LDIFRecordList(LDIFParser): """ Collect all records of LDIF input into a single list. of 2-tuples (dn,entry). It can be a memory hog! """ def __init__( self, input_file, ignored_attr_types=None,max_entries=0,process_url_schemes=None ): """ See LDIFParser.__init__() Additional Parameters: all_records List instance for storing parsed records """ LDIFParser.__init__(self,input_file,ignored_attr_types,max_entries,process_url_schemes) self.all_records = [] self.all_modify_changes = [] def handle(self,dn,entry): """ Append single record to dictionary of all records. """ self.all_records.append((dn,entry)) def handle_modify(self,dn,modops,controls=None): """ Process a single LDIF record representing a single modify operation. This method should be implemented by applications using LDIFParser. """ controls = [] or None self.all_modify_changes.append((dn,modops,controls)) class LDIFCopy(LDIFParser): """ Copy LDIF input to LDIF output containing all data retrieved via URLs """ def __init__( self, input_file,output_file, ignored_attr_types=None,max_entries=0,process_url_schemes=None, base64_attrs=None,cols=76,line_sep='\n' ): """ See LDIFParser.__init__() and LDIFWriter.__init__() """ LDIFParser.__init__(self,input_file,ignored_attr_types,max_entries,process_url_schemes) self._output_ldif = LDIFWriter(output_file,base64_attrs,cols,line_sep) def handle(self,dn,entry): """ Write single LDIF record to output file. 
""" self._output_ldif.unparse(dn,entry) def ParseLDIF(f,ignore_attrs=None,maxentries=0): """ Parse LDIF records read from file. This is a compability function. Use is deprecated! """ ldif_parser = LDIFRecordList( f,ignored_attr_types=ignore_attrs,max_entries=maxentries,process_url_schemes=0 ) ldif_parser.parse() return ldif_parser.all_records
ec1e5dbc338ecf43d1bd53ded885b1450fb0c5be
da570c2047d335b3553e63c27ac7f60b57b28b7e
/images/urls.py
6c3df5aaf6b9ca607cd5fbcabe80ae605ee575b6
[ "MIT" ]
permissive
mfannick/viewImages
8c799fc52566de03f4909d36f5ccc50e7fff9564
27e447faff455fba306ef3e677d5f2f63160065e
refs/heads/master
2021-09-09T11:53:42.786004
2019-10-14T09:21:16
2019-10-14T09:21:16
214,357,014
0
0
null
2021-09-08T01:21:15
2019-10-11T06:11:06
Python
UTF-8
Python
false
false
425
py
from django.conf.urls import url from . import views from django.conf import settings from django.conf.urls.static import static urlpatterns=[ url('^$' ,views.homePage,name='homePage'), url(r'^search/', views.searchImageByCategory, name='searchImageByCategory'), url(r'^description/(\d+)',views.imageDescription,name='imageDescription') ] urlpatterns+=static(settings.MEDIA_URL,document_root=settings.MEDIA_ROOT)
546d674261f3df935f47d76c22d816e02e5c5599
4e0ee2b68398a90b0986975f645350033a624558
/tests/onnx_resnet18/test_onnx_resnet18_int8.py
5c23ade3f33cd92e177415de06a1d717c36ea894
[ "Apache-2.0", "LicenseRef-scancode-unknown-license-reference" ]
permissive
kindsenior/nngen
697b80b32cf2b33e7f2c64e4d1a27eb2d739b30c
301b19b35e50174d8abb1a757b061ae80cdfe612
refs/heads/master
2022-09-21T05:53:34.565461
2020-05-03T14:58:19
2020-05-03T14:58:19
269,007,213
0
0
Apache-2.0
2020-06-03T06:26:43
2020-06-03T06:26:42
null
UTF-8
Python
false
false
2,129
py
from __future__ import absolute_import from __future__ import print_function import os import sys # the next line can be removed after installation sys.path.insert(0, os.path.dirname(os.path.dirname( os.path.dirname(os.path.abspath(__file__))))) import nngen as ng import veriloggen import onnx_resnet18 act_dtype = ng.int8 weight_dtype = ng.int8 bias_dtype = ng.int16 scale_dtype = ng.int16 with_batchnorm = True disable_fusion = False conv2d_par_ich = 1 conv2d_par_och = 1 conv2d_par_col = 1 conv2d_par_row = 1 conv2d_concur_och = None conv2d_stationary = 'filter' pool_par = 1 elem_par = 1 chunk_size = 64 axi_datawidth = 32 def test(request, silent=True): veriloggen.reset() simtype = request.config.getoption('--sim') rslt = onnx_resnet18.run(act_dtype, weight_dtype, bias_dtype, scale_dtype, with_batchnorm, disable_fusion, conv2d_par_ich, conv2d_par_och, conv2d_par_col, conv2d_par_row, conv2d_concur_och, conv2d_stationary, pool_par, elem_par, chunk_size, axi_datawidth, silent, filename=None, simtype=simtype, outputfile=os.path.splitext(os.path.basename(__file__))[0] + '.out') verify_rslt = rslt.splitlines()[-1] assert(verify_rslt == '# verify: PASSED') if __name__ == '__main__': rslt = onnx_resnet18.run(act_dtype, weight_dtype, bias_dtype, scale_dtype, with_batchnorm, disable_fusion, conv2d_par_ich, conv2d_par_och, conv2d_par_col, conv2d_par_row, conv2d_concur_och, conv2d_stationary, pool_par, elem_par, chunk_size, axi_datawidth, silent=False, filename='tmp.v', outputfile=os.path.splitext(os.path.basename(__file__))[0] + '.out') print(rslt)
0c537c7a76a12d5ad4b319e5ecd8695d74b3b0f6
5c5458622ab8413fef8ab11ef5e09dcdcd42ff69
/1.py
ad47b2fba1e02ef70d242917fbf7c8954a6efe8d
[]
no_license
zhenyakeg/Console
127fbbfe33cf86a0e4d5eb968c783407168364f5
31eea7a22a95701049872d4da2c01307f05e920d
refs/heads/master
2021-01-13T10:06:17.797796
2016-10-27T17:59:03
2016-10-27T17:59:03
72,119,199
0
0
null
null
null
null
UTF-8
Python
false
false
218
py
__author__ = 'student' # импортируем модуль import sys # выводим на экран список всех аргументов s=0 for arg in sys.argv: if len(arg) == 3: s+=1 print (s)
ed817dc1416c6f9ee3bd63420344cf905981be76
f8b77d8b7d90dabfa3b222116d9fe462d890e89b
/plans/fixed_ensemble_resnet_linear_4.py
53480a497b356b8478b73ceacb2478cfd372a96e
[ "BSD-2-Clause" ]
permissive
dbis-uibk/MediaEval2021
94e4041d6e82a28ceb95c68994808d0acc725915
14d754d9cea36415090aaa115db81f5ace465964
refs/heads/master
2023-08-27T19:12:17.758042
2021-11-03T12:12:57
2021-11-03T12:12:57
424,210,495
1
0
null
null
null
null
UTF-8
Python
false
false
1,196
py
"""Ensemble plan manually split by type moode/theme.""" import json from dbispipeline.evaluators import FixedSplitEvaluator from dbispipeline.evaluators import ModelCallbackWrapper import numpy as np from sklearn.pipeline import Pipeline from mediaeval2021 import common from mediaeval2021.dataloaders.melspectrograms import MelSpectPickleLoader from mediaeval2021.models.ensemble import Ensemble from mediaeval2021.models.wrapper import TorchWrapper dataloader = MelSpectPickleLoader('data/mediaeval2020/melspect_1366.pickle') label_splits = [ np.arange(0, 14, 1), np.arange(14, 28, 1), np.arange(28, 42, 1), np.arange(42, 56, 1), ] pipeline = Pipeline([ ('model', Ensemble( base_estimator=TorchWrapper( model_name='ResNet-18', dataloader=dataloader, batch_size=64, early_stopping=True, ), label_splits=label_splits, epochs=100, )), ]) evaluator = ModelCallbackWrapper( FixedSplitEvaluator(**common.fixed_split_params()), lambda model: common.store_prediction(model, dataloader), ) result_handlers = [ lambda results: print(json.dumps(results, indent=4)), ]
8c55b1b583c89eaaf63961ca00dde5c69b6b67c5
5e5799e0ccce7a72d514fbc76dcb0a2108013f18
/Textfile2DefDomGeom.py
710ab37655fe1cd3158b6347c04304f6a2e29644
[]
no_license
sourcery-ai-bot/dash
6d68937d225473d06a18ef64079a4b3717b5c12c
e1d1c3a601cd397d2508bfd4bb12bdb4e878cd9a
refs/heads/master
2023-03-07T17:15:39.174964
2011-03-01T17:11:21
2011-03-01T17:11:21
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,140
py
#!/usr/bin/env python # # Use doms.txt or nicknames.txt file to create a default-dom-geometry file and # print the result to sys.stdout # # URL: http://icecube.wisc.edu/~testdaq/database_files/nicknames.txt import sys from DefaultDomGeometry import DefaultDomGeometryReader, DomsTxtReader, \ NicknameReader if __name__ == "__main__": if len(sys.argv) < 2: raise SystemExit("Please specify a file to load!") if len(sys.argv) > 2: raise SystemExit("Too many command-line arguments!") if sys.argv[1].endswith("nicknames.txt"): newGeom = NicknameReader.parse(sys.argv[1]) elif sys.argv[1].endswith("doms.txt"): newGeom = DomsTxtReader.parse(sys.argv[1]) else: raise SystemExit("File must be 'nicknames.txt' or 'doms.txt'," + " not '%s'" % sys.argv[1]) oldDomGeom = DefaultDomGeometryReader.parse() # rewrite the 64-DOM strings to 60 DOM strings plus 32 DOM icetop hubs newGeom.rewrite(False) oldDomGeom.rewrite() oldDomGeom.mergeMissing(newGeom) # dump the new default-dom-geometry data to sys.stdout oldDomGeom.dump()
0365a71e6ddcbf5d739bd768676f3f793715d525
1799fe1d9dfcf5f9619a87a11f3fa6170e1864fc
/00998/test_maximum_binary_tree_ii.py
44cc467d9b38a539320c803e7bc639b674e44c3e
[]
no_license
SinCatGit/leetcode
5e52b49324d16a96de1ba4804e3d17569377e804
399e40e15cd64781a3cea295bf29467d2284d2ae
refs/heads/master
2021-07-05T18:51:46.018138
2020-04-25T04:06:48
2020-04-25T04:06:48
234,226,791
1
1
null
2021-04-20T19:17:43
2020-01-16T03:27:08
Python
UTF-8
Python
false
false
1,519
py
import unittest from maximum_binary_tree_ii import Solution, TreeNode class TestSolution(unittest.TestCase): def test_Calculate_Solution(self): solution = Solution() # 6 # / \ # 1 4 # / \ # 3 2 t25 = TreeNode(6) t21 = TreeNode(1) t24 = TreeNode(4) t23 = TreeNode(3) t26 = TreeNode(2) t25.left = t21 t25.right = t24 t24.left = t23 t24.right = t26 def dfs(r): if r.left: yield from dfs(r.left) yield r.val if r.right: yield from dfs(r.right) root = solution.insertIntoMaxTree(t25, 7) self.assertEqual([1, 6, 3, 4, 2, 7], [v for v in dfs(root)]) root = solution.insertIntoMaxTree(t25, 5) self.assertEqual([1, 6, 3, 4, 2, 5], [v for v in dfs(root)]) # 6 # / \ # 1 4 # / \ # 3 2 t25 = TreeNode(6) t21 = TreeNode(1) t24 = TreeNode(4) t23 = TreeNode(3) t26 = TreeNode(2) t25.left = t21 t25.right = t24 t24.left = t23 t24.right = t26 root = solution.insertIntoMaxTreeV01(t25, 7) self.assertEqual([1, 6, 3, 4, 2, 7], [v for v in dfs(root)]) root = solution.insertIntoMaxTreeV01(t25, 5) self.assertEqual([1, 6, 3, 4, 2, 5], [v for v in dfs(root)]) if __name__ == '__main__': unittest.main()
1230bd8aea0ed2cf0e02c98811fd1bca3bac9353
e6d4a87dcf98e93bab92faa03f1b16253b728ac9
/algorithms/python/smallestGoodBase/smallestGoodBase.py
0fc48f86b09d916c4865349241f9dfd2a7f0a365
[]
no_license
MichelleZ/leetcode
b5a58e1822e3f6ef8021b29d9bc9aca3fd3d416f
a390adeeb71e997b3c1a56c479825d4adda07ef9
refs/heads/main
2023-03-06T08:16:54.891699
2023-02-26T07:17:47
2023-02-26T07:17:47
326,904,500
3
0
null
null
null
null
UTF-8
Python
false
false
843
py
#! /usr/bin/env python3 # -*- coding: utf-8 -*- # Source: https://leetcode.com/problems/smallest-good-base/ # Author: Miao Zhang # Date: 2021-02-15 class Solution: def smallestGoodBase(self, n: str) -> str: num = int(n) # n = 1 + k + k^2+....k^(m - 1) # i:m for i in range(int(math.log(num + 1, 2)), 1, -1): # base k left = 2 right = pow(num, 1 / (i - 1)) + 1 while left < right: mid = int(left + (right - left) // 2) sums = 0 for j in range(i): sums = sums * mid + 1 if sums == num: return str(mid) elif sums < num: left = mid + 1 else: right = mid return str(num - 1)
35b1952b06b8274553605dae56d4b25d5da81a3d
77900cdd9a815caf1cd04705321ca93f5072179f
/Project/.history/product_20211116215222.py
58d5bb8d7d70ef65639f0d85425607bc5e56f6aa
[]
no_license
Bom19990111/helloword_python
717799d994223d65de5adaeabecf396ff2bc1fb7
2ee2e67a60043f03c1ce4b070470c7d2dcdc72a7
refs/heads/master
2023-09-06T04:17:02.057628
2021-11-21T20:00:46
2021-11-21T20:00:46
407,063,273
0
1
null
2021-11-21T20:00:47
2021-09-16T07:18:35
Python
UTF-8
Python
false
false
11,136
py
import data as list_product import random import pandas as pd # def __init__(self, Id, Product_code, Product_name, Brand, Year, Size): # self.Id = Id # self.Product_code = Product_code # self.Product_name = Product_name # self.Brand = Brand # self.Year = Year # self.Size = Size # Thêm sản phẩm def AddProduct(): print("THÊM SẢN PHẨM") product = { "Id": "", "Product_code": "", "Product_name": "", "Brand": "", "Price": "", "Year": "", "Quantity": "", "Size": "", "Status": "" } print("Nhập ID sản phẩm:") try: Id = int(input()) except: print("ID phải là kiểu số, vui lòng nhập lại".upper()) print("------------------------------------") try: AddProduct() except RuntimeError: print("Dừng chương trình!") while True: student = FindProductDuplicate(Id) if student != False: print("ID đã tồn tại, vui lòng nhập lại ID") Id = int(input()) else: break product['Id'] = Id # Mã sản phẩm random code_product = random.randint(1, 99) str_id = "HKSP" if code_product <= 9: str_id += "0" + str(code_product) else: str_id += str(code_product) product["Product_code"] = str_id print("Nhập tên sản phẩm: ") product['Product_name'] = input() print("Nhập thương hiệu sản phẩm: ") product['Brand'] = input() print("Nhập giá sản phẩm: ") try: product['Price'] = float(input()) except ValueError: print("Giá phải là kiểu số, vui lòng nhập lại".upper()) print("------------------------------------") try: print("Nhập giá sản phẩm: ") product['Price'] = float(input()) except: print("Dừng chương trình!") print("Nhập năm sản xuất: ") try: product['Year'] = int(input()) except ValueError: print("Năm phải là kiểu số, vui lòng nhập lại".upper()) print("------------------------------------") try: print("Nhập năm sản xuất: ") product['Year'] = int(input()) except: print('Dừng chương trình!') print("Nhập số lượng: ") try: product['Quantity'] = int(input()) except ValueError: print("Số lượng phải là kiểu số, vui lòng nhập lại".upper()) print("------------------------------------") try: print("Nhập số lượng: ") 
product['Quantity'] = int(input()) except: print('Dừng chương trình!') print("Nhập size giày: ") product['Size'] = input() print("Nhập tình trạng sản phẩm: ") product['Status'] = input() list_product.list_product.append(product) answer = input("Bạn có muốn nhập tiếp không? Y/N ") if answer == "y" or answer == "Y": AddProduct() # Tìm kiếm ID trùng lặp def FindProductDuplicate(Id): for i in range(0, len(list_product.list_product)): if list_product.list_product[i]['Id'] == Id: return [i, list_product.list_product[i]] return False # Hiển thị tất cả sản phẩm def ShowAllProduct(): print("*** HIỂN THỊ TẤT CẢ SẢN PHẨM ***") if len(list_product.list_product) == 0 or len(list_product.list_product) < 0: print("Chưa có sản phẩm nào để hiển thị! ".upper()) for i in range(0, len(list_product.list_product)): print("ID : \t", list_product.list_product[i]['Id']), print("Mã sản phẩm : \t", list_product.list_product[i]['Product_code']), print("Tên sản phẩm : \t", list_product.list_product[i]['Product_name']), print("Thương hiệu : \t", list_product.list_product[i]['Brand']), print("Giá : \t", list_product.list_product[i]['Price']), print("Năm xuất bản : \t", list_product.list_product[i]['Year']), print("Số lượng : \t", list_product.list_product[i]['Quantity']), print("Size giày : \t", list_product.list_product[i]['Size']) print("Tình trạng : \t", list_product.list_product[i]['Status']) print("________________________________") # Sửa thông tin sản phẩm def UpdateProduct(): print("*** CẬP NHẬT THÔNG TIN SẢN PHẨM ***") print("Nhập ID sản phẩm cần sửa") try: Id = int(input()) product = FindProductDuplicate(Id) except: print("Vui lòng nhập đúng định dạng ID".upper()) UpdateProduct() if product == False: print("Không tìm thấy sản phẩm ID = ".upper(), Id) print("********************************") UpdateProduct() else: print("""Bạn muốn cập nhật mục nào ? : 0. Thoát. 1. Tên sản phẩm. 2. Thương hiệu sản phẩm. 3. Giá sản phẩm 4. Size giày. 5. Số lượng. 6. Năm xuất bản. 7. 
Tình trạng """) action = 0 while action >= 0: if action == 1: UpdateProductName() elif action == 2: UpdateProductBrand() elif action == 3: UpdateProductPrice() elif action == 4: UpdateProductSize() elif action == 5: UpdateProductQuatity() elif action == 6: UpdateProductYear() elif action == 7: UpdateStatus() def UpdateProductName(): print("Nhập tên cập nhật của sản phẩm: ") name_product = input() product[1]['Product_name'] = name_product def UpdateProductBrand(): print("Nhập thương hiệu muốn cập nhật: ") name_product = input() product[1]['Brand'] = name_product def UpdateProductPrice(): print("Nhập giá muốn cập nhật: ") name_product = float(input()) product[1]['Price'] = name_product def UpdateProductSize(): print("Nhập size muốn cập nhật: ") name_product = input() product[1]['Size'] = name_product def UpdateProductYear(): print("Nhập năm sản xuất muốn cập nhật: ") name_product = int(input()) product[1]['Year'] = name_product list_product.list_product[product[0]] = product[1] def UpdateProductQuatity(): print("Nhập số lượng muốn cập nhật: ") name_product = int(input()) product[1]['Quantity'] = name_product list_product.list_product[product[0]] = product[1] def UpdateStatus(): print("Nhập tình trạng muốn cập nhật: ") name_product = input() product[1]['Status'] = name_product list_product.list_product[product[0]] = product[1] action = int(input("Bạn chọn mục cập nhật nào? ")) if action == 0: print("Không cập nhật mục nào".upper()) print("********************************") break # Xóa sản phẩm def DeleteProduct(): print("*** XÓA SẢN PHẨM ***") print("Nhập ID sản phẩm cần xóa:") Id = int(input()) product = FindProductDuplicate(Id) if product == False: print("Không tìm thấy sản phẩm ID = ".upper(), Id) print("********************************") else: answer = input("Bạn có muốn xóa sản phẩm này không? 
Y/N ".upper()) if answer == "y" or answer == "Y": if product != False: list_product.list_product.remove(product[1]) print("Xóa sản phẩm thành công!".upper()) print("********************************") else: print("Đã từ chối xóa sản phẩm này!".upper()) print("********************************") # Tìm kiếm sản phẩm def FindProductByName(): print("*** TÌM KIẾM SẢN PHẨM ***") if (len(list_product.list_product) == 0 or len(list_product.list_product) < 0): print("Chưa có sản phẩm nào trong giỏ!".upper()) print("********************************") else: NameProduct = str( input("Nhập tên sản phẩm hoặc tên thương hiệu bạn muốn tìm kiếm: ")).upper() is_found = False for i in range(0, len(list_product.list_product)): if str(list_product.list_product[i]['Product_name']).upper() in NameProduct or str(list_product.list_product[i]['Brand']).upper() in NameProduct: is_found = True print("ID : \t", list_product.list_product[i]['Id']), print("Mã sản phẩm : \t", list_product.list_product[i]['Product_code']), print("Tên sản phẩm : \t", list_product.list_product[i]['Product_name']), print("Thương hiệu : \t", list_product.list_product[i]['Brand']), print("Giá : \t", list_product.list_product[i]['Price']), print("Năm xuất bản : \t", list_product.list_product[i]['Year']), print("Số lượng : \t", list_product.list_product[i]['Quantity']), print("Size giày : \t", list_product.list_product[i]['Size']) print("Tình trạng : \t", list_product.list_product[i]['Status']) print("________________________________") if not is_found: print("Không tìm thấy sản phẩm này @@".upper()) print("********************************") def SortProductNameA_Z(): list_product.list_product.sort(key=lambda item: item.get("Product_name")) def SortProductNameZ_A(): list_product.list_product.sort( key=lambda item: item.get("Product_name"), reverse=True) def SortPriceAsc(): list_product.list_product.sort(key=lambda item: item.get("Price")) def SortPriceDesc(): list_product.list_product.sort( key=lambda item: 
item.get("Price"), reverse=True) def ExportExecel(): pd.DataFrame(list_product.list_product).to_excel('danhsachsanpham.xlsx', header=False, index=False) df = pd.read_excel(xl, header=None) print(df.head()) def ImportExecel(): xl = pd.ExcelFile('danhsachsanpham.xlsx') df = pd.read_excel(xl, header=None) print(df.head())
a819988d7ff2b5dd395cdf6647f534d1e2bd76d9
ac227cc22d5f5364e5d029a2cef83816a6954590
/applications/physbam/physbam-lib/External_Libraries/Archives/boost/tools/build/v2/test/test2.py
cb74b851f46253b5bae80ccdd8ca872abd5ef6de
[ "BSL-1.0", "LicenseRef-scancode-unknown-license-reference", "BSD-3-Clause" ]
permissive
schinmayee/nimbus
597185bc8bac91a2480466cebc8b337f5d96bd2e
170cd15e24a7a88243a6ea80aabadc0fc0e6e177
refs/heads/master
2020-03-11T11:42:39.262834
2018-04-18T01:28:23
2018-04-18T01:28:23
129,976,755
0
0
BSD-3-Clause
2018-04-17T23:33:23
2018-04-17T23:33:23
null
UTF-8
Python
false
false
471
py
#!/usr/bin/python from BoostBuild import Tester, List from time import sleep t = Tester() t.set_tree("test2") t.run_build_system("-sBOOST_BUILD_PATH=" + t.original_workdir + "/..") file_list = 'bin/foo/$toolset/debug/runtime-link-dynamic/' * List("foo foo.o") t.expect_addition(file_list) t.write("foo.cpp", "int main(int, char**) { return 0; }\n") t.run_build_system("-d2 -sBOOST_BUILD_PATH=" + t.original_workdir + "/..") t.expect_touch(file_list) t.pass_test()
b454e42a2bc91d38dbdb4c2052da1a603fdaf6db
4f026ddcf8f058d884f15259f0e42c2178eb2157
/clare/clare/application/factories.py
0dadccd763c699f4a366d61970db657bafff708f
[ "MIT" ]
permissive
dnguyen0304/roomlistwatcher
afd95e5f601f77fc8d7c4cd4307e60f36b53162c
7ac4d5172de22dd8906662da521995c8e06c2617
refs/heads/master
2021-01-20T22:55:04.289589
2017-11-16T04:09:49
2017-11-16T04:09:49
101,829,306
0
0
null
2017-11-16T04:09:49
2017-08-30T02:38:56
Python
UTF-8
Python
false
false
2,677
py
# -*- coding: utf-8 -*- import Queue import os import threading import uuid from . import applications from . import download_bot from . import room_list_watcher class Application(object): def __init__(self, infrastructure, properties): """ Parameters ---------- infrastructure : clare.infrastructure.infrastructures.ApplicationInfrastructure properties : collections.Mapping """ self._infrastructure = infrastructure self._properties = properties def create(self): """ Returns ------- clare.application.applications.Application """ queue = Queue.Queue() # Construct the room list watcher. room_list_watcher_factory = room_list_watcher.factories.Producer( infrastructure=self._infrastructure.room_list_watcher, properties=self._properties['room_list_watcher']) room_list_watcher_ = room_list_watcher_factory.create() # Include threading. kwargs = { 'interval': self._properties['room_list_watcher']['interval'] } room_list_watcher_ = threading.Thread(name='room_list_watcher', target=room_list_watcher_.produce, kwargs=kwargs) room_list_watcher_.daemon = True # Construct the download bot. download_bot_factory = download_bot.factories.Factory( queue=queue, properties=self._properties['download_bot']) directory_path = os.path.join( self._properties['download_bot']['factory']['root_directory_path'], str(uuid.uuid4())) download_bot_ = download_bot_factory.create( download_directory_path=directory_path) # Include threading. kwargs = { 'interval': self._properties['download_bot']['interval'], 'timeout': self._properties['download_bot']['timeout'] } download_bot_ = threading.Thread(name='download_bot', target=download_bot_.consume, kwargs=kwargs) download_bot_.daemon = True # Construct the application. application = applications.Application( room_list_watcher=room_list_watcher_, download_bot=download_bot_) return application def __repr__(self): repr_ = '{}(infrastructure={}, properties={})' return repr_.format(self.__class__.__name__, self._infrastructure, self._properties)
7b11a37746ad28f3e18303de213f4beb2bbb4404
315450354c6ddeda9269ffa4c96750783963d629
/CMSSW_7_0_4/src/SimTotem/RPTimingDetectorsDigiProducer/python/BeamMisalignmentFinder.py
3ff7d944d97f722285c3413832867036093dcd54
[]
no_license
elizamelo/CMSTOTEMSim
e5928d49edb32cbfeae0aedfcf7bd3131211627e
b415e0ff0dad101be5e5de1def59c5894d7ca3e8
refs/heads/master
2021-05-01T01:31:38.139992
2017-09-12T17:07:12
2017-09-12T17:07:12
76,041,270
0
2
null
null
null
null
UTF-8
Python
false
false
6,414
py
import ROOT import os import argparse import numpy as np import matplotlib.pyplot as plt from scipy.optimize import curve_fit from scipy import exp parser = argparse.ArgumentParser(description='Finds beam misaligned between given template and misaligned ntuple') parser.add_argument('template_file', metavar='template_file', type=str, nargs=1, help='File containing ntuple used as template to find misaligned') parser.add_argument('misaligned_file', metavar='misaligned_file', type=str, nargs=1, help='File containing misaligned ntuple') args = parser.parse_args() ROOT.gROOT.ProcessLine('.include ' + os.environ['CMSSW_BASE'] + '/src') ROOT.gROOT.ProcessLine( '.L ' + os.environ['CMSSW_BASE'] + '/src/TotemAnalysis/TotemNtuplizer/interface/RPTimingDetectorsNtuplizerHit.h+g') tree_name = 'TotemNtuple' hit_source_types = ["reco", "filtered", "tracking_pot_210_far", "tracking_pot_220_far"] ids = { "reco": ["%03d" % id for id in [20, 21, 120, 121]] } ids["filtered"] = ids["reco"] ids["tracking_pot_220_far"] = ids["reco"] ids["tracking_pot_210_far"] = ids["reco"] tracking_pot_maps = { "tracking_pot_220_far": { "020": "24", "021": "25", "120": "124", "121": "125" }, "tracking_pot_210_far": { "020": "4", "021": "5", "120": "104", "121": "105" } } def get_branch_name(detector_id, type): if type == "geant": return 'rp_timing_detector_%s_hits' % detector_id elif type == "reco": return 'rp_timing_detector_%s_reco_hits' % detector_id elif type == "filtered": return 'rp_timing_detector_%s_filtered_hits' % detector_id elif type == "tracking_pot_220_far": return 'rp_timing_detector_%s_tracking_pot_%03.d' % (detector_id, int(tracking_pot_maps[type][detector_id])) elif type == "tracking_pot_210_far": return 'rp_timing_detector_%s_tracking_pot_%03.d' % (detector_id, int(tracking_pot_maps[type][detector_id])) def load_ntuple(file_path): tree_path = '/'.join([file_path, tree_name]) tchain = ROOT.TChain('TotemNtuple', '') tchain.Add(tree_path) return tchain def gauss(x, a, x0, sigma): return 
a*exp(-(x-x0)**2/(2*sigma**2)) def cut_zero_bins(histogram, bins): first = histogram.index(next(x for x in histogram if x != 0)) last = (len(histogram) - 1) - histogram[::-1].index( next(x for x in histogram[::-1] if x != 0)) hist_x = bins[first:last + 1] hist_y = histogram[first:last + 1] return hist_x, hist_y def fit_gauss(x, y): gauss_params, pcov = curve_fit(gauss, x, y, p0=[1., 0., 1.]) return gauss_params, np.sqrt(np.diag(pcov)) def find_misalignment(template_data, misaligned_data): sum = 0.0 number = 0.0 template_error = 0.0 misaligned_error = 0.0 for bin_width in list(np.arange(0.1, 1.1, 0.1))[::-1]: try: histogram_bins = np.arange(-20, 20, bin_width) template_histogram = list(np.histogram(template_data, bins=histogram_bins)[0]) misaligned_histogram = list(np.histogram(misaligned_data, bins=histogram_bins)[0]) template_x, template_y = cut_zero_bins(template_histogram, histogram_bins) misaligned_x, misaligned_y = cut_zero_bins(misaligned_histogram, histogram_bins) template_gauss_params, template_standard_deviation_error = fit_gauss(template_x, template_y) misaligned_gauss_params, misaligned_standard_deviation_error = fit_gauss(misaligned_x, misaligned_y) template_error += template_standard_deviation_error[1] misaligned_error += misaligned_standard_deviation_error[1] template_x0 = template_gauss_params[1] misaligned_x0 = misaligned_gauss_params[1] # plt.plot(misaligned_x, misaligned_y, 'b+:', label='data') # plt.plot(misaligned_x, gauss(misaligned_x, *misaligned_gauss_params), 'ro:', label='fit') # plt.legend() # plt.savefig("foo.png") sum += (misaligned_x0 - template_x0) number += 1 except RuntimeError: # print "result not found for %.2f bins width" % bin_width pass if number > 0: return sum/number, template_error/number, misaligned_error/number raise Exception('Cannot find misalignment') if __name__ == "__main__": template_file_name = args.template_file[0] misaligned_file_name = args.misaligned_file[0] template_ntuple = load_ntuple(template_file_name) 
misaligned_ntuple = load_ntuple(misaligned_file_name) # check sizes if template_ntuple.GetEntries() != misaligned_ntuple.GetEntries(): print "Error, all sources must have te same number of events" exit(-1) sources_ntuples_types = ["template", "misaligned"] hits_histograms = {} for ntuple_type in sources_ntuples_types: hits_histograms[ntuple_type] = {} for hit_type in hit_source_types: hits_histograms[ntuple_type][hit_type] = {} for id in ids[hit_type]: hits_histograms[ntuple_type][hit_type][id] = [] for source_name, source_ntuple in zip(sources_ntuples_types, [template_ntuple, misaligned_ntuple]): for event in source_ntuple: for type in hit_source_types: for id in ids[type]: for hits_vector in getattr(event, get_branch_name(id, type)): if type in ["reco", "filtered"]: hits_histograms[source_name][type][id].append(hits_vector.position.x) elif type in ["tracking_pot_210_far", "tracking_pot_220_far"]: hits_histograms[source_name][type][id].append(hits_vector.x) sum = 0.0 number = 0.0 print "Calculated misalignment" for type in hit_source_types: for id in ids[type]: result, template_error, misaligned_error = \ find_misalignment(hits_histograms["template"][type][id], hits_histograms["misaligned"][type][id]) sum += result number += 1 print '%s %.2fmm; standard deviation error: template: %.2f misaligned: %.2f' % (get_branch_name(id, type), result, template_error, misaligned_error) print 'Average %.2fmm' % (sum/number)
04a0ae35c0b49d0518e6a68d481f6e317f214115
3a8c2bd3b8df9054ed0c26f48616209859faa719
/Challenges/surroundedRegions.py
570dcbb6b411e6fd035814e01998a9a4779b635f
[]
no_license
AusCommsteam/Algorithm-and-Data-Structures-and-Coding-Challenges
684f1ca2f9ee3c49d0b17ecb1e80707efe305c82
98fb752c574a6ec5961a274e41a44275b56da194
refs/heads/master
2023-09-01T23:58:15.514231
2021-09-10T12:42:03
2021-09-10T12:42:03
null
0
0
null
null
null
null
UTF-8
Python
false
false
1,718
py
""" Surrounded Regions Given a 2D board containing 'X' and 'O' (the letter O), capture all regions surrounded by 'X'. A region is captured by flipping all 'O's into 'X's in that surrounded region. Example: X X X X X O O X X X O X X O X X After running your function, the board should be: X X X X X X X X X X X X X O X X Explanation: Surrounded regions shouldn’t be on the border, which means that any 'O' on the border of the board are not flipped to 'X'. Any 'O' that is not on the border and it is not connected to an 'O' on the border will be flipped to 'X'. Two cells are connected if they are adjacent cells connected horizontally or vertically. """ """ BFS Time: O(MN) Space: O(N) """ class Solution: def solve(self, board: List[List[str]]) -> None: """ Do not return anything, modify board in-place instead. """ queue = collections.deque() for i in range(len(board)): for j in range(len(board[0])): if (i == 0 or i == len(board)-1 or j == 0 or j == len(board[0])-1) and board[i][j] == 'O': queue.append((i, j)) directions = [(1, 0), (-1, 0), (0, -1), (0, 1)] while queue: i, j = queue.popleft() if 0 <= i < len(board) and 0 <= j < len(board[0]) and board[i][j] == 'O': board[i][j] = 'D' for di, dj in directions: queue.append((i + di, j + dj)) for i in range(len(board)): for j in range(len(board[0])): if board[i][j] == 'O': board[i][j] = 'X' elif board[i][j] == 'D': board[i][j] = 'O'
dece0de38d388908615b7dfa117a5a0a64cc883f
fe40fb53bdeb3d693174a57fe336503e92fe299b
/eheritage/utils.py
f8dbc0756aecfe7426966fb4198086045afbacea
[ "BSD-2-Clause" ]
permissive
uq-eresearch/eheritage
8c8d096d43888e6e41fbbacdf55f2c6808bace27
e4a2f01c56d438d8b3f4de63d50d979a8105d652
refs/heads/master
2022-07-18T19:21:53.224175
2016-08-05T02:40:08
2016-08-05T02:40:08
18,045,275
0
0
BSD-3-Clause
2022-07-06T19:49:44
2014-03-23T22:33:56
HTML
UTF-8
Python
false
false
281
py
from flask.json import JSONEncoder class IterableAwareEncoder(JSONEncoder): def default(self, o): try: iterable = iter(o) except TypeError: pass else: return list(iterable) return JSONEncoder.default(self, o)
53184abe0b9d0cc7e28ab1ab15066c700988bf39
22bcb68759d516eea70d18116cd434fcd0a9d842
/scrap/booksadda_scrap.py
f8cbcbef3fd0f32b0ab736622fb7afee1cde6327
[]
no_license
lovesh/abhiabhi-web-scrapper
1f5da38c873fea74870d59f61c3c4f52b50f1886
b66fcadc56377276f625530bdf8e739a01cbe16b
refs/heads/master
2021-01-01T17:16:51.577914
2014-10-18T15:56:42
2014-10-18T15:56:42
null
0
0
null
null
null
null
UTF-8
Python
false
false
8,758
py
import downloader import dom import urllib import re import time import datetime import simplejson as json import pymongo from collections import defaultdict import util siteurl='http://www.bookadda.com/' books=[] book_urls=set() logfile=open('bookadda_log.txt','w') dl=downloader.Downloader() dl.addHeaders({'Origin':siteurl,'Referer':siteurl}) debug=True DBName='abhiabhi' temporary=pymongo.Connection().DBName.ba_temporary temporary.create_index('url',unique=True) url_pattern = re.compile('bookadda.com', re.I) def getCategories(): doc=dom.DOM(url=siteurl) category_path='//div[@id="body_container"]//ul[@class="left_menu"][1]/li/a' categories=[[c[0],c[1]] for c in doc.getLinksWithXpath(category_path) if c[1] != 'http://www.bookadda.com/view-books/medical-books'] return categories def getBookUrlsFromPage(html): book_url_path='//ul[@class="results"]//div[@class="details"]//h4/a' page_dom=dom.DOM(string=html) links=set(l[1] for l in page_dom.getLinksWithXpath(book_url_path)) return links def getBookUrlsOfSubcategory(subcategory_url): subcategory_dom=dom.DOM(url=subcategory_url) book_url_path='//ul[@class="results"]//div[@class="details"]//h4/a' book_urls=set(l[1] for l in subcategory_dom.getLinksWithXpath(book_url_path)) result_count_path='//div[@id="search_container"]//div[@class="contentbox"]//div[@class="head"]' count_node=subcategory_dom.getNodesWithXpath(result_count_path) if count_node: count_string=count_node[0].text_content() print count_string count=int(re.search('\d+ of (\d+) result',count_string).group(1)) subcat_col=pymongo.Connection().DBName.ba_subcats subcat_col.update({'subcat_url':subcategory_url},{'$set':{'num_books':count}}) if count>20: page_urls=set(subcategory_url+'?pager.offset='+str(x) for x in xrange(20,count,20)) dl.putUrls(page_urls) subcategory_pages=dl.download() for s in subcategory_pages: status=subcategory_pages[s][0] html=subcategory_pages[s][1] if status > 199 and status < 400: book_urls.update(getBookUrlsFromPage(html)) #print book_urls 
return book_urls def getAllBookUrls(): global book_urls subcategory_path='//div[@id="left_container"]/ul[@class="left_menu"][1]/li/a' cats=getCategories() subcat_col=pymongo.Connection().DBName.ba_subcats subcat_col.create_index('subcat_url',unique=True) for cat in cats: page=dom.DOM(url=cat[1]) subcats=page.getLinksWithXpath(subcategory_path) for subcat in subcats: try: subcat_col.insert({'subcat':subcat[0].strip('\n\t\r '),'subcat_url':subcat[1],'cat':cat[0],'cat_url':cat[1],'status':0}) except: pass try: subcat_col.insert({'subcat':'Medical','subcat_url':'http://www.bookadda.com/view-books/medical-books','cat':'Medical','status':0}) except: pass subcats=[{'cat':subcat['cat'],'subcat':subcat['subcat'],'subcat_url':subcat['subcat_url']} for subcat in subcat_col.find({'status':0})] start=time.time() for subcat in subcats: print 'Getting book urls of subcategory %s\n\n'%subcat['subcat_url'] logfile.write('Getting book urls of subcategory %s\n\n'%subcat['subcat_url']) logfile.flush() urls=getBookUrlsOfSubcategory(subcat['subcat_url']) for url in urls: try: temporary.insert({'url':url,'subcat_url':subcat['subcat_url'],'categories':[[subcat['cat'],subcat['subcat']],],'status':0}) except pymongo.errors.DuplicateKeyError: temporary.update({'url':url},{'$push':{'categories':[subcat['cat'],subcat['subcat']]}}) print "done with subcategory %s"%subcat['subcat_url'] logfile.write("done with subcategory %s\n\n"%subcat['subcat_url']) subcat_col.update({'subcat_url':subcat['subcat_url']},{'$set':{'status':1}}) finish=time.time() print "All book urls(%d) fetched in %s\n\n"%(len(book_urls),str(finish-start)) logfile.write("All book urls fetched in %s\n\n"%str(finish-start)) logfile.flush() return book_urls def parseBookPage(url=None,string=None): book={} if url: try: doc=dom.DOM(url=url,utf8=True) except: return False else: try: doc=dom.DOM(string=string,utf8=True) except: return False addBox=doc.getNodesWithXpath('//a[@id="addBox"]') url_path='//meta[@property="og:url"]' 
book['url']=doc.getNodesWithXpath(url_path)[0].get('content').strip() if debug: print book['url'] if url_pattern.search(book['url']) is None: return False if addBox: #availability check book['availability']=1 shipping_path='//span[@class="numofdys"]/strong' shipping=doc.getNodesWithXpath(shipping_path) if shipping: shipping=re.search('(\d+)-(\d+)',shipping[0].text) book['shipping']=[shipping.group(1),shipping.group(2)] else: book['availability']=0 name_path='//div[@class="prdcol2"]/h1' name = doc.getNodesWithXpath(name_path) if len(name) > 0: book['name']=name[0].text_content().strip() image_path='//meta[@property="og:image"]' image=doc.getNodesWithXpath(image_path) if image: book['img_url']=image[0].text_content().strip() desc_path='//div[@class="reviews-box-cont-inner"]' desc=doc.getNodesWithXpath(desc_path) if desc: book['description']=desc[0].text_content().strip() price_path='//span[@class="actlprc"]' price=doc.getNodesWithXpath(price_path) if len(price) > 0: price = price[0].text.strip() book['price']=int(re.search('(\d+)',price).group(1)) book['scraped_datetime']=datetime.datetime.now() book['last_modified_datetime']=datetime.datetime.now() product_history={} if 'price' in book: product_history['price']=book['price'] if 'shipping' in book: product_history['shipping'] = book['shipping'] product_history['availability'] = book['availability'] product_history['datetime'] = book['last_modified_datetime'] book['product_history'] = [product_history,] book['site']='bookadda' tbody_path='//div[@class="grey_background"]/table/tbody' if len(doc.getNodesWithXpath(tbody_path)) == 0: tbody_path='//div[@class="grey_background"]/table' data=doc.parseTBody(tbody_path) if 'author' in data: data['author']=data['author'].encode('utf8').split('\xc2\xa0') util.replaceKey(data,'number of pages','num_pages') util.replaceKey(data,'publishing date','pubdate') util.replaceKey(data,'isbn-13','isbn13') if 'isbn13' in data: 
data['isbn13']=data['isbn13'].split(',')[0].replace('-','').strip() util.replaceKey(data,'book','name') book.update(data) return book def go(): global books getAllBookUrls() temporary=pymongo.Connection().DBName.ba_temporary con=pymongo.Connection() coll=con[DBName]['scraped_books'] count=1 #so that the following loop starts total=0 #keeps a track of total downloaded books start=time.time() while count>0: docs=temporary.find({'status':0}).limit(500) count=docs.count() urls=[] processed={} urls=[doc['url'] for doc in docs] dl.putUrls(urls,30) result=dl.download() books=[] for r in result: status=str(result[r][0]) html=result[r][1] if int(status) > 199 and int(status) < 400: book=parseBookPage(string=html) if book: books.append(book) if status in processed: processed[status].append(r) else: processed[status]=[r,] coll.insert(books) total+=total+len(books) for status in processed: temporary.update({'url':{'$in':processed[status]}},{'$set':{'status':int(status)}},multi=True) finish=time.time() logfile.write("All books parsed in %s"%str(finish-start)) def prepareXMLFeed(): go() root=dom.XMLNode('books') start=time.time() for book in books: child=root.createChildNode('book') child.createChildNodes(book) f=open('booksadda.xml','w') f.write(root.nodeToString()) f.close() finish=time.time() logfile.write("XML file created in %s"%str(finish-start)) if __name__ == '__main__': go()
a07324a9d67bfb019bf47a4e379d797eab6ed5f3
728f639b8d536348e200a6c6b8dfd3e70a781d85
/HTMLTestRunner测试报告&unittest/可以复用项目/webTest/comm/common.py
967c849ad0793853febfe741f742e28639cee19c
[]
no_license
jingshiyue/my_dict_forPython
00adad2a1492b7ecff66a3de44793f17682aaea6
7a0da28d68eb130e62d196467d0ef0ee3d8ebf95
refs/heads/master
2023-04-05T18:29:36.707082
2023-03-30T10:30:13
2023-03-30T10:30:13
192,511,669
0
0
null
null
null
null
UTF-8
Python
false
false
7,436
py
# -*- coding:utf-8 -*- import os import readConfig as readConfig from xlrd import open_workbook from xml.etree import ElementTree as ElementTree from selenium import webdriver from selenium.common.exceptions import NoSuchElementException from comm.webDriver import MyDriver as Driver import time import comm.runSet as runSet localReadConfig = readConfig.ReadConfig() def open_browser(): """ open browser by url :return: """ browser = webdriver.Chrome() # 绐楀彛鏈�ぇ鍖� browser.maximize_window() return browser def close_browser(browser): """ close browser :param browser: :return: """ browser.close() def open_url(name): """ open web page by url :param name: :return: """ url = localReadConfig.get_webServer(name) browser = open_browser() browser.get(url) return browser def get_xls(xls_name, sheet_name): """ :param xls_name: excel file name :param sheet_name: sheet name :return: sheet value """ web = runSet.get_web() site = runSet.get_site() cls = [] # get excel file path xls_path = os.path.join(readConfig.proDir, 'file', web, site, xls_name) print("xls path:"+xls_path) # open excel file book = open_workbook(xls_path) # get sheet by name sheet = book.sheet_by_name(sheet_name) # get nrows nrows = sheet.nrows for i in range(nrows): if sheet.row_values(i)[0] != u'case_name': cls.append(sheet.row_values(i)) # print(sheet.row_values(i)) return cls activity = {} def set_xml(): """ get element :return: """ web = runSet.get_web() site = runSet.get_site() if len(activity) == 0: file_path = os.path.join(readConfig.proDir, 'file', web, site, 'element.xml') tree = ElementTree.parse(file_path) for a in tree.findall('activity'): activity_name = a.get('name') element = {} for e in a.getchildren(): element_name = e.get('id') element_child = {} for t in e.getchildren(): element_child[t.tag] = t.text element[element_name] = element_child activity[activity_name] = element def get_el_dict(activity_name, element): """ According to page, activity and element getting element :param activity_name: 
activity name :param element: element name :return: """ set_xml() element_dict = activity.get(activity_name).get(element) print(element_dict) return element_dict class Element: #Element("shein", "www", "login", "login_link").is_exist() def __init__(self, activity_name, element_name): self.driver1 = Driver.get_browser() self.driver = self.driver1.get_driver() self.activity = activity_name self.element = element_name element_dict = get_el_dict(self.activity, self.element) self.pathType = element_dict.get('pathType') self.pathValue = element_dict.get('pathValue') def is_exist(self): """ Determine element is exist :return: TRUE OR FALSE """ try: if self.pathType == 'ID': self.driver.find_element_by_id(self.pathValue) return True if self.pathType == 'XPATH': self.driver.find_elements_by_xpath(self.pathValue) return True if self.pathType == 'CLASSNAME': self.driver.find_element_by_class_name(self.pathValue) return True if self.pathType == 'NAME': self.driver.find_element_by_name(self.pathValue) return True except NoSuchElementException: return False def wait_element(self, wait_time): """ wait element appear in time :param wait_time: wait time :return: true or false """ time.sleep(wait_time) if self.is_exist(): return True else: return False def get_element(self): """ get element :return: element """ try: if self.pathType == 'ID': element = self.driver.find_element_by_id(self.pathValue) return element if self.pathType == 'XPATH': element = self.driver.find_elements_by_xpath(self.pathValue) return element if self.pathType == 'CLASSNAME': element = self.driver.find_element_by_class_name(self.pathValue) return element if self.pathType == 'NAME': element = self.driver.find_element_by_name(self.pathValue) return element except NoSuchElementException: return None def get_element_by_index(self, index): """ get element by index :param index: index :return: element """ try: if self.pathType == 'ID': element = self.driver.find_element_by_id(self.pathValue) return element[index] if 
self.pathType == 'XPATH': element = self.driver.find_elements_by_xpath(self.pathValue) return element[index] if self.pathType == 'CLASSNAME': element = self.driver.find_element_by_class_name(self.pathValue) return element[index] if self.pathType == 'NAME': element = self.driver.find_element_by_name(self.pathValue) return element[index] except NoSuchElementException: return None def get_element_list(self): """ get element list :return: element list """ try: if self.pathType == 'ID': element_list = self.driver.find_element_by_id(self.pathValue) return element_list if self.pathType == 'XPATH': element_list = self.driver.find_elements_by_xpath(self.pathValue) return element_list if self.pathType == 'CLASSNAME': element_list = self.driver.find_element_by_class_name(self.pathValue) return element_list if self.pathType == 'NAME': element_list = self.driver.find_element_by_name(self.pathValue) return element_list except NoSuchElementException: return None def click(self): """ click element :return: """ element = self.get_element() time.sleep(1) element.click() def send_key(self, key): """ input key :param key: input value :return: """ element = self.get_element() time.sleep(1) element.clear() element.send_keys(key) def input_keys(self, index, key): """ By index send key :param index: index :param key: key :return: """ element = self.get_element_by_index(index) time.sleep(1) element.clear() element.send_keys(key) def get_text_value(self): """ get attribute :return: """ element = self.get_element() value = element.get_attribute('text') return str(value)
a40004ba548e520cede3f28efbf8c20e012e0185
373cd41477438cc8826cd2a2f8689be84f486339
/msticpy/config/ce_data_providers.py
461dd3a6013e76b92a7875acbbc937f2d5327b61
[ "LicenseRef-scancode-generic-cla", "LGPL-3.0-only", "BSD-3-Clause", "LicenseRef-scancode-free-unknown", "ISC", "LGPL-2.0-or-later", "PSF-2.0", "Apache-2.0", "BSD-2-Clause", "LGPL-2.1-only", "Unlicense", "Python-2.0", "LicenseRef-scancode-python-cwi", "MIT", "LGPL-2.1-or-later", "GPL-2.0-or-later", "HPND", "ODbL-1.0", "GPL-1.0-or-later", "MPL-2.0" ]
permissive
RiskIQ/msticpy
cd42d601144299ec43631554076cc52cbb42dc98
44b1a390510f9be2772ec62cb95d0fc67dfc234b
refs/heads/master
2023-08-27T00:11:30.098917
2021-06-17T22:54:29
2021-06-17T22:54:29
374,787,165
1
0
MIT
2021-09-16T19:05:43
2021-06-07T20:05:09
Python
UTF-8
Python
false
false
1,049
py
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
"""Data Providers Component Edit."""
from .._version import VERSION
from .ce_provider_base import CEProviders, HELP_URIS

__version__ = VERSION
__author__ = "Ian Hellen"


# pylint: disable=too-many-ancestors, duplicate-code
class CEDataProviders(CEProviders):
    """Settings editor component for the DataProviders config section."""

    _DESCRIPTION = "Settings for Data Providers"
    _COMP_PATH = "DataProviders"
    # _HELP_TEXT inherited from base
    # Help links shown by the component: the data-acquisition docs page
    # plus the shared links defined in ce_provider_base.
    _HELP_URI = {
        "Data Providers": (
            "https://msticpy.readthedocs.io/en/latest/"
            + "DataAcquisition.html"
        ),
        **HELP_URIS,
    }
    _COMPONENT_HELP = """
    <p><b>LocalData provider <i>data_paths</i></b>
    Enter one or more data paths, separated by new lines
    </p>
    """
09cebb2e4a74f46e415c95b46e898d0f613ea202
1dae87abcaf49f1d995d03c0ce49fbb3b983d74a
/programs/subroutines/Grav Comp Ramp.sub.py
d53530d7c120d2b2fbd9cc6eeda242696b5c8d6d
[]
no_license
BEC-Trento/BEC1-data
651cd8e5f15a7d9848f9921b352e0830c08f27dd
f849086891bc68ecf7447f62962f791496d01858
refs/heads/master
2023-03-10T19:19:54.833567
2023-03-03T22:59:01
2023-03-03T22:59:01
132,161,998
0
0
null
null
null
null
UTF-8
Python
false
false
115
py
prg_comment = "" prg_version = "0.5.1" def program(prg, cmd): prg.add(0, "Grav Comp", 0.000000) return prg
b4e3ece9d63cafcc0094b68b757097aa4f19d1a0
6c00499dfe1501294ac56b0d1607fb942aafc2ee
/eventregistry/Query.py
b5101c6dd57c31efc43c5f5eb21add58b4e548fd
[ "MIT" ]
permissive
EventRegistry/event-registry-python
dd692729cb5c505e421d4b771804e712e5b6442b
bf3ce144fa61cc195840591bae5ca88b31ca9139
refs/heads/master
2023-07-06T11:04:41.033864
2023-06-23T08:40:31
2023-06-23T08:40:31
40,995,963
176
48
MIT
2020-10-21T09:17:06
2015-08-18T20:29:23
Python
UTF-8
Python
false
false
16,151
py
from .Base import QueryParamsBase, QueryItems import six, datetime from typing import Union, List class _QueryCore(object): def __init__(self): self._queryObj = {} def getQuery(self): return self._queryObj def setQueryParam(self, paramName, val): self._queryObj[paramName] = val def _setValIfNotDefault(self, propName, value, defVal): if value != defVal: self._queryObj[propName] = value class BaseQuery(_QueryCore): def __init__(self, keyword: Union[str, QueryItems, None] = None, conceptUri: Union[str, QueryItems, None] = None, categoryUri: Union[str, QueryItems, None] = None, sourceUri: Union[str, QueryItems, None] = None, locationUri: Union[str, QueryItems, None] = None, lang: Union[str, QueryItems, None] = None, dateStart: Union[datetime.datetime, datetime.date, str, None] = None, dateEnd: Union[datetime.datetime, datetime.date, str, None] = None, sourceLocationUri: Union[str, List[str], None] = None, sourceGroupUri: Union[str, List[str], None] = None, # article or event search only: dateMention: Union[datetime.datetime, datetime.date, str, None] = None, authorUri: Union[str, List[str], None] = None, keywordLoc: str = "body", # event search only: minMaxArticlesInEvent = None, # mention search only: industryUri: Union[str, QueryItems, None] = None, sdgUri: Union[str, QueryItems, None] = None, sasbUri: Union[str, QueryItems, None] = None, esgUri: Union[str, QueryItems, None] = None, # universal: exclude: Union["BaseQuery", "CombinedQuery", None] = None): """ @param keyword: keyword(s) to query. Either None, string or QueryItems instance @param conceptUri: concept(s) to query. Either None, string or QueryItems instance @param sourceUri: source(s) to query. Either None, string or QueryItems instance @param locationUri: location(s) to query. Either None, string or QueryItems instance @param categoryUri: categories to query. Either None, string or QueryItems instance @param lang: language(s) to query. 
Either None, string or QueryItems instance @param dateStart: starting date. Either None, string or date or datetime @param dateEnd: ending date. Either None, string or date or datetime @param dateMention: search by mentioned dates - Either None, string or date or datetime or a list of these types @param sourceLocationUri: find content generated by news sources at the specified geographic location - can be a city URI or a country URI. Multiple items can be provided using a list @param sourceGroupUri: a single or multiple source group URIs. A source group is a group of news sources, commonly defined based on common topic or importance @param authorUri: author(s) to query. Either None, string or QueryItems instance @param keywordLoc: where should we look when searching using the keywords provided by "keyword" parameter. "body" (default), "title", or "body,title" @param minMaxArticlesInEvent: a tuple containing the minimum and maximum number of articles that should be in the resulting events. Parameter relevant only if querying events @param exclude: a instance of BaseQuery, CombinedQuery or None. Used to filter out results matching the other criteria specified in this query """ super(BaseQuery, self).__init__() self._setQueryArrVal("keyword", keyword) self._setQueryArrVal("conceptUri", conceptUri) self._setQueryArrVal("categoryUri", categoryUri) self._setQueryArrVal("sourceUri", sourceUri) self._setQueryArrVal("locationUri", locationUri) self._setQueryArrVal("lang", lang) # starting date of the published articles (e.g. 2014-05-02) if dateStart is not None: self._queryObj["dateStart"] = QueryParamsBase.encodeDate(dateStart) # ending date of the published articles (e.g. 2014-05-02) if dateEnd is not None: self._queryObj["dateEnd"] = QueryParamsBase.encodeDate(dateEnd) # mentioned date detected in articles (e.g. 
2014-05-02) if dateMention is not None: if isinstance(dateMention, list): self._queryObj["dateMention"] = [QueryParamsBase.encodeDate(d) for d in dateMention] else: self._queryObj["dateMention"] = QueryParamsBase.encodeDate(dateMention) self._setQueryArrVal("sourceLocationUri", sourceLocationUri) self._setQueryArrVal("sourceGroupUri", sourceGroupUri) self._setQueryArrVal("authorUri", authorUri) self._setQueryArrVal("industryUri", industryUri) self._setQueryArrVal("sdgUri", sdgUri) self._setQueryArrVal("sasbUri", sasbUri) self._setQueryArrVal("esgUri", esgUri) if keywordLoc != "body": self._queryObj["keywordLoc"] = keywordLoc if minMaxArticlesInEvent is not None: assert isinstance(minMaxArticlesInEvent, tuple), "minMaxArticlesInEvent parameter should either be None or a tuple with two integer values" self._queryObj["minArticlesInEvent"] = minMaxArticlesInEvent[0] self._queryObj["maxArticlesInEvent"] = minMaxArticlesInEvent[1] if exclude is not None: assert isinstance(exclude, (CombinedQuery, BaseQuery)), "exclude parameter was not a CombinedQuery or BaseQuery instance" self._queryObj["$not"] = exclude.getQuery() def _setQueryArrVal(self, propName: str, value): # by default we have None - so don't do anything if value is None: return # if we have an instance of QueryItems then apply it if isinstance(value, QueryItems): self._queryObj[propName] = { value.getOper(): value.getItems() } # if we have a string value, just use it elif isinstance(value, six.string_types): self._queryObj[propName] = value # there should be no other valid types else: assert False, "Parameter '%s' was of unsupported type. 
It should either be None, a string or an instance of QueryItems" % (propName) class CombinedQuery(_QueryCore): def __init__(self): super(CombinedQuery, self).__init__() @staticmethod def AND(queryArr: List[Union["BaseQuery", "CombinedQuery"]], exclude: Union["BaseQuery", "CombinedQuery", None] = None): """ create a combined query with multiple items on which to perform an AND operation @param queryArr: a list of items on which to perform an AND operation. Items can be either a CombinedQuery or BaseQuery instances. @param exclude: a instance of BaseQuery, CombinedQuery or None. Used to filter out results matching the other criteria specified in this query """ assert isinstance(queryArr, list), "provided argument as not a list" assert len(queryArr) > 0, "queryArr had an empty list" q = CombinedQuery() q.setQueryParam("$and", []) for item in queryArr: assert isinstance(item, (CombinedQuery, BaseQuery)), "item in the list was not a CombinedQuery or BaseQuery instance" q.getQuery()["$and"].append(item.getQuery()) if exclude is not None: assert isinstance(exclude, (CombinedQuery, BaseQuery)), "exclude parameter was not a CombinedQuery or BaseQuery instance" q.setQueryParam("$not", exclude.getQuery()) return q @staticmethod def OR(queryArr: List[Union["BaseQuery", "CombinedQuery"]], exclude: Union["BaseQuery", "CombinedQuery", None] = None): """ create a combined query with multiple items on which to perform an OR operation @param queryArr: a list of items on which to perform an OR operation. Items can be either a CombinedQuery or BaseQuery instances. @param exclude: a instance of BaseQuery, CombinedQuery or None. 
Used to filter out results matching the other criteria specified in this query """ assert isinstance(queryArr, list), "provided argument as not a list" assert len(queryArr) > 0, "queryArr had an empty list" q = CombinedQuery() q.setQueryParam("$or", []) for item in queryArr: assert isinstance(item, (CombinedQuery, BaseQuery)), "item in the list was not a CombinedQuery or BaseQuery instance" q.getQuery()["$or"].append(item.getQuery()) if exclude is not None: assert isinstance(exclude, (CombinedQuery, BaseQuery)), "exclude parameter was not a CombinedQuery or BaseQuery instance" q.setQueryParam("$not", exclude.getQuery()) return q class ComplexArticleQuery(_QueryCore): def __init__(self, query: Union["BaseQuery", "CombinedQuery"], dataType: Union[str, List[str]] = "news", minSentiment: Union[float, None] = None, maxSentiment: Union[float, None] = None, minSocialScore: int = 0, minFacebookShares: int = 0, startSourceRankPercentile: int = 0, endSourceRankPercentile: int = 100, isDuplicateFilter: str = "keepAll", hasDuplicateFilter: str = "keepAll", eventFilter: str = "keepAll"): """ create an article query using a complex query @param query: an instance of CombinedQuery or BaseQuery to use to find articles that match the conditions @param dataType: data type to search for. Possible values are "news" (news content), "pr" (PR content) or "blogs". If you want to use multiple data types, put them in an array (e.g. 
["news", "pr"]) @param minSentiment: what should be the minimum sentiment on the articles in order to return them (None means that we don't filter by sentiment) @param maxSentiment: what should be the maximum sentiment on the articles in order to return them (None means that we don't filter by sentiment) @param minSocialScore: at least how many times should the articles be shared on social media in order to return them @param minFacebookShares: at least how many times should the articles be shared on Facebook in order to return them @param startSourceRankPercentile: starting percentile of the sources to consider in the results (default: 0). Value should be in range 0-90 and divisible by 10. @param endSourceRankPercentile: ending percentile of the sources to consider in the results (default: 100). Value should be in range 10-100 and divisible by 10. @param isDuplicateFilter: some articles can be duplicates of other articles. What should be done with them. Possible values are: "skipDuplicates" (skip the resulting articles that are duplicates of other articles) "keepOnlyDuplicates" (return only the duplicate articles) "keepAll" (no filtering, default) @param hasDuplicateFilter: some articles are later copied by others. What should be done with such articles. Possible values are: "skipHasDuplicates" (skip the resulting articles that have been later copied by others) "keepOnlyHasDuplicates" (return only the articles that have been later copied by others) "keepAll" (no filtering, default) @param eventFilter: some articles describe a known event and some don't. This filter allows you to filter the resulting articles based on this criteria. 
Possible values are: "skipArticlesWithoutEvent" (skip articles that are not describing any known event in ER) "keepOnlyArticlesWithoutEvent" (return only the articles that are not describing any known event in ER) "keepAll" (no filtering, default) """ super(ComplexArticleQuery, self).__init__() assert isinstance(query, (CombinedQuery, BaseQuery)), "query parameter was not a CombinedQuery or BaseQuery instance" self._queryObj["$query"] = query.getQuery() filter = {} if dataType != "news": filter["dataType"] = dataType if minSentiment is not None: filter["minSentiment"] = minSentiment if maxSentiment is not None: filter["maxSentiment"] = maxSentiment if minSocialScore > 0: filter["minSocialScore"] = minSocialScore if minFacebookShares > 0: filter["minFacebookShares"] = minFacebookShares if startSourceRankPercentile != 0: filter["startSourceRankPercentile"] = startSourceRankPercentile if endSourceRankPercentile != 100: filter["endSourceRankPercentile"] = endSourceRankPercentile if isDuplicateFilter != "keepAll": filter["isDuplicate"] = isDuplicateFilter if hasDuplicateFilter != "keepAll": filter["hasDuplicate"] = hasDuplicateFilter if eventFilter != "keepAll": filter["hasEvent"] = eventFilter if len(filter) > 0: self._queryObj["$filter"] = filter class ComplexEventQuery(_QueryCore): def __init__(self, query: Union["BaseQuery", "CombinedQuery"], minSentiment: Union[float, None] = None, maxSentiment: Union[float, None] = None): """ create an event query using a complex query @param query: an instance of CombinedQuery or BaseQuery to use to find events that match the conditions """ super(ComplexEventQuery, self).__init__() assert isinstance(query, (CombinedQuery, BaseQuery)), "query parameter was not a CombinedQuery or BaseQuery instance" filter = {} if minSentiment is not None: filter["minSentiment"] = minSentiment if maxSentiment is not None: filter["maxSentiment"] = maxSentiment if len(filter) > 0: self._queryObj["$filter"] = filter self._queryObj["$query"] = 
query.getQuery() class ComplexMentionQuery(_QueryCore): def __init__(self, query: Union["BaseQuery", "CombinedQuery"], minSentiment: Union[float, None] = None, maxSentiment: Union[float, None] = None, minSentenceIndex: Union[int, None] = None, maxSentenceIndex: Union[int, None] = None, showDuplicates: bool = False): """ create a mention query using a complex query @param query: an instance of CombinedQuery or BaseQuery to use to find events that match the conditions @param minSentiment: the minimum sentiment of the mentions to return @param maxSentiment: the maximum sentiment of the mentions to return @param minSentenceIndex: the minimum sentence index of the mentions to return @param maxSentenceIndex: the maximum sentence index of the mentions to return """ super(ComplexMentionQuery, self).__init__() assert isinstance(query, (CombinedQuery, BaseQuery)), "query parameter was not a CombinedQuery or BaseQuery instance" filter = {} if minSentiment is not None: filter["minSentiment"] = minSentiment if maxSentiment is not None: filter["maxSentiment"] = maxSentiment if minSentenceIndex is not None: filter["minSentenceIndex"] = minSentenceIndex if maxSentenceIndex is not None: filter["maxSentenceIndex"] = maxSentenceIndex if showDuplicates: filter["showDuplicates"] = showDuplicates if len(filter) > 0: self._queryObj["$filter"] = filter self._queryObj["$query"] = query.getQuery()
ecffd0cb40db3a2541dd08f1f6cbc13ea53320ed
ed0dd577f03a804cdc274f6c7558fafaac574dff
/python/pyre/weaver/mills/CxxMill.py
d5307d0adaae8fcbb9fa32dd74b5c3f627978cec
[ "Apache-2.0" ]
permissive
leandromoreira/vmaf
fd26e2859136126ecc8e9feeebe38a51d14db3de
a4cf599444701ea168f966162194f608b4e68697
refs/heads/master
2021-01-19T03:43:15.677322
2016-10-08T18:02:22
2016-10-08T18:02:22
70,248,500
3
0
null
2016-10-07T13:21:28
2016-10-07T13:21:27
null
UTF-8
Python
false
false
701
py
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
#                               Michael A.G. Aivazis
#                        California Institute of Technology
#                        (C) 1998-2005  All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#

from pyre.weaver.components.LineMill import LineMill


class CxxMill(LineMill):
    """Line-oriented weaver mill that emits C++-style ``//`` comments."""

    # Aliases under which this mill is registered with the weaver.
    names = ["c++", "cxx"]

    def __init__(self):
        # "//" is the line-comment leader; the second argument is the
        # emacs mode line placed at the top of generated files.
        LineMill.__init__(self, "//", "// -*- C++ -*-")


# version
__id__ = "$Id: CxxMill.py,v 1.1.1.1 2006-11-27 00:10:09 aivazis Exp $"

# End of file
e4819429a7fe9df8fcb508e0a4e58e531c3b358e
c6d4fa98b739a64bb55a8750b4aecd0fc0b105fd
/ScanPi/QRbytes/472.py
cd01befc97225989650424096a854c59ae1dc148
[]
no_license
NUSTEM-UK/Heart-of-Maker-Faire
de2c2f223c76f54a8b4c460530e56a5c74b65ca3
fa5a1661c63dac3ae982ed080d80d8da0480ed4e
refs/heads/master
2021-06-18T13:14:38.204811
2017-07-18T13:47:49
2017-07-18T13:47:49
73,701,984
2
0
null
null
null
null
UTF-8
Python
false
false
94,948
py
data = [ 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 
0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 
0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 
0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 
0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 
0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 
0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 
0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 
0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 
0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 
0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 
0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 
0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 
0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 
0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 
0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 
0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x0F, 0xFF, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0xFF, 0xFF, 0xFF, 0x00, 0x00, 0x00, 0xFF, 0xF0, 0x00, 0xFF, 0xFF, 0xFF, 0xFF, 0xF0, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00 ]
d4adbe198d9a9e8a3154b16d3b046067822802d5
2836c3caf8ca332635640a27254a345afd449081
/iem/regain_hour_map.py
b88680e83d256a083176bb0d735984295da3bb65
[ "Apache-2.0", "MIT" ]
permissive
akrherz/DEV
27cf1bac978a0d6bbfba1851b90d2495a3bdcd66
3b1ef5841b25365d9b256467e774f35c28866961
refs/heads/main
2023-08-30T10:02:52.750739
2023-08-29T03:08:01
2023-08-29T03:08:01
65,409,757
2
0
MIT
2023-09-12T03:06:07
2016-08-10T19:16:28
Jupyter Notebook
UTF-8
Python
false
false
1,689
py
"""Plot the scam that is DST""" import ephem import mx.DateTime import tqdm from pyiem.plot import MapPlot def compute_sunrise(lat, long): arr = [] sun = ephem.Sun() ames = ephem.Observer() ames.lat = lat ames.long = long sts = mx.DateTime.DateTime(2018, 3, 10) interval = mx.DateTime.RelativeDateTime(days=1) now = sts doy = [] returnD = 0 ames.date = now.strftime("%Y/%m/%d") rise = mx.DateTime.strptime( str(ames.next_rising(sun)), "%Y/%m/%d %H:%M:%S" ) rise = rise.localtime() delta = rise.hour * 60 + rise.minute now += interval while True: ames.date = now.strftime("%Y/%m/%d") rise2 = mx.DateTime.strptime( str(ames.next_rising(sun)), "%Y/%m/%d %H:%M:%S" ) rise2 = rise2.localtime() delta2 = rise2.hour * 60 + rise2.minute if delta2 < delta: return (rise2 - rise).days now += interval return doy, arr, returnD def main(): """Go Main Go.""" lats = [] lons = [] vals = [] for lon in tqdm.tqdm(range(-130, -60, 2)): for lat in range(20, 55, 1): lats.append(lat) lons.append(lon) vals.append(compute_sunrise(str(lat), str(lon))) m = MapPlot( sector="conus", title="Days to Recover Morning Hour after Spring Saving Time Change", subtitle=( "days until local time of sunrise is earlier " "than on 10 March, local DST rules ignored for plot" ), ) m.contourf(lons, lats, vals, range(27, 78, 3), units="days") m.postprocess(filename="180313.png") if __name__ == "__main__": main()
dd2f007d5531fe7a3a72581701ad253e6d6eb614
9815041feb5bd2a89e39d86e544ca44c2e17e318
/config/settings.py
fdd605a6aef832ee03fc0708d15aa8ca4282d1b3
[]
no_license
raimbaev223/django-docker-postgres-template
5ecb62fdc57bb3af77815c3c4d1f03c98d0fdaf3
f97449cf90b87daed374576ba52e545fc1694be0
refs/heads/master
2023-04-03T05:22:38.668148
2021-04-05T10:47:52
2021-04-05T10:47:52
354,720,373
0
0
null
null
null
null
UTF-8
Python
false
false
3,247
py
""" Django settings for djangoforprofessionals_ch3 project. Generated by 'django-admin startproject' using Django 3.1.7. For more information on this file, see https://docs.djangoproject.com/en/3.1/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/3.1/ref/settings/ """ from pathlib import Path # Build paths inside the project like this: BASE_DIR / 'subdir'. BASE_DIR = Path(__file__).resolve().parent.parent # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = 'j)xol&2u_4%!32uegp@x)y*=hmn8!nlp4_1tfxq#zwu#0et$46' # SECURITY WARNING: don't run with debug turned on in production! DEBUG = True ALLOWED_HOSTS = [] # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', ] MIDDLEWARE = [ 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'djangoforprofessionals_ch3.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [BASE_DIR / 'templates'] , 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'djangoforprofessionals_ch3.wsgi.application' # Database # https://docs.djangoproject.com/en/3.1/ref/settings/#databases DATABASES = { 
'default': { 'ENGINE': 'django.db.backends.postgresql', 'NAME': 'postgres', 'USER': 'postgres', 'PASSWORD': 'postgres', 'HOST': 'db', 'PORT': 5432 } } # Password validation # https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # https://docs.djangoproject.com/en/3.1/topics/i18n/ LANGUAGE_CODE = 'en-us' TIME_ZONE = 'UTC' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/3.1/howto/static-files/ STATIC_URL = '/static/'
60d6cb2db006e20e4b18abeedfcd5b7a69a9801b
5d48aba44824ff9b9ae7e3616df10aad323c260e
/tree/653.two_sum_IV_input_is_a_BST.py
4ece9fd24ef54b435eb886456dcb70ac2d4e7d17
[]
no_license
eric496/leetcode.py
37eab98a68d6d3417780230f4b5a840f6d4bd2a6
32a76cf4ced6ed5f89b5fc98af4695b8a81b9f17
refs/heads/master
2021-07-25T11:08:36.776720
2021-07-01T15:49:31
2021-07-01T15:49:31
139,770,188
3
0
null
null
null
null
UTF-8
Python
false
false
940
py
""" Given a Binary Search Tree and a target number, return true if there exist two elements in the BST such that their sum is equal to the given target. Example 1: Input: 5 / \ 3 6 / \ \ 2 4 7 Target = 9 Output: True Example 2: Input: 5 / \ 3 6 / \ \ 2 4 7 Target = 28 Output: False """ # Definition for a binary tree node. class TreeNode: def __init__(self, x): self.val = x self.left = None self.right = None # Solution 1: class Solution: def findTarget(self, root: TreeNode, k: int) -> bool: target = set() return self.dfs(root, k, target) def dfs(self, root: TreeNode, k: int, target: int) -> bool: if not root: return False if root.val in target: return True else: target.add(k - root.val) return self.dfs(root.left, k, target) or self.dfs(root.right, k, target)
6c0c670495008cbd06140f21e047f3da7ee7a9c9
6ceea2578be0cbc1543be3649d0ad01dd55072aa
/src/examples/elphf/diffusion/mesh1D.py
b8a851a3a7a81fb4e593b52c70dcb733f0cf0331
[ "LicenseRef-scancode-public-domain" ]
permissive
regmi/fipy
57972add2cc8e6c04fda09ff2faca9a2c45ad19d
eb4aacf5a8e35cdb0e41beb0d79a93e7c8aacbad
refs/heads/master
2020-04-27T13:51:45.095692
2010-04-09T07:32:42
2010-04-09T07:32:42
602,099
1
0
null
null
null
null
UTF-8
Python
false
false
6,365
py
#!/usr/bin/env python

##
 # ###################################################################
 #  FiPy - Python-based finite volume PDE solver
 #
 #  FILE: "mesh1D.py"
 #
 #  Author: Jonathan Guyer <[email protected]>
 #  Author: Daniel Wheeler <[email protected]>
 #  Author: James Warren   <[email protected]>
 #    mail: NIST
 #     www: http://www.ctcms.nist.gov/fipy/
 #
 # ========================================================================
 # This software was developed at the National Institute of Standards
 # and Technology by employees of the Federal Government in the course
 # of their official duties.  Pursuant to title 17 Section 105 of the
 # United States Code this software is not subject to copyright
 # protection and is in the public domain.  FiPy is an experimental
 # system.  NIST assumes no responsibility whatsoever for its use by
 # other parties, and makes no guarantees, expressed or implied, about
 # its quality, reliability, or any other characteristic.  We would
 # appreciate acknowledgement if the software is used.
 #
 # This software can be redistributed and/or modified freely
 # provided that any derivative works bear some notice that they are
 # derived from it, and any modified versions bear some notice that
 # they have been modified.
 # ========================================================================
 #
 # ###################################################################
 ##

r"""
A simple 1D example to test the setup of the multi-component diffusion
equations.  The diffusion equation for each species in single-phase
multicomponent system can be expressed as

.. math::

   \frac{\partial C_j}{\partial t}
   = D_{jj}\nabla^2 C_j
   + D_{j}\nabla\cdot
       \frac{C_j}{1 - \sum_{\substack{k=2\\ k \neq j}}^{n-1} C_k}
       \sum_{\substack{i=2\\ i \neq j}}^{n-1} \nabla C_i

where :math:`C_j` is the concentration of the :math:`j^\text{th}` species,
:math:`t` is time, :math:`D_{jj}` is the self-diffusion coefficient of the
:math:`j^\text{th}` species, and :math:`\sum_{\substack{i=2\\ i \neq j}}^{n-1}`
represents the summation over all substitutional species in the system,
excluding the solvent and the component of interest.

We solve the problem on a 1D mesh

>>> nx = 400
>>> dx = 0.01
>>> L = nx * dx
>>> from fipy import *
>>> mesh = Grid1D(dx = dx, nx = nx)

One component in this ternary system will be designated the "solvent"

>>> class ComponentVariable(CellVariable):
...     def __init__(self, mesh, value = 0., name = '',
...                  standardPotential = 0., barrier = 0.,
...                  diffusivity = None, valence = 0, equation = None):
...         CellVariable.__init__(self, mesh = mesh, value = value,
...                               name = name)
...         self.standardPotential = standardPotential
...         self.barrier = barrier
...         self.diffusivity = diffusivity
...         self.valence = valence
...         self.equation = equation
...
...     def copy(self):
...         return self.__class__(mesh = self.getMesh(),
...                               value = self.getValue(),
...                               name = self.getName(),
...                               standardPotential =
...                                   self.standardPotential,
...                               barrier = self.barrier,
...                               diffusivity = self.diffusivity,
...                               valence = self.valence,
...                               equation = self.equation)

>>> solvent = ComponentVariable(mesh = mesh, name = 'Cn', value = 1.)

We can create an arbitrary number of components,
simply by providing a :keyword:`tuple` or :keyword:`list` of components

>>> substitutionals = [
...     ComponentVariable(mesh = mesh, name = 'C1', diffusivity = 1.,
...                       standardPotential = 1., barrier = 1.),
...     ComponentVariable(mesh = mesh, name = 'C2', diffusivity = 1.,
...                       standardPotential = 1., barrier = 1.),
...     ]

>>> interstitials = []

>>> for component in substitutionals:
...     solvent -= component

We separate the solution domain into two different concentration regimes

>>> x = mesh.getCellCenters()[0]
>>> substitutionals[0].setValue(0.3)
>>> substitutionals[0].setValue(0.6, where=x > L / 2)
>>> substitutionals[1].setValue(0.6)
>>> substitutionals[1].setValue(0.3, where=x > L / 2)

We create one diffusion equation for each substitutional component

>>> for Cj in substitutionals:
...     CkSum = ComponentVariable(mesh = mesh, value = 0.)
...     CkFaceSum = FaceVariable(mesh = mesh, value = 0.)
...     for Ck in [Ck for Ck in substitutionals if Ck is not Cj]:
...         CkSum += Ck
...         CkFaceSum += Ck.getHarmonicFaceValue()
...
...     convectionCoeff = CkSum.getFaceGrad() \
...         * (Cj.diffusivity / (1. - CkFaceSum))
...
...     Cj.equation = (TransientTerm()
...                    == DiffusionTerm(coeff=Cj.diffusivity)
...                    + PowerLawConvectionTerm(coeff=convectionCoeff))

If we are running interactively, we create a viewer to see the results

>>> if __name__ == '__main__':
...     viewer = Viewer(vars=[solvent] + substitutionals,
...                     datamin=0, datamax=1)
...     viewer.plot()

Now, we iterate the problem to equilibrium, plotting as we go

>>> for i in range(40):
...     for Cj in substitutionals:
...         Cj.updateOld()
...     for Cj in substitutionals:
...         Cj.equation.solve(var = Cj,
...                           dt = 10000.)
...     if __name__ == '__main__':
...         viewer.plot()

Since there is nothing to maintain the concentration separation in this
problem, we verify that the concentrations have become uniform

>>> substitutionals[0].allclose(0.45, rtol = 1e-7, atol = 1e-7).getValue()
1
>>> substitutionals[1].allclose(0.45, rtol = 1e-7, atol = 1e-7).getValue()
1
"""
__docformat__ = 'restructuredtext'

# The module body above is an executable reST document: running this file
# extracts and executes the doctest session via fipy's doctestPlus helper.
if __name__ == '__main__':
    ## from fipy.tools.profiler.profiler import Profiler
    ## from fipy.tools.profiler.profiler import calibrate_profiler

    # fudge = calibrate_profiler(10000)
    # profile = Profiler('profile', fudge=fudge)

    import fipy.tests.doctestPlus
    exec(fipy.tests.doctestPlus._getScript())

    # profile.stop()

    # NOTE(review): raw_input is Python 2 only — this script presumably
    # targets Python 2; confirm before running under Python 3.
    raw_input("finished")
8a6f89c809cc96aab322db6b24f652d3838b3138
487fb49582444005fd55158b22b1428b8146adf4
/models/resnet.py
11f4cf9e8341e90600c0c78c9b3cd99c77f7d32e
[]
no_license
speciallan/BCNN
c7a7374931b581c73b8b24ec5e31b77578bdf6b0
6964181acb35c3005f65fb0aa38263a986efcaf0
refs/heads/master
2020-09-21T18:18:03.819322
2019-12-23T12:19:25
2019-12-23T12:19:25
224,879,530
0
0
null
null
null
null
UTF-8
Python
false
false
22,474
py
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:Speciallan

"""ResNet, ResNetV2, and ResNeXt models for Keras.

# Reference papers
- [Deep Residual Learning for Image Recognition]
  (https://arxiv.org/abs/1512.03385) (CVPR 2016 Best Paper Award)
- [Identity Mappings in Deep Residual Networks]
  (https://arxiv.org/abs/1603.05027) (ECCV 2016)
- [Aggregated Residual Transformations for Deep Neural Networks]
  (https://arxiv.org/abs/1611.05431) (CVPR 2017)

# Reference implementations
- [TensorNets]
  (https://github.com/taehoonlee/tensornets/blob/master/tensornets/resnets.py)
- [Caffe ResNet]
  (https://github.com/KaimingHe/deep-residual-networks/tree/master/prototxt)
- [Torch ResNetV2]
  (https://github.com/facebook/fb.resnet.torch/blob/master/models/preresnet.lua)
- [Torch ResNeXt]
  (https://github.com/facebookresearch/ResNeXt/blob/master/models/resnext.lua)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os
import numpy as np

from . import get_submodules_from_kwargs
from .imagenet_utils import _obtain_input_shape

# Keras submodule handles. NOTE(review): presumably populated at model-build
# time via get_submodules_from_kwargs (the keras-applications pattern) —
# they remain None until then; confirm against the model constructors.
backend = None
layers = None
models = None
keras_utils = None

# Base URL for pretrained weight downloads.
BASE_WEIGHTS_PATH = (
    'https://github.com/keras-team/keras-applications/'
    'releases/download/resnet/')
# Per-architecture pair of md5 hashes for the weight files; the meaning of
# each position in the pair is not visible in this file — confirm against
# the weight-loading code.
WEIGHTS_HASHES = {
    'resnet50': ('2cb95161c43110f7111970584f804107',
                 '4d473c1dd8becc155b73f8504c6f6626'),
    'resnet101': ('f1aeb4b969a6efcfb50fad2f0c20cfc5',
                  '88cf7a10940856eca736dc7b7e228a21'),
    'resnet152': ('100835be76be38e30d865e96f2aaae62',
                  'ee4c566cf9a93f14d82f913c2dc6dd0c'),
    'resnet50v2': ('3ef43a0b657b3be2300d5770ece849e0',
                   'fac2f116257151a9d068a22e544a4917'),
    'resnet101v2': ('6343647c601c52e1368623803854d971',
                    'c0ed64b8031c3730f411d2eb4eea35b5'),
    'resnet152v2': ('a49b44d1979771252814e80f8ec446f9',
                    'ed17cf2e0169df9d443503ef94b23b33'),
    'resnext50': ('67a5b30d522ed92f75a1f16eef299d1a',
                  '62527c363bdd9ec598bed41947b379fc'),
    'resnext101': ('34fb605428fcc7aa4d62f44404c11509',
                   '0f678c91647380debd923963594981b3')
}


def block1(x, filters, kernel_size=3, stride=1,
           conv_shortcut=True, name=None):
    """A residual block (ResNet v1 bottleneck: 1x1 -> 3x3 -> 1x1).

    # Arguments
        x: input tensor.
        filters: integer, filters of the bottleneck layer.
        kernel_size: default 3, kernel size of the bottleneck layer.
        stride: default 1, stride of the first layer.
        conv_shortcut: default True, use convolution shortcut if True,
            otherwise identity shortcut.
        name: string, block label.

    # Returns
        Output tensor for the residual block.
    """
    bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1

    # Projection shortcut (1x1 conv + BN) when changing dimensions,
    # otherwise a plain identity shortcut.
    if conv_shortcut is True:
        shortcut = layers.Conv2D(4 * filters, 1, strides=stride,
                                 name=name + '_0_conv')(x)
        shortcut = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                                             name=name + '_0_bn')(shortcut)
    else:
        shortcut = x

    # 1x1 reduce.
    x = layers.Conv2D(filters, 1, strides=stride, name=name + '_1_conv')(x)
    x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                                  name=name + '_1_bn')(x)
    x = layers.Activation('relu', name=name + '_1_relu')(x)

    # kxk spatial conv.
    x = layers.Conv2D(filters, kernel_size, padding='SAME',
                      name=name + '_2_conv')(x)
    x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                                  name=name + '_2_bn')(x)
    x = layers.Activation('relu', name=name + '_2_relu')(x)

    # 1x1 expand back to 4*filters channels.
    x = layers.Conv2D(4 * filters, 1, name=name + '_3_conv')(x)
    x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5,
                                  name=name + '_3_bn')(x)

    # Residual addition, then the block's output activation.
    x = layers.Add(name=name + '_add')([shortcut, x])
    x = layers.Activation('relu', name=name + '_out')(x)
    return x


def stack1(x, filters, blocks, stride1=2, name=None):
    """A set of stacked residual blocks.

    # Arguments
        x: input tensor.
        filters: integer, filters of the bottleneck layer in a block.
        blocks: integer, blocks in the stacked blocks.
        stride1: default 2, stride of the first layer in the first block.
        name: string, stack label.

    # Returns
        Output tensor for the stacked blocks.
    """
    # First block carries the stride and a projection shortcut; the rest
    # use identity shortcuts at stride 1.
    x = block1(x, filters, stride=stride1, name=name + '_block1')
    for i in range(2, blocks + 1):
        x = block1(x, filters, conv_shortcut=False,
                   name=name + '_block' + str(i))
    return x


def block2(x, filters, kernel_size=3, stride=1,
           conv_shortcut=False, name=None):
    """A residual block.

    # Arguments
        x: input tensor.
        filters: integer, filters of the bottleneck layer.
        kernel_size: default 3, kernel size of the bottleneck layer.
        stride: default 1, stride of the first layer.
        conv_shortcut: default False, use convolution shortcut if True,
            otherwise identity shortcut.
        name: string, block label.
# Returns Output tensor for the residual block. """ bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1 preact = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name=name + '_preact_bn')(x) preact = layers.Activation('relu', name=name + '_preact_relu')(preact) if conv_shortcut is True: shortcut = layers.Conv2D(4 * filters, 1, strides=stride, name=name + '_0_conv')(preact) else: shortcut = layers.MaxPooling2D(1, strides=stride)(x) if stride > 1 else x x = layers.Conv2D(filters, 1, strides=1, use_bias=False, name=name + '_1_conv')(preact) x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name=name + '_1_bn')(x) x = layers.Activation('relu', name=name + '_1_relu')(x) x = layers.ZeroPadding2D(padding=((1, 1), (1, 1)), name=name + '_2_pad')(x) x = layers.Conv2D(filters, kernel_size, strides=stride, use_bias=False, name=name + '_2_conv')(x) x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name=name + '_2_bn')(x) x = layers.Activation('relu', name=name + '_2_relu')(x) x = layers.Conv2D(4 * filters, 1, name=name + '_3_conv')(x) x = layers.Add(name=name + '_out')([shortcut, x]) return x def stack2(x, filters, blocks, stride1=2, name=None): """A set of stacked residual blocks. # Arguments x: input tensor. filters: integer, filters of the bottleneck layer in a block. blocks: integer, blocks in the stacked blocks. stride1: default 2, stride of the first layer in the first block. name: string, stack label. # Returns Output tensor for the stacked blocks. """ x = block2(x, filters, conv_shortcut=True, name=name + '_block1') for i in range(2, blocks): x = block2(x, filters, name=name + '_block' + str(i)) x = block2(x, filters, stride=stride1, name=name + '_block' + str(blocks)) return x def block3(x, filters, kernel_size=3, stride=1, groups=32, conv_shortcut=True, name=None): """A residual block. # Arguments x: input tensor. filters: integer, filters of the bottleneck layer. 
kernel_size: default 3, kernel size of the bottleneck layer. stride: default 1, stride of the first layer. groups: default 32, group size for grouped convolution. conv_shortcut: default True, use convolution shortcut if True, otherwise identity shortcut. name: string, block label. # Returns Output tensor for the residual block. """ bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1 if conv_shortcut is True: shortcut = layers.Conv2D((64 // groups) * filters, 1, strides=stride, use_bias=False, name=name + '_0_conv')(x) shortcut = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name=name + '_0_bn')(shortcut) else: shortcut = x x = layers.Conv2D(filters, 1, use_bias=False, name=name + '_1_conv')(x) x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name=name + '_1_bn')(x) x = layers.Activation('relu', name=name + '_1_relu')(x) c = filters // groups x = layers.ZeroPadding2D(padding=((1, 1), (1, 1)), name=name + '_2_pad')(x) x = layers.DepthwiseConv2D(kernel_size, strides=stride, depth_multiplier=c, use_bias=False, name=name + '_2_conv')(x) kernel = np.zeros((1, 1, filters * c, filters), dtype=np.float32) for i in range(filters): start = (i // c) * c * c + i % c end = start + c * c kernel[:, :, start:end:c, i] = 1. x = layers.Conv2D(filters, 1, use_bias=False, trainable=False, kernel_initializer={'class_name': 'Constant', 'config': {'value': kernel}}, name=name + '_2_gconv')(x) x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name=name + '_2_bn')(x) x = layers.Activation('relu', name=name + '_2_relu')(x) x = layers.Conv2D((64 // groups) * filters, 1, use_bias=False, name=name + '_3_conv')(x) x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name=name + '_3_bn')(x) x = layers.Add(name=name + '_add')([shortcut, x]) x = layers.Activation('relu', name=name + '_out')(x) return x def stack3(x, filters, blocks, stride1=2, groups=32, name=None): """A set of stacked residual blocks. # Arguments x: input tensor. 
filters: integer, filters of the bottleneck layer in a block. blocks: integer, blocks in the stacked blocks. stride1: default 2, stride of the first layer in the first block. groups: default 32, group size for grouped convolution. name: string, stack label. # Returns Output tensor for the stacked blocks. """ x = block3(x, filters, stride=stride1, groups=groups, name=name + '_block1') for i in range(2, blocks + 1): x = block3(x, filters, groups=groups, conv_shortcut=False, name=name + '_block' + str(i)) return x def ResNet(stack_fn, preact, use_bias, model_name='resnet', include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): """Instantiates the ResNet, ResNetV2, and ResNeXt architecture. Optionally loads weights pre-trained on ImageNet. Note that the data format convention used by the model is the one specified in your Keras config at `~/.keras/keras.json`. # Arguments stack_fn: a function that returns output tensor for the stacked residual blocks. preact: whether to use pre-activation or not (True for ResNetV2, False for ResNet and ResNeXt). use_bias: whether to use biases for convolutional layers or not (True for ResNet and ResNetV2, False for ResNeXt). model_name: string, model name. include_top: whether to include the fully-connected layer at the top of the network. weights: one of `None` (random initialization), 'imagenet' (pre-training on ImageNet), or the path to the weights file to be loaded. input_tensor: optional Keras tensor (i.e. output of `layers.Input()`) to use as image input for the model. input_shape: optional shape tuple, only to be specified if `include_top` is False (otherwise the input shape has to be `(224, 224, 3)` (with `channels_last` data format) or `(3, 224, 224)` (with `channels_first` data format). It should have exactly 3 inputs channels. pooling: optional pooling mode for feature extraction when `include_top` is `False`. 
- `None` means that the output of the model will be the 4D tensor output of the last convolutional layer. - `avg` means that global average pooling will be applied to the output of the last convolutional layer, and thus the output of the model will be a 2D tensor. - `max` means that global max pooling will be applied. classes: optional number of classes to classify images into, only to be specified if `include_top` is True, and if no `weights` argument is specified. # Returns A Keras model instance. # Raises ValueError: in case of invalid argument for `weights`, or invalid input shape. """ global backend, layers, models, keras_utils backend, layers, models, keras_utils = get_submodules_from_kwargs(kwargs) if not (weights in {'imagenet', None} or os.path.exists(weights)): raise ValueError('The `weights` argument should be either ' '`None` (random initialization), `imagenet` ' '(pre-training on ImageNet), ' 'or the path to the weights file to be loaded.') if weights == 'imagenet' and include_top and classes != 1000: raise ValueError('If using `weights` as `"imagenet"` with `include_top`' ' as true, `classes` should be 1000') # Determine proper input shape input_shape = _obtain_input_shape(input_shape, default_size=224, min_size=32, data_format=backend.image_data_format(), require_flatten=include_top, weights=weights) if input_tensor is None: img_input = layers.Input(shape=input_shape) else: if not backend.is_keras_tensor(input_tensor): img_input = layers.Input(tensor=input_tensor, shape=input_shape) else: img_input = input_tensor bn_axis = 3 if backend.image_data_format() == 'channels_last' else 1 x = layers.ZeroPadding2D(padding=((3, 3), (3, 3)), name='conv1_pad')(img_input) x = layers.Conv2D(64, 7, strides=2, use_bias=use_bias, name='conv1_conv')(x) if preact is False: x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name='conv1_bn')(x) x = layers.Activation('relu', name='conv1_relu')(x) x = layers.ZeroPadding2D(padding=((1, 1), (1, 1)), 
name='pool1_pad')(x) x = layers.MaxPooling2D(3, strides=2, name='pool1_pool')(x) x = stack_fn(x) if preact is True: x = layers.BatchNormalization(axis=bn_axis, epsilon=1.001e-5, name='post_bn')(x) x = layers.Activation('relu', name='post_relu')(x) if include_top: x = layers.GlobalAveragePooling2D(name='avg_pool')(x) x = layers.Dense(classes, activation='softmax', name='probs')(x) else: if pooling == 'avg': x = layers.GlobalAveragePooling2D(name='avg_pool')(x) elif pooling == 'max': x = layers.GlobalMaxPooling2D(name='max_pool')(x) # Ensure that the model takes into account # any potential predecessors of `input_tensor`. if input_tensor is not None: inputs = keras_utils.get_source_inputs(input_tensor) else: inputs = img_input # Create model. model = models.Model(inputs, x, name=model_name) # Load weights. if (weights == 'imagenet') and (model_name in WEIGHTS_HASHES): if include_top: file_name = model_name + '_weights_tf_dim_ordering_tf_kernels.h5' file_hash = WEIGHTS_HASHES[model_name][0] else: file_name = model_name + '_weights_tf_dim_ordering_tf_kernels_notop.h5' file_hash = WEIGHTS_HASHES[model_name][1] weights_path = keras_utils.get_file(file_name, BASE_WEIGHTS_PATH + file_name, cache_subdir='models', file_hash=file_hash) by_name = True if 'resnext' in model_name else False model.load_weights(weights_path, by_name=by_name) elif weights is not None: model.load_weights(weights) return model def ResNet50(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): def stack_fn(x): x = stack1(x, 64, 3, stride1=1, name='conv2') x = stack1(x, 128, 4, name='conv3') x = stack1(x, 256, 6, name='conv4') x = stack1(x, 512, 3, name='conv5') return x return ResNet(stack_fn, False, True, 'resnet50', include_top, weights, input_tensor, input_shape, pooling, classes, **kwargs) def ResNet101(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): def stack_fn(x): x = 
stack1(x, 64, 3, stride1=1, name='conv2') x = stack1(x, 128, 4, name='conv3') x = stack1(x, 256, 23, name='conv4') x = stack1(x, 512, 3, name='conv5') return x return ResNet(stack_fn, False, True, 'resnet101', include_top, weights, input_tensor, input_shape, pooling, classes, **kwargs) def ResNet152(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): def stack_fn(x): x = stack1(x, 64, 3, stride1=1, name='conv2') x = stack1(x, 128, 8, name='conv3') x = stack1(x, 256, 36, name='conv4') x = stack1(x, 512, 3, name='conv5') return x return ResNet(stack_fn, False, True, 'resnet152', include_top, weights, input_tensor, input_shape, pooling, classes, **kwargs) def ResNet50V2(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): def stack_fn(x): x = stack2(x, 64, 3, name='conv2') x = stack2(x, 128, 4, name='conv3') x = stack2(x, 256, 6, name='conv4') x = stack2(x, 512, 3, stride1=1, name='conv5') return x return ResNet(stack_fn, True, True, 'resnet50v2', include_top, weights, input_tensor, input_shape, pooling, classes, **kwargs) def ResNet101V2(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): def stack_fn(x): x = stack2(x, 64, 3, name='conv2') x = stack2(x, 128, 4, name='conv3') x = stack2(x, 256, 23, name='conv4') x = stack2(x, 512, 3, stride1=1, name='conv5') return x return ResNet(stack_fn, True, True, 'resnet101v2', include_top, weights, input_tensor, input_shape, pooling, classes, **kwargs) def ResNet152V2(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): def stack_fn(x): x = stack2(x, 64, 3, name='conv2') x = stack2(x, 128, 8, name='conv3') x = stack2(x, 256, 36, name='conv4') x = stack2(x, 512, 3, stride1=1, name='conv5') return x return ResNet(stack_fn, True, True, 'resnet152v2', include_top, weights, input_tensor, 
input_shape, pooling, classes, **kwargs) def ResNeXt50(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): def stack_fn(x): x = stack3(x, 128, 3, stride1=1, name='conv2') x = stack3(x, 256, 4, name='conv3') x = stack3(x, 512, 6, name='conv4') x = stack3(x, 1024, 3, name='conv5') return x return ResNet(stack_fn, False, False, 'resnext50', include_top, weights, input_tensor, input_shape, pooling, classes, **kwargs) def ResNeXt101(include_top=True, weights='imagenet', input_tensor=None, input_shape=None, pooling=None, classes=1000, **kwargs): def stack_fn(x): x = stack3(x, 128, 3, stride1=1, name='conv2') x = stack3(x, 256, 4, name='conv3') x = stack3(x, 512, 23, name='conv4') x = stack3(x, 1024, 3, name='conv5') return x return ResNet(stack_fn, False, False, 'resnext101', include_top, weights, input_tensor, input_shape, pooling, classes, **kwargs) setattr(ResNet50, '__doc__', ResNet.__doc__) setattr(ResNet101, '__doc__', ResNet.__doc__) setattr(ResNet152, '__doc__', ResNet.__doc__) setattr(ResNet50V2, '__doc__', ResNet.__doc__) setattr(ResNet101V2, '__doc__', ResNet.__doc__) setattr(ResNet152V2, '__doc__', ResNet.__doc__) setattr(ResNeXt50, '__doc__', ResNet.__doc__) setattr(ResNeXt101, '__doc__', ResNet.__doc__)
f1bfe5de0e26671054c332bdfc93d2f0d9d4265e
53fab060fa262e5d5026e0807d93c75fb81e67b9
/backup/user_070/ch74_2020_04_06_14_56_45_688915.py
fbd6fc22f27f23767e365de5db1099f9b9558694
[]
no_license
gabriellaec/desoft-analise-exercicios
b77c6999424c5ce7e44086a12589a0ad43d6adca
01940ab0897aa6005764fc220b900e4d6161d36b
refs/heads/main
2023-01-31T17:19:42.050628
2020-12-16T05:21:31
2020-12-16T05:21:31
306,735,108
0
0
null
null
null
null
UTF-8
Python
false
false
197
py
def conta_bigramas(x): dicionario = {} for i in range(len(x)-1): dicionario[x[i],x[i+1]] = 0 for i in range(len(x)-1): dicionario[x[i],x[i+1]] += 1 return dicionario
9c71981567308ad84f8cdd6d9663bb32cd4dd6f4
bca9c2fa3c4c3d06dd612280ce39090a9dfab9bd
/neekanee/neekanee_solr/solr_query_builder.py
bb3792600dc6a9c0af8eefbc4cd05bff2fbb4fb6
[]
no_license
thayton/neekanee
0890dd5e5cf5bf855d4867ae02de6554291dc349
f2b2a13e584469d982f7cc20b49a9b19fed8942d
refs/heads/master
2021-03-27T11:10:07.633264
2018-07-13T14:19:30
2018-07-13T14:19:30
11,584,212
2
0
null
null
null
null
UTF-8
Python
false
false
5,806
py
KM_PER_MILE = 1.61 class SOLRQueryBuilder(): """ Build a SOLR query given a GET QueryDict for a job search. """ def __init__(self): self.qdict = {} # # Mapping of refine search query parameter names to SOLR doc # field names. All refine search query parameters are implemented # as filter queries. For each param in GET from the left column # below, we add a new filter query using the field name in the # right column and value GET[param]. # self.refine_search_fq = { # # param SOLR field name # ----- --------------- 'tld': 'tld', 'title': 'title', 'company': 'company_name', 'size': 'company_size', 'tags': 'company_tags', 'ltags': 'company_location_tags', 'awards': 'company_awards', 'vacation': 'vacation_year_1', 'country': 'country', 'state': 'state', 'city': 'city', } def add_fq(self, filt, val): if filt != 'vacation_year_1': new_fq = '%s:"%s"' % (filt,val) else: new_fq = '%s:%s' % (filt,val) if self.qdict.has_key('fq'): self.qdict['fq'].append(new_fq) else: self.qdict['fq'] = [new_fq] def build_query(self, GET): """ GET : QueryDict object for an HTTP GET request """ self.qdict['q'] = '{!q.op=AND}' + GET.get('q', '*:*') self.qdict['wt'] = 'json' if 'lat' in GET and 'lng' and GET: self.qdict['fq'] = [ '{!bbox}' ] self.qdict['sfield'] = 'latlng' self.qdict['pt'] = '%.2f,%.2f' % (float(GET['lat']),float(GET['lng'])) self.qdict['d'] = '%.2f' % (float(GET['radius']) * KM_PER_MILE) for parm,filt in self.refine_search_fq.items(): val = GET.get(parm, None) if val is None: continue if parm == 'tags' or parm == 'ltags' or parm == 'awards': # multivalued for v in val.split(): self.add_fq(filt, v) elif parm == 'vacation': self.add_fq(filt, '[%d TO %d]' % (int(val), int(val)+4)) else: self.add_fq(filt, val) return self.qdict class SOLRJobSearchQueryBuilder(SOLRQueryBuilder): def __init__(self, items_per_page): SOLRQueryBuilder.__init__(self) self.items_per_page = items_per_page # # Pararms specific to job search query with faceting for the # sidebar. 
The state facet field is set to 51 so that all of # the states will show up in the map (and not just 10 of them). # params = { 'fl': 'id,title,url,url_data,company_id,company_name,company_ats,company_jobs_page_url,city,state,country', 'facet': 'true', 'facet.field': ['country', 'state', 'city', 'tld', 'company_size', 'company_name', 'company_tags', 'company_location_tags', 'company_awards'], 'facet.mincount': '1', 'facet.limit': '10', 'f.company_tags.facet.limit': '32', 'f.country.facet.limit': '200', 'f.state.facet.limit': '51', 'facet.range': 'vacation_year_1', 'facet.range.start': '10', 'facet.range.end': '50', 'facet.range.gap': '5', 'hl': 'true', 'hl.fl': 'desc', 'hl.snippets': 2, 'hl.alternateField': 'desc', 'hl.maxAlternateFieldLength': '210', 'rows': '%d' % self.items_per_page } self.qdict.update(params) def build_query(self, GET): page_number = int(GET.get('page', '1')) self.qdict.update({'start': '%d' % (self.items_per_page * (page_number - 1))}) return SOLRQueryBuilder.build_query(self, GET) class SOLRCompanyFacetQueryBuilder(SOLRQueryBuilder): def __init__(self): SOLRQueryBuilder.__init__(self) params = { 'fl': 'id', 'facet': 'true', 'facet.field': ['country', 'state', 'city', 'tld', 'company_size', 'company_tags', 'company_location_tags', 'company_awards', 'company_id'], 'facet.mincount': '1', 'facet.limit': '10', 'facet.range': 'vacation_year_1', 'facet.range.start': '10', 'facet.range.end': '50', 'facet.range.gap': '5', 'f.company_tags.facet.limit': '32', 'f.country.facet.limit': '200', 'f.state.facet.limit': '51', 'f.company_id.facet.limit': '-1' } self.qdict.update(params) def build_query(self, GET): return SOLRQueryBuilder.build_query(self, GET) class SOLRLocationFacetQueryBuilder(SOLRQueryBuilder): def __init__(self): SOLRQueryBuilder.__init__(self) params = { 'fl': 'id', 'facet': 'true', 'facet.field': ['country', 'state', 'city', 'tld', 'company_size', 'company_name', 'company_tags', 'company_location_tags', 'company_awards'], 'facet.mincount': 
'1', 'facet.limit': '10', 'facet.range': 'vacation_year_1', 'facet.range.start': '10', 'facet.range.end': '50', 'facet.range.gap': '5', 'f.company_tags.facet.limit': '32', 'f.country.facet.limit': '200', 'f.state.facet.limit': '60', 'f.city.facet.limit': '-1' } self.qdict.update(params) def build_query(self, GET): return SOLRQueryBuilder.build_query(self, GET) class SOLRJobTitleFacetQueryBuilder(SOLRQueryBuilder): pass
5b08a8497e67c4ad8f4ea6744c7a8bec7de04b04
d7016f69993570a1c55974582cda899ff70907ec
/sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2022_08_01/aio/operations/_ssh_public_keys_operations.py
3272fd669e75a7dc41ea48ede8265410af966dc1
[ "LicenseRef-scancode-generic-cla", "MIT", "LGPL-2.1-or-later" ]
permissive
kurtzeborn/azure-sdk-for-python
51ca636ad26ca51bc0c9e6865332781787e6f882
b23e71b289c71f179b9cf9b8c75b1922833a542a
refs/heads/main
2023-03-21T14:19:50.299852
2023-02-15T13:30:47
2023-02-15T13:30:47
157,927,277
0
0
MIT
2022-07-19T08:05:23
2018-11-16T22:15:30
Python
UTF-8
Python
false
false
30,872
py
# pylint: disable=too-many-lines # coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- import sys from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload import urllib.parse from azure.core.async_paging import AsyncItemPaged, AsyncList from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, ResourceNotModifiedError, map_error, ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import AsyncHttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from azure.core.tracing.decorator_async import distributed_trace_async from azure.core.utils import case_insensitive_dict from azure.mgmt.core.exceptions import ARMErrorFormat from ... import models as _models from ..._vendor import _convert_request from ...operations._ssh_public_keys_operations import ( build_create_request, build_delete_request, build_generate_key_pair_request, build_get_request, build_list_by_resource_group_request, build_list_by_subscription_request, build_update_request, ) if sys.version_info >= (3, 8): from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports else: from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]] class SshPublicKeysOperations: """ .. warning:: **DO NOT** instantiate this class directly. 
Instead, you should access the following operations through :class:`~azure.mgmt.compute.v2022_08_01.aio.ComputeManagementClient`'s :attr:`ssh_public_keys` attribute. """ models = _models def __init__(self, *args, **kwargs) -> None: input_args = list(args) self._client = input_args.pop(0) if input_args else kwargs.pop("client") self._config = input_args.pop(0) if input_args else kwargs.pop("config") self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer") self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer") @distributed_trace def list_by_subscription(self, **kwargs: Any) -> AsyncIterable["_models.SshPublicKeyResource"]: """Lists all of the SSH public keys in the subscription. Use the nextLink property in the response to get the next page of SSH public keys. :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either SshPublicKeyResource or the result of cls(response) :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource] :raises ~azure.core.exceptions.HttpResponseError: """ _headers = kwargs.pop("headers", {}) or {} _params = case_insensitive_dict(kwargs.pop("params", {}) or {}) api_version: Literal["2022-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01")) cls: ClsType[_models.SshPublicKeysGroupListResult] = kwargs.pop("cls", None) error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError, 304: ResourceNotModifiedError, } error_map.update(kwargs.pop("error_map", {}) or {}) def prepare_request(next_link=None): if not next_link: request = build_list_by_subscription_request( subscription_id=self._config.subscription_id, api_version=api_version, template_url=self.list_by_subscription.metadata["url"], headers=_headers, params=_params, ) request = _convert_request(request) request.url = self._client.format_url(request.url) 
else: # make call to next link with the client's api-version _parsed_next_link = urllib.parse.urlparse(next_link) _next_request_params = case_insensitive_dict( { key: [urllib.parse.quote(v) for v in value] for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items() } ) _next_request_params["api-version"] = self._config.api_version request = HttpRequest( "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params ) request = _convert_request(request) request.url = self._client.format_url(request.url) request.method = "GET" return request async def extract_data(pipeline_response): deserialized = self._deserialize("SshPublicKeysGroupListResult", pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) # type: ignore return deserialized.next_link or None, AsyncList(list_of_elem) async def get_next(next_link=None): request = prepare_request(next_link) pipeline_response: PipelineResponse = await self._client._pipeline.run( # pylint: disable=protected-access request, stream=False, **kwargs ) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return AsyncItemPaged(get_next, extract_data) list_by_subscription.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Compute/sshPublicKeys"} @distributed_trace def list_by_resource_group( self, resource_group_name: str, **kwargs: Any ) -> AsyncIterable["_models.SshPublicKeyResource"]: """Lists all of the SSH public keys in the specified resource group. Use the nextLink property in the response to get the next page of SSH public keys. :param resource_group_name: The name of the resource group. Required. 
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either SshPublicKeyResource or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # NOTE(review): this file appears to be autorest-generated (request builders,
        # ``metadata["url"]`` templates, ``_convert_request``); manual edits are normally
        # overwritten on regeneration — confirm before hand-patching.
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2022-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
        cls: ClsType[_models.SshPublicKeysGroupListResult] = kwargs.pop("cls", None)

        # HTTP status -> exception mapping consulted by map_error on failure responses;
        # callers may extend/override it via the ``error_map`` keyword.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: use the generated request builder with the templated URL.
            # Subsequent pages: re-issue the service-provided continuation link,
            # re-applying the client's api-version query parameter.
            if not next_link:

                request = build_list_by_resource_group_request(
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_by_resource_group.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (continuation link, items) to the pager.
            deserialized = self._deserialize("SshPublicKeysGroupListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch a single page; only HTTP 200 is a success for this operation.
            request = prepare_request(next_link)

            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)

    list_by_resource_group.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys"
    }

    @overload
    async def create(
        self,
        resource_group_name: str,
        ssh_public_key_name: str,
        parameters: _models.SshPublicKeyResource,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.SshPublicKeyResource:
        """Creates a new SSH public key resource.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :param parameters: Parameters supplied to create the SSH public key. Required.
        :type parameters: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SshPublicKeyResource or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def create(
        self,
        resource_group_name: str,
        ssh_public_key_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.SshPublicKeyResource:
        """Creates a new SSH public key resource.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :param parameters: Parameters supplied to create the SSH public key. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SshPublicKeyResource or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def create(
        self,
        resource_group_name: str,
        ssh_public_key_name: str,
        parameters: Union[_models.SshPublicKeyResource, IO],
        **kwargs: Any
    ) -> _models.SshPublicKeyResource:
        """Creates a new SSH public key resource.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :param parameters: Parameters supplied to create the SSH public key. Is either a model type or
         an IO type. Required.
        :type parameters: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SshPublicKeyResource or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2022-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.SshPublicKeyResource] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        # Raw IO/bytes bodies are sent as-is; model instances are serialized to JSON.
        _json = None
        _content = None
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "SshPublicKeyResource")

        request = build_create_request(
            resource_group_name=resource_group_name,
            ssh_public_key_name=ssh_public_key_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.create.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        # 200 (updated) and 201 (created) deserialize identically; the duplicated
        # branches are a generator artifact, not a behavioral difference.
        if response.status_code == 200:
            deserialized = self._deserialize("SshPublicKeyResource", pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize("SshPublicKeyResource", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore

        return deserialized  # type: ignore

    create.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}"
    }

    @overload
    async def update(
        self,
        resource_group_name: str,
        ssh_public_key_name: str,
        parameters: _models.SshPublicKeyUpdateResource,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.SshPublicKeyResource:
        """Updates an SSH public key resource.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :param parameters: Parameters supplied to update the SSH public key. Required.
        :type parameters: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyUpdateResource
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SshPublicKeyResource or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def update(
        self,
        resource_group_name: str,
        ssh_public_key_name: str,
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.SshPublicKeyResource:
        """Updates an SSH public key resource.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :param parameters: Parameters supplied to update the SSH public key. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SshPublicKeyResource or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def update(
        self,
        resource_group_name: str,
        ssh_public_key_name: str,
        parameters: Union[_models.SshPublicKeyUpdateResource, IO],
        **kwargs: Any
    ) -> _models.SshPublicKeyResource:
        """Updates an SSH public key resource.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :param parameters: Parameters supplied to update the SSH public key. Is either a model type or
         an IO type. Required.
        :type parameters: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyUpdateResource or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SshPublicKeyResource or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2022-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.SshPublicKeyResource] = kwargs.pop("cls", None)

        content_type = content_type or "application/json"
        # Raw IO/bytes bodies are sent as-is; model instances are serialized to JSON.
        _json = None
        _content = None
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "SshPublicKeyUpdateResource")

        request = build_update_request(
            resource_group_name=resource_group_name,
            ssh_public_key_name=ssh_public_key_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.update.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("SshPublicKeyResource", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    update.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}"
    }

    @distributed_trace_async
    async def delete(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, ssh_public_key_name: str, **kwargs: Any
    ) -> None:
        """Delete an SSH public key.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: None or the result of cls(response)
        :rtype: None
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2022-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)

        request = build_delete_request(
            resource_group_name=resource_group_name,
            ssh_public_key_name=ssh_public_key_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.delete.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        # 200 and 204 are both accepted success codes for delete; no body is deserialized.
        if response.status_code not in [200, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    delete.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}"
    }

    @distributed_trace_async
    async def get(
        self, resource_group_name: str, ssh_public_key_name: str, **kwargs: Any
    ) -> _models.SshPublicKeyResource:
        """Retrieves information about an SSH public key.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SshPublicKeyResource or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyResource
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2022-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
        cls: ClsType[_models.SshPublicKeyResource] = kwargs.pop("cls", None)

        request = build_get_request(
            resource_group_name=resource_group_name,
            ssh_public_key_name=ssh_public_key_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("SshPublicKeyResource", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}"
    }

    @distributed_trace_async
    async def generate_key_pair(
        self, resource_group_name: str, ssh_public_key_name: str, **kwargs: Any
    ) -> _models.SshPublicKeyGenerateKeyPairResult:
        """Generates and returns a public/private key pair and populates the SSH public key resource
        with the public key. The length of the key will be 3072 bits. This operation can only be
        performed once per SSH public key resource.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param ssh_public_key_name: The name of the SSH public key. Required.
        :type ssh_public_key_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: SshPublicKeyGenerateKeyPairResult or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2022_08_01.models.SshPublicKeyGenerateKeyPairResult
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2022-08-01"] = kwargs.pop("api_version", _params.pop("api-version", "2022-08-01"))
        cls: ClsType[_models.SshPublicKeyGenerateKeyPairResult] = kwargs.pop("cls", None)

        request = build_generate_key_pair_request(
            resource_group_name=resource_group_name,
            ssh_public_key_name=ssh_public_key_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.generate_key_pair.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("SshPublicKeyGenerateKeyPairResult", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    generate_key_pair.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/sshPublicKeys/{sshPublicKeyName}/generateKeyPair"
    }