Column schema (dtype; observed range; ⌀ marks columns that contain nulls):

- blob_id: string (length 40)
- directory_id: string (length 40)
- path: string (length 3 to 616)
- content_id: string (length 40)
- detected_licenses: list (length 0 to 112)
- license_type: string (2 classes)
- repo_name: string (length 5 to 115)
- snapshot_id: string (length 40)
- revision_id: string (length 40)
- branch_name: string (777 classes)
- visit_date: timestamp[us] (2015-08-06 10:31:46 to 2023-09-06 10:44:38)
- revision_date: timestamp[us] (1970-01-01 02:38:32 to 2037-05-03 13:00:00)
- committer_date: timestamp[us] (1970-01-01 02:38:32 to 2023-09-06 01:08:06)
- github_id: int64 (4.92k to 681M) ⌀
- star_events_count: int64 (0 to 209k)
- fork_events_count: int64 (0 to 110k)
- gha_license_id: string (22 classes)
- gha_event_created_at: timestamp[us] (2012-06-04 01:52:49 to 2023-09-14 21:59:50) ⌀
- gha_created_at: timestamp[us] (2008-05-22 07:58:19 to 2023-08-21 12:35:19) ⌀
- gha_language: string (149 classes)
- src_encoding: string (26 classes)
- language: string (1 class)
- is_vendor: bool (2 classes)
- is_generated: bool (2 classes)
- length_bytes: int64 (3 to 10.2M)
- extension: string (188 classes)
- content: string (length 3 to 10.2M)
- authors: list (length 1)
- author_id: string (length 1 to 132)

blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author_id
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---
83ea1cd60b07b7661cf4d71ba9914ae9e4c12194 | deda76cdf57c2a178f7a6af0ef8abf0c239d0fbf | /post/migrations/0010_auto_20170225_1412.py | 3b015c6440e5213db5654f71dec65202d685fad1 | []
| no_license | mikekeda/Jess-blog | 55c03deaa3587f4280cb77f4c33a4728965f7503 | 0498450c671b7116e759ee608b60a56cf5c1722c | refs/heads/master | 2023-07-26T21:25:31.519523 | 2023-06-06T17:27:13 | 2023-06-06T17:27:13 | 69,493,913 | 2 | 0 | null | 2023-07-05T22:57:16 | 2016-09-28T18:53:57 | Python | UTF-8 | Python | false | false | 458 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-25 14:12
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('post', '0009_auto_20170225_1410'),
]
operations = [
migrations.AlterField(
model_name='post',
name='slug',
field=models.SlugField(editable=False, unique=True),
),
]
| [
"[email protected]"
]
| |
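
Rows with this schema are easiest to consume programmatically. A minimal sketch, assuming the dump is a Hugging Face `datasets` export; the dataset path below is a placeholder, not taken from this dump:

```python
from datasets import load_dataset

# Placeholder dataset path; substitute the real one for this dump.
ds = load_dataset("some-org/code-corpus", split="train", streaming=True)

# Keep permissively licensed, non-vendored, non-generated files.
# "permissive" and "no_license" are the two license_type classes seen in the rows.
kept = (
    row for row in ds
    if row["license_type"] == "permissive"
    and not row["is_vendor"]
    and not row["is_generated"]
)

first = next(kept)
print(first["repo_name"], first["path"], first["length_bytes"])
```
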
2eab272b612bf0026bdfa21c63ff576d34fd8dde | 8780bc7f252f14ff5406ce965733c099034920b7 | /pyCode/novel_Mongodb/novel/settings.py | e49a3b889bb618337c2e82ee26bcf3662da38c06 | []
| no_license | 13661892653/workspace | 5e4e458d31b9355c67d67ba7d9faccbcc1ac9f6b | 17960becabb3b4f0fc30009c71a11c4f7a5f8330 | refs/heads/master | 2020-12-24T20:00:15.541432 | 2018-08-14T13:56:15 | 2018-08-14T13:56:15 | 86,225,975 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,342 | py | # -*- coding: utf-8 -*-
# Scrapy settings for novel project
#
# For simplicity, this file contains only settings considered important or
# commonly used. You can find more settings consulting the documentation:
#
# http://doc.scrapy.org/en/latest/topics/settings.html
# http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
# http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
BOT_NAME = 'novel'
SPIDER_MODULES = ['novel.spiders']
NEWSPIDER_MODULE = 'novel.spiders'
# Crawl responsibly by identifying yourself (and your website) on the user-agent
#USER_AGENT = 'novel (+http://www.yourdomain.com)'
USER_AGENT = 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:23.0) Gecko/20100101 Firefox/23.0'
# Obey robots.txt rules
ROBOTSTXT_OBEY = True
ITEM_PIPELINES = {
'novel.pipelines.MongoDBPipeline': 300,
}
MONGODB_HOST = "127.0.0.1"
MONGODB_PORT = 27017
MONGODB_DB = 'Jikexueyuan'
MONGODB_COLL = 'novel'
# Configure maximum concurrent requests performed by Scrapy (default: 16)
#CONCURRENT_REQUESTS = 32
# Configure a delay for requests for the same website (default: 0)
# See http://scrapy.readthedocs.org/en/latest/topics/settings.html#download-delay
# See also autothrottle settings and docs
#DOWNLOAD_DELAY = 3
# The download delay setting will honor only one of:
#CONCURRENT_REQUESTS_PER_DOMAIN = 16
#CONCURRENT_REQUESTS_PER_IP = 16
# Disable cookies (enabled by default)
# Disable Telnet Console (enabled by default)
#TELNETCONSOLE_ENABLED = False
# Override the default request headers:
#DEFAULT_REQUEST_HEADERS = {
# 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
# 'Accept-Language': 'en',
#}
# Enable or disable spider middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/spider-middleware.html
#SPIDER_MIDDLEWARES = {
# 'novel.middlewares.NovelSpiderMiddleware': 543,
#}
# Enable or disable downloader middlewares
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html
#DOWNLOADER_MIDDLEWARES = {
# 'novel.middlewares.MyCustomDownloaderMiddleware': 543,
#}
# Enable or disable extensions
# See http://scrapy.readthedocs.org/en/latest/topics/extensions.html
#EXTENSIONS = {
# 'scrapy.extensions.telnet.TelnetConsole': None,
#}
# Configure item pipelines
# See http://scrapy.readthedocs.org/en/latest/topics/item-pipeline.html
#ITEM_PIPELINES = {
# 'novel.pipelines.MongoDBPipeline': 300,
#}
# Enable and configure the AutoThrottle extension (disabled by default)
# See http://doc.scrapy.org/en/latest/topics/autothrottle.html
#AUTOTHROTTLE_ENABLED = True
# The initial download delay
#AUTOTHROTTLE_START_DELAY = 5
# The maximum download delay to be set in case of high latencies
#AUTOTHROTTLE_MAX_DELAY = 60
# The average number of requests Scrapy should be sending in parallel to
# each remote server
#AUTOTHROTTLE_TARGET_CONCURRENCY = 1.0
# Enable showing throttling stats for every response received:
#AUTOTHROTTLE_DEBUG = False
# Enable and configure HTTP caching (disabled by default)
# See http://scrapy.readthedocs.org/en/latest/topics/downloader-middleware.html#httpcache-middleware-settings
#HTTPCACHE_ENABLED = True
#HTTPCACHE_EXPIRATION_SECS = 0
#HTTPCACHE_DIR = 'httpcache'
#HTTPCACHE_IGNORE_HTTP_CODES = []
#HTTPCACHE_STORAGE = 'scrapy.extensions.httpcache.FilesystemCacheStorage'
| [
"[email protected]"
]
| |
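
The settings above activate `novel.pipelines.MongoDBPipeline` and define the `MONGODB_*` keys, but the pipeline module itself is not part of this row. A hedged sketch of a pipeline consistent with those settings; the implementation is an assumption, not the repo's code:

```python
import pymongo


class MongoDBPipeline(object):
    """Store scraped items in the MongoDB collection named in settings."""

    def __init__(self, host, port, db, coll):
        self.client = pymongo.MongoClient(host, port)
        self.collection = self.client[db][coll]

    @classmethod
    def from_crawler(cls, crawler):
        s = crawler.settings
        return cls(s.get("MONGODB_HOST"), s.getint("MONGODB_PORT"),
                   s.get("MONGODB_DB"), s.get("MONGODB_COLL"))

    def process_item(self, item, spider):
        self.collection.insert_one(dict(item))  # one document per item
        return item
```
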
a8eb9ab1b268fd5c994efa2afb0f3d85321e1cf9 | 06d86ca0465405a7d1a64fc6dbf4980f76565e54 | /torchnlp/nn/__init__.py | be635ea9026ee2911bec1756db558956b484099d | [
"BSD-3-Clause"
]
| permissive | PetrochukM/PyTorch-NLP | 22b7f2628d6545270bc36964ce4551609f84ca9f | 53d7edcb8e0c099efce7c2ddf8cd7c44157fcac3 | refs/heads/master | 2023-08-05T20:15:06.954467 | 2023-07-04T21:11:26 | 2023-07-04T21:11:26 | 122,806,629 | 2,304 | 290 | BSD-3-Clause | 2022-07-16T23:44:23 | 2018-02-25T05:00:36 | Python | UTF-8 | Python | false | false | 499 | py | from torchnlp.nn.attention import Attention
from torchnlp.nn.lock_dropout import LockedDropout
from torchnlp.nn.weight_drop import WeightDropGRU
from torchnlp.nn.weight_drop import WeightDropLSTM
from torchnlp.nn.weight_drop import WeightDropLinear
from torchnlp.nn.weight_drop import WeightDrop
from torchnlp.nn.cnn_encoder import CNNEncoder
__all__ = [
'LockedDropout',
'Attention',
'CNNEncoder',
'WeightDrop',
'WeightDropGRU',
'WeightDropLSTM',
'WeightDropLinear',
]
| [
"[email protected]"
]
| |
412a3e50c46c08906300fcef62d66e697c2954e4 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_2_neat/16_0_2_Newyork167_RevengeOfThePancakes.py | b033b830db3257268cd1574abffce775324afeeb | []
| no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 503 | py | def start():
output_file = open('output.txt', 'w+')
t = int(raw_input().strip())
for x in range(t):
flip_count = 0
p = raw_input().strip()
for y in xrange(len(p) - 1, -1, -1):
if p[y] == "-":
flip_count += 1
p = flip(p[:y + 1]) + p[y + 1:]
output_file.write("Case #{case}: {result}\n".format(case=x+1, result=flip_count))
def flip(p):
return ''.join([''.join(x) for x in ["+" if x == "-" else "-" for x in p]]) | [
"[[email protected]]"
]
| |
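
The `flip` helper above wraps an already-flipped list in a redundant double join. A shorter equivalent, sketched in Python 3 syntax (the original solution is Python 2):

```python
def flip(p):
    # Swap every '+' for '-' and vice versa in one pass.
    return p.translate(str.maketrans("+-", "-+"))

assert flip("-+-") == "+-+"
```
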
d653b9469d563ede22ef0db716328cbd291036b3 | 6466eef5477db250879a74935b3b776dc878ff3b | /iprofile/migrations/0002_auto_20210302_0739.py | d5634f9ccf7c6c6bfbe7220e774c7d58326a69ca | []
| no_license | BakdauletBolatE/django_ideas | 8edb61a569f436865283e82edba3377a150665a8 | ef0258f3aae0c090d38a5098d175bceaddcf67af | refs/heads/master | 2023-03-12T00:02:04.969353 | 2021-03-02T19:41:00 | 2021-03-02T19:41:00 | 324,287,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | # Generated by Django 3.1.4 on 2021-03-02 07:39
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('iprofile', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='specialization',
field=models.ForeignKey(blank=True, default=None, null=True, on_delete=django.db.models.deletion.CASCADE, to='iprofile.specialization'),
),
]
| [
"[email protected]"
]
| |
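
Migration rows like the two above only alter fields; the models they target live elsewhere in each repo. As an illustration of the earlier `slug` change (`editable=False, unique=True` implies the value is assigned in code, not in forms), a common pattern sketched with assumed fields:

```python
from django.db import models
from django.utils.text import slugify


class Post(models.Model):
    title = models.CharField(max_length=200)  # assumed field, not from the repo
    slug = models.SlugField(editable=False, unique=True)

    def save(self, *args, **kwargs):
        if not self.slug:
            self.slug = slugify(self.title)  # assumed source of the slug value
        super().save(*args, **kwargs)
```
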
1e2c08e559a9397aff2ade4a07738486067ea297 | 77428d258556f1cae13c7435bcb5ee387d2f7ed9 | /src/program/python/snippet/ProcExit.py | 23d60713b22cf13747dd1e925f813b5c6bffea4a | []
| no_license | imxood/imxood.github.io | d598d3d991f7e7d39787ecb2415ffe48489d9fd6 | a6fe8fe069b8af9d65b6afaabecfcfe99ed1ed21 | refs/heads/main | 2022-10-23T12:52:11.966389 | 2022-10-04T06:04:59 | 2022-10-04T06:04:59 | 47,911,256 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 171 | py | class ProcExit(Exception):
def __init__(self, message, status):
super().__init__(message, status)
self.message = message
self.status = status
| [
"[email protected]"
]
| |
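
A usage sketch for the `ProcExit` class above; the calling code is an assumption:

```python
import sys

def run():
    raise ProcExit("terminating early", status=2)

try:
    run()
except ProcExit as e:
    print(e.message)    # "terminating early"
    sys.exit(e.status)  # exit code 2
```
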
775b89ca5e6b11635f184ce04290426d9263a5c2 | 80301f1cffc5afce13256e2ecab6323c5df00194 | /en.sc/py/C1601.py | 9235408d65c17a872e648fbd8198636110ece8af | []
| no_license | ZhenjianYang/SoraVoiceScripts | c1ddf7c1bbcb933243754f9669bd6b75777c87b9 | 94a948090aba0f63b10b2c69dc845dc99c822fc4 | refs/heads/master | 2023-04-18T04:54:44.306652 | 2023-04-06T11:15:17 | 2023-04-06T11:15:17 | 103,167,541 | 43 | 11 | null | 2021-03-06T08:52:54 | 2017-09-11T17:36:55 | Python | UTF-8 | Python | false | false | 24,450 | py | from ED6SCScenarioHelper import *
def main():
SetCodePage("ms932")
CreateScenaFile(
FileName = 'C1601 ._SN',
MapName = 'Bose',
Location = 'C1601.x',
MapIndex = 250,
MapDefaultBGM = "ed60125",
Flags = 0,
EntryFunctionIndex = 0xFFFF,
Reserved = 0,
IncludedScenario = [
'',
'',
'',
'',
'',
'',
'',
''
],
)
BuildStringList(
'@FileName', # 8
'', # 9
'', # 10
'', # 11
'', # 12
'', # 13
'', # 14
'', # 15
'', # 16
'', # 17
)
DeclEntryPoint(
Unknown_00 = 0,
Unknown_04 = 0,
Unknown_08 = 6000,
Unknown_0C = 4,
Unknown_0E = 0,
Unknown_10 = 0,
Unknown_14 = 9500,
Unknown_18 = -10000,
Unknown_1C = 0,
Unknown_20 = 0,
Unknown_24 = 0,
Unknown_28 = 2800,
Unknown_2C = 262,
Unknown_30 = 45,
Unknown_32 = 0,
Unknown_34 = 360,
Unknown_36 = 0,
Unknown_38 = 0,
Unknown_3A = 0,
InitScenaIndex = 0,
InitFunctionIndex = 0,
EntryScenaIndex = 0,
EntryFunctionIndex = 1,
)
AddCharChip(
'ED6_DT29/CH12450 ._CH', # 00
'ED6_DT29/CH12451 ._CH', # 01
'ED6_DT09/CH10840 ._CH', # 02
'ED6_DT09/CH10841 ._CH', # 03
'ED6_DT29/CH12460 ._CH', # 04
'ED6_DT29/CH12461 ._CH', # 05
'ED6_DT09/CH10550 ._CH', # 06
'ED6_DT09/CH10551 ._CH', # 07
'ED6_DT29/CH12500 ._CH', # 08
'ED6_DT29/CH12501 ._CH', # 09
'ED6_DT29/CH12560 ._CH', # 0A
'ED6_DT29/CH12561 ._CH', # 0B
)
AddCharChipPat(
'ED6_DT29/CH12450P._CP', # 00
'ED6_DT29/CH12451P._CP', # 01
'ED6_DT09/CH10840P._CP', # 02
'ED6_DT09/CH10841P._CP', # 03
'ED6_DT29/CH12460P._CP', # 04
'ED6_DT29/CH12461P._CP', # 05
'ED6_DT09/CH10550P._CP', # 06
'ED6_DT09/CH10551P._CP', # 07
'ED6_DT29/CH12500P._CP', # 08
'ED6_DT29/CH12501P._CP', # 09
'ED6_DT29/CH12560P._CP', # 0A
'ED6_DT29/CH12561P._CP', # 0B
)
DeclMonster(
X = 53080,
Z = 0,
Y = 1650,
Unknown_0C = 180,
Unknown_0E = 8,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C7,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclMonster(
X = 24670,
Z = 0,
Y = 22960,
Unknown_0C = 180,
Unknown_0E = 6,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C6,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclMonster(
X = -8910,
Z = 0,
Y = 21050,
Unknown_0C = 180,
Unknown_0E = 10,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C8,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclMonster(
X = -20410,
Z = -500,
Y = 27460,
Unknown_0C = 180,
Unknown_0E = 8,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C7,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclMonster(
X = -41620,
Z = 1000,
Y = 45310,
Unknown_0C = 180,
Unknown_0E = 10,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C8,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclMonster(
X = -32180,
Z = 0,
Y = 61060,
Unknown_0C = 180,
Unknown_0E = 6,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C6,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclMonster(
X = -48200,
Z = 0,
Y = 5330,
Unknown_0C = 180,
Unknown_0E = 10,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C8,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclMonster(
X = -9720,
Z = 0,
Y = -137210,
Unknown_0C = 180,
Unknown_0E = 8,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C7,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclMonster(
X = 217380,
Z = 0,
Y = 12010,
Unknown_0C = 180,
Unknown_0E = 8,
Unknown_10 = 65,
Unknown_11 = 1,
Unknown_12 = 0xFFFFFFFF,
BattleIndex = 0x3C7,
Unknown_18 = 0,
Unknown_1A = 0,
)
DeclActor(
TriggerX = -7780,
TriggerZ = 0,
TriggerY = -128550,
TriggerRange = 1000,
ActorX = -7780,
ActorZ = 0,
ActorY = -127890,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 2,
Unknown_22 = 0,
)
DeclActor(
TriggerX = 227170,
TriggerZ = 0,
TriggerY = 11830,
TriggerRange = 1000,
ActorX = 227870,
ActorZ = 0,
ActorY = 11830,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 3,
Unknown_22 = 0,
)
DeclActor(
TriggerX = -29260,
TriggerZ = 0,
TriggerY = 54810,
TriggerRange = 1000,
ActorX = -29260,
ActorZ = 0,
ActorY = 54110,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 4,
Unknown_22 = 0,
)
DeclActor(
TriggerX = -34020,
TriggerZ = 0,
TriggerY = 67570,
TriggerRange = 1000,
ActorX = -34020,
ActorZ = 0,
ActorY = 68190,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 5,
Unknown_22 = 0,
)
DeclActor(
TriggerX = -940,
TriggerZ = 0,
TriggerY = -135030,
TriggerRange = 1000,
ActorX = -180,
ActorZ = 0,
ActorY = -135030,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 6,
Unknown_22 = 0,
)
DeclActor(
TriggerX = 215690,
TriggerZ = 0,
TriggerY = 23150,
TriggerRange = 1000,
ActorX = 215690,
ActorZ = 0,
ActorY = 23810,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 7,
Unknown_22 = 0,
)
DeclActor(
TriggerX = 206060,
TriggerZ = 0,
TriggerY = 12220,
TriggerRange = 1000,
ActorX = 205400,
ActorZ = 0,
ActorY = 12180,
Flags = 0x7C,
TalkScenaIndex = 0,
TalkFunctionIndex = 8,
Unknown_22 = 0,
)
ScpFunction(
"Function_0_302", # 00, 0
"Function_1_333", # 01, 1
"Function_2_43E", # 02, 2
"Function_3_546", # 03, 3
"Function_4_6A6", # 04, 4
"Function_5_800", # 05, 5
"Function_6_96A", # 06, 6
"Function_7_AC1", # 07, 7
"Function_8_BE2", # 08, 8
"Function_9_D38", # 09, 9
)
def Function_0_302(): pass
label("Function_0_302")
OP_11(0xFF, 0xFF, 0xFF, 0x9C40, 0x12110, 0x0)
Switch(
(scpexpr(EXPR_PUSH_VALUE_INDEX, 0x0), scpexpr(EXPR_END)),
(100, "loc_31E"),
(SWITCH_DEFAULT, "loc_332"),
)
label("loc_31E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x350, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_32F")
SetMapFlags(0x10000000)
Event(0, 9)
label("loc_32F")
Jump("loc_332")
label("loc_332")
Return()
# Function_0_302 end
def Function_1_333(): pass
label("Function_1_333")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_345")
OP_6F(0x0, 0)
Jump("loc_34C")
label("loc_345")
OP_6F(0x0, 60)
label("loc_34C")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_35E")
OP_6F(0x1, 0)
Jump("loc_365")
label("loc_35E")
OP_6F(0x1, 60)
label("loc_365")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_377")
OP_6F(0x2, 0)
Jump("loc_37E")
label("loc_377")
OP_6F(0x2, 60)
label("loc_37E")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_390")
OP_6F(0x3, 0)
Jump("loc_397")
label("loc_390")
OP_6F(0x3, 60)
label("loc_397")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3A9")
OP_6F(0x4, 0)
Jump("loc_3B0")
label("loc_3A9")
OP_6F(0x4, 60)
label("loc_3B0")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x371, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3C2")
OP_6F(0x5, 0)
Jump("loc_3C9")
label("loc_3C2")
OP_6F(0x5, 60)
label("loc_3C9")
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x371, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_3DB")
OP_6F(0x6, 0)
Jump("loc_3E2")
label("loc_3DB")
OP_6F(0x6, 60)
label("loc_3E2")
OP_E0(0x4, 0x10, 0xFF, 0xFF, 0xFF, 0x0, 0x0, 0x0, 0x0, 0x8A, 0xF0, 0xFD, 0xFF)
OP_51(0x8, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xDF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0xA, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xDF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0xB, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xDF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0xC, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xDF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0xE, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xDF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0xF, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xDF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
OP_51(0x10, 0x24, (scpexpr(EXPR_PUSH_LONG, 0xDF), scpexpr(EXPR_STUB), scpexpr(EXPR_END)))
Return()
# Function_1_333 end
def Function_2_43E(): pass
label("Function_2_43E")
OP_EA(0x2, 0x5C, 0x0, 0x0)
SetMapFlags(0x8000000)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_516")
OP_22(0x2B, 0x0, 0x64)
OP_70(0x0, 0x3C)
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_3E(0x17B, 1)"), scpexpr(EXPR_END)), "loc_4AF")
FadeToDark(300, 0, 100)
OP_22(0x11, 0x0, 0x64)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk( #0
"Found \x1F\x7B\x01\x07\x00.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_A2(0x1B80)
Jump("loc_513")
label("loc_4AF")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #1
(
"Found \x1F\x7B\x01\x07\x00 in chest.\x01",
"Inventory full so gave up \x1F\x7B\x01\x07\x00.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_22(0x2C, 0x0, 0x64)
OP_6F(0x0, 60)
OP_70(0x0, 0x0)
label("loc_513")
Jump("loc_538")
label("loc_516")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #2
"\x07\x05No.\x07\x00\x02",
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
label("loc_538")
Sleep(30)
TalkEnd(0xFF)
ClearMapFlags(0x8000000)
Return()
# Function_2_43E end
def Function_3_546(): pass
label("Function_3_546")
OP_EA(0x2, 0x5D, 0x0, 0x0)
SetMapFlags(0x8000000)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 2)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_61E")
OP_22(0x2B, 0x0, 0x64)
OP_70(0x1, 0x3C)
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_3E(0x16C, 1)"), scpexpr(EXPR_END)), "loc_5B7")
FadeToDark(300, 0, 100)
OP_22(0x11, 0x0, 0x64)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk( #3
"Found \x1F\x6C\x01\x07\x00.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_A2(0x1B82)
Jump("loc_61B")
label("loc_5B7")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #4
(
"Found \x1F\x6C\x01\x07\x00 in chest.\x01",
"Inventory full so gave up \x1F\x6C\x01\x07\x00.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_22(0x2C, 0x0, 0x64)
OP_6F(0x1, 60)
OP_70(0x1, 0x0)
label("loc_61B")
Jump("loc_698")
label("loc_61E")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #5
(
"\x07\x05There's nothing in the chest. You briefly\x01",
"wonder if you could use it as a makeshift\x01",
"boat...\x07\x00\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
label("loc_698")
Sleep(30)
TalkEnd(0xFF)
ClearMapFlags(0x8000000)
Return()
# Function_3_546 end
def Function_4_6A6(): pass
label("Function_4_6A6")
OP_EA(0x2, 0x5E, 0x0, 0x0)
SetMapFlags(0x8000000)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 4)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_77E")
OP_22(0x2B, 0x0, 0x64)
OP_70(0x2, 0x3C)
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_3E(0xC5, 1)"), scpexpr(EXPR_END)), "loc_717")
FadeToDark(300, 0, 100)
OP_22(0x11, 0x0, 0x64)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk( #6
"Found \x1F\xC5\x00\x07\x00.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_A2(0x1B84)
Jump("loc_77B")
label("loc_717")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #7
(
"Found \x1F\xC5\x00\x07\x00 in chest.\x01",
"Inventory full so gave up \x1F\xC5\x00\x07\x00.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_22(0x2C, 0x0, 0x64)
OP_6F(0x2, 60)
OP_70(0x2, 0x0)
label("loc_77B")
Jump("loc_7F2")
label("loc_77E")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #8
(
"\x07\x05There is nothing in the chest but spiders now.\x01",
"They all stare at you and clap. Bravo.\x07\x00\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
label("loc_7F2")
Sleep(30)
TalkEnd(0xFF)
ClearMapFlags(0x8000000)
Return()
# Function_4_6A6 end
def Function_5_800(): pass
label("Function_5_800")
OP_EA(0x2, 0x5F, 0x0, 0x0)
SetMapFlags(0x8000000)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 6)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_8D8")
OP_22(0x2B, 0x0, 0x64)
OP_70(0x3, 0x3C)
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_3E(0x1FE, 1)"), scpexpr(EXPR_END)), "loc_871")
FadeToDark(300, 0, 100)
OP_22(0x11, 0x0, 0x64)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk( #9
"Found \x1F\xFE\x01\x07\x00.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_A2(0x1B86)
Jump("loc_8D5")
label("loc_871")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #10
(
"Found \x1F\xFE\x01\x07\x00 in chest.\x01",
"Inventory full so gave up \x1F\xFE\x01\x07\x00.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_22(0x2C, 0x0, 0x64)
OP_6F(0x3, 60)
OP_70(0x3, 0x0)
label("loc_8D5")
Jump("loc_95C")
label("loc_8D8")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #11
(
"\x07\x05You look under the chest, hoping to find the\x01",
"keys to a BRAND NEW AIRSHIP! ...You don't find\x01",
"anything.\x07\x00\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
label("loc_95C")
Sleep(30)
TalkEnd(0xFF)
ClearMapFlags(0x8000000)
Return()
# Function_5_800 end
def Function_6_96A(): pass
label("Function_6_96A")
OP_EA(0x2, 0x60, 0x0, 0x0)
SetMapFlags(0x8000000)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x370, 7)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_A42")
OP_22(0x2B, 0x0, 0x64)
OP_70(0x4, 0x3C)
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_3E(0x1F6, 1)"), scpexpr(EXPR_END)), "loc_9DB")
FadeToDark(300, 0, 100)
OP_22(0x11, 0x0, 0x64)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk( #12
"Found \x1F\xF6\x01\x07\x00.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_A2(0x1B87)
Jump("loc_A3F")
label("loc_9DB")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #13
(
"Found \x1F\xF6\x01\x07\x00 in chest.\x01",
"Inventory full so gave up \x1F\xF6\x01\x07\x00.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_22(0x2C, 0x0, 0x64)
OP_6F(0x4, 60)
OP_70(0x4, 0x0)
label("loc_A3F")
Jump("loc_AB3")
label("loc_A42")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #14
(
"\x07\x05This treasure chest graduated at the top of its\x01",
"class at the Riches Royal Academy.\x07\x00\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
label("loc_AB3")
Sleep(30)
TalkEnd(0xFF)
ClearMapFlags(0x8000000)
Return()
# Function_6_96A end
def Function_7_AC1(): pass
label("Function_7_AC1")
OP_EA(0x2, 0x61, 0x0, 0x0)
SetMapFlags(0x8000000)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x371, 0)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_B99")
OP_22(0x2B, 0x0, 0x64)
OP_70(0x5, 0x3C)
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_3E(0x202, 1)"), scpexpr(EXPR_END)), "loc_B32")
FadeToDark(300, 0, 100)
OP_22(0x11, 0x0, 0x64)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk( #15
"Found \x1F\x02\x02\x07\x00.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_A2(0x1B88)
Jump("loc_B96")
label("loc_B32")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #16
(
"Found \x1F\x02\x02\x07\x00 in chest.\x01",
"Inventory full so gave up \x1F\x02\x02\x07\x00.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_22(0x2C, 0x0, 0x64)
OP_6F(0x5, 60)
OP_70(0x5, 0x0)
label("loc_B96")
Jump("loc_BD4")
label("loc_B99")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #17
"\x07\x05I dunno what you expected...\x07\x00\x02",
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
label("loc_BD4")
Sleep(30)
TalkEnd(0xFF)
ClearMapFlags(0x8000000)
Return()
# Function_7_AC1 end
def Function_8_BE2(): pass
label("Function_8_BE2")
OP_EA(0x2, 0x62, 0x0, 0x0)
SetMapFlags(0x8000000)
Jc((scpexpr(EXPR_TEST_SCENA_FLAGS, MakeScenarioFlags(0x371, 1)), scpexpr(EXPR_EQUZ), scpexpr(EXPR_END)), "loc_CBA")
OP_22(0x2B, 0x0, 0x64)
OP_70(0x6, 0x3C)
Sleep(500)
Jc((scpexpr(EXPR_EXEC_OP, "OP_3E(0x1FC, 1)"), scpexpr(EXPR_END)), "loc_C53")
FadeToDark(300, 0, 100)
OP_22(0x11, 0x0, 0x64)
SetMessageWindowPos(-1, -1, -1, -1)
SetChrName("")
AnonymousTalk( #18
"Found \x1F\xFC\x01\x07\x00.\x02",
)
CloseMessageWindow()
OP_56(0x0)
SetMessageWindowPos(72, 320, 56, 3)
FadeToBright(300, 0)
OP_A2(0x1B89)
Jump("loc_CB7")
label("loc_C53")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #19
(
"Found \x1F\xFC\x01\x07\x00 in chest.\x01",
"Inventory full so gave up \x1F\xFC\x01\x07\x00.\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
OP_22(0x2C, 0x0, 0x64)
OP_6F(0x6, 60)
OP_70(0x6, 0x0)
label("loc_CB7")
Jump("loc_D2A")
label("loc_CBA")
FadeToDark(300, 0, 100)
SetChrName("")
AnonymousTalk( #20
(
"\x07\x05This chest remembers your previous visit and is\x01",
"none too happy to see you return.\x07\x00\x02",
)
)
CloseMessageWindow()
OP_56(0x0)
FadeToBright(300, 0)
label("loc_D2A")
Sleep(30)
TalkEnd(0xFF)
ClearMapFlags(0x8000000)
Return()
# Function_8_BE2 end
def Function_9_D38(): pass
label("Function_9_D38")
OP_C8(0x200, 0x32, "C_PLAC17._CH", 0x0, 0x3E8)
OP_DE("Ancient Dragon Nest")
OP_A2(0x1A84)
Return()
# Function_9_D38 end
SaveToFile()
Try(main)
| [
"[email protected]"
]
| |
63c08bd189da2b98bb7420b249deca19b3e20d5d | d3b2d0432a4f488128c2798829d017a1a2377318 | /caffe/python/caffe/custom_layers/split_loss_layer.py | befe7e3d2fa52f704efd66073c0c9e3e1e3e523b | [
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"BSD-3-Clause"
]
| permissive | openvinotoolkit/training_toolbox_caffe | 94b65290724e839cdb50e4accf6a79776190402d | 5e543a49c73190a091fe6ff364444a9714962ec0 | refs/heads/develop | 2023-01-05T17:07:42.279772 | 2022-12-21T06:50:52 | 2022-12-21T06:50:52 | 165,683,591 | 4 | 3 | Apache-2.0 | 2022-12-21T06:50:53 | 2019-01-14T15:19:52 | Jupyter Notebook | UTF-8 | Python | false | false | 11,748 | py | """
Copyright (c) 2018 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import traceback
from builtins import range
from collections import namedtuple
import numpy as np
from caffe._caffe import log as LOG
from caffe._caffe import Layer as BaseLayer
BBoxDesc = namedtuple('BBoxDesc', 'item, det_conf, anchor, action, xmin, ymin, xmax, ymax, x, y')
MATCHED_RECORD_SIZE = 11
class SplitLossLayer(BaseLayer):
"""One of Metric-learning losses which forces incorrect predicted samples from
different classes and from neighboring cells to be far from each other.
Current implementation is able to extract embeddings from the anchor branches
by the specified list of detections.
"""
@staticmethod
def _translate_matched_prediction(record):
"""Decodes the input record into the human-readable format.
:param record: Input single record for decoding
:return: Human-readable record
"""
bbox = BBoxDesc(item=int(record[0]),
det_conf=float(record[1]),
anchor=int(record[6]),
action=int(record[8]),
xmin=float(record[2]),
ymin=float(record[3]),
xmax=float(record[4]),
ymax=float(record[5]),
x=int(record[9]),
y=int(record[10]))
return bbox
@staticmethod
def _read_detections(data, record_size, converter, valid_action_ids, min_conf):
"""Convert input blob into list of human-readable records.
:param data: Input blob
:param record_size: Size of each input record
:param converter: Function to convert input record
:param valid_action_ids:
:param min_conf: List of IDs of valid actions
:return: List of detections
"""
assert data.size % record_size == 0, 'incorrect record_size'
records = data.reshape([-1, record_size])
detections = {}
for record in records:
detection = converter(record)
if detection.det_conf < min_conf or detection.item < 0:
continue
if valid_action_ids is None or detection.action in valid_action_ids:
detections[detection.item] = detections.get(detection.item, []) + [detection]
return detections
@staticmethod
def _iou(box_a, box_b):
""" Calculates Intersection over Union (IoU) metric.
:param box_a: First bbox
:param box_b: Second bbox
:return: Scalar value of metric
"""
top_left_x = max(box_a.xmin, box_b.xmin)
top_left_y = max(box_a.ymin, box_b.ymin)
intersect_width = max(0.0, min(box_a.xmax, box_b.xmax) - top_left_x)
intersect_height = max(0.0, min(box_a.ymax, box_b.ymax) - top_left_y)
intersection_area = float(intersect_width * intersect_height)
area1 = (box_a.xmax - box_a.xmin) * (box_a.ymax - box_a.ymin)
area2 = (box_b.xmax - box_b.xmin) * (box_b.ymax - box_b.ymin)
union_area = float(area1 + area2 - intersection_area)
return intersection_area / union_area if union_area > 0.0 else 0.0
def _load_params(self, param_str):
"""Loads layer parameters.
:param param_str: Input str of parameters
"""
layer_params = eval(param_str)
assert 'num_anchors' in layer_params
self._num_anchors = layer_params['num_anchors']
self._margin = float(layer_params['margin']) if 'margin' in layer_params else 0.6
self._min_overlap = float(layer_params['min_overlap']) if 'min_overlap' in layer_params else 0.3
self._min_conf = float(layer_params['min_conf']) if 'min_conf' in layer_params else 0.01
self._valid_action_ids = layer_params['valid_action_ids'] if 'valid_action_ids' in layer_params else None
self._candidates = []
self._embeddings = []
def setup(self, bottom, top):
"""Initializes layer.
:param bottom: List of bottom blobs
:param top: List of top blobs
"""
self._load_params(self.param_str)
def forward(self, bottom, top):
"""Carry out forward pass.
:param bottom: List of bottom blobs
:param top: List of top blobs
"""
try:
assert len(bottom) == self._num_anchors + 2
assert len(top) == 1 or len(top) == 4
detections_data = np.array(bottom[0].data)
batch_detections = self._read_detections(detections_data, MATCHED_RECORD_SIZE,
self._translate_matched_prediction,
self._valid_action_ids, self._min_conf)
centers = np.array(bottom[1].data)
self._embeddings = []
for i in range(self._num_anchors):
self._embeddings.append(np.array(bottom[i + 2].data))
all_candidates = []
total_num_pairs = 0
total_num_overlapped = 0
total_num_incorrect = 0
for item_id in batch_detections:
detections = batch_detections[item_id]
for i, _ in enumerate(detections):
anchor_det = detections[i]
for j in range(i + 1, len(detections)):
ref_det = detections[j]
# exclude same class predictions
if anchor_det.action == ref_det.action:
continue
overlap = self._iou(anchor_det, ref_det)
if overlap < self._min_overlap:
continue
total_num_overlapped += 1
anchor_embed = self._embeddings[anchor_det.anchor][anchor_det.item, :,
anchor_det.y, anchor_det.x]
ref_embed = self._embeddings[ref_det.anchor][ref_det.item, :, ref_det.y, ref_det.x]
anchor_center_distances =\
(1.0 - np.matmul(centers, anchor_embed.reshape([-1, 1]))).reshape([-1])
ref_center_distances =\
(1.0 - np.matmul(centers, ref_embed.reshape([-1, 1]))).reshape([-1])
anchor_action = np.argmin(anchor_center_distances)
ref_action = np.argmin(ref_center_distances)
# exclude well-separated predictions
if anchor_action != ref_action:
continue
# exclude predictions with both incorrect labels
if anchor_action != anchor_det.action and ref_action != ref_det.action:
continue
total_num_incorrect += 1
embed_dist = 1.0 - np.sum(anchor_embed * ref_embed)
if anchor_action != anchor_det.action:
loss = self._margin + anchor_center_distances[anchor_det.action] - embed_dist
if loss > 0.0:
all_candidates.append((loss, embed_dist, anchor_det, ref_det))
total_num_pairs += 1
if ref_action != ref_det.action:
loss = self._margin + ref_center_distances[ref_det.action] - embed_dist
if loss > 0.0:
all_candidates.append((loss, embed_dist, ref_det, anchor_det))
total_num_pairs += 1
if len(all_candidates) == 0:
self._candidates = []
top[0].data[...] = 0.0
if len(top) == 4:
top[1].data[...] = 0.0
top[2].data[...] = 0.0
top[3].data[...] = 0.0
else:
self._candidates = all_candidates
loss = np.sum([tup[0] for tup in self._candidates]) / float(len(self._candidates))
top[0].data[...] = loss
if len(top) == 4:
top[1].data[...] = np.median([tup[1] for tup in self._candidates])
top[2].data[...] = float(len(self._candidates)) / float(total_num_pairs)
top[3].data[...] = float(total_num_incorrect) / float(total_num_overlapped)
except Exception:
LOG('SplitLossLayer forward pass exception: {}'.format(traceback.format_exc()))
exit()
def backward(self, top, propagate_down, bottom):
"""Carry out backward pass.
:param top: List of top blobs
:param propagate_down: List of indicators to carry out back-propagation for
the specified bottom blob
:param bottom: List of bottom blobs
"""
try:
if propagate_down[0]:
raise Exception('Cannot propagate down through the matched detections')
centers_data = np.array(bottom[1].data)
centers_diff_data = np.zeros(bottom[1].data.shape) if propagate_down[1] else None
diff_data = {}
for anchor_id in range(self._num_anchors):
if propagate_down[anchor_id + 2]:
diff_data[anchor_id] = np.zeros(bottom[anchor_id + 2].data.shape)
if len(self._candidates) > 0:
factor = top[0].diff[0] / float(len(self._candidates))
for _, _, anchor_det, ref_det in self._candidates:
anchor_embed = self._embeddings[anchor_det.anchor][anchor_det.item, :, anchor_det.y, anchor_det.x]
ref_embedding = self._embeddings[ref_det.anchor][ref_det.item, :, ref_det.y, ref_det.x]
if propagate_down[anchor_det.anchor + 2]:
diff_data[anchor_det.anchor][anchor_det.item, :, anchor_det.y, anchor_det.x] \
+= factor * (ref_embedding - centers_data[anchor_det.action])
if propagate_down[ref_det.anchor + 2]:
diff_data[ref_det.anchor][ref_det.item, :, ref_det.y, ref_det.x] += factor * anchor_embed
if centers_diff_data is not None:
centers_diff_data[anchor_det.action] += -factor * anchor_embed
if centers_diff_data is not None:
bottom[1].diff[...] = centers_diff_data
for anchor_id in range(self._num_anchors):
if propagate_down[anchor_id + 2]:
bottom[anchor_id + 2].diff[...] = diff_data[anchor_id]
except Exception:
LOG('SplitLossLayer backward pass exception: {}'.format(traceback.format_exc()))
exit()
def reshape(self, bottom, top):
"""Carry out blob reshaping.
:param bottom: List of bottom blobs
:param top: List of top blobs
"""
top[0].reshape(1)
if len(top) == 4:
top[1].reshape(1)
top[2].reshape(1)
top[3].reshape(1)
| [
"[email protected]"
]
| |
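
Each accumulated term in the forward pass above has the hinge form `margin + d(anchor, center) - d(anchor, ref)` over cosine distances. A toy NumPy illustration of one term; the numbers are made up, not from any model:

```python
import numpy as np

margin = 0.6
anchor = np.array([0.6, 0.8])   # unit-norm embedding of one detection
ref = np.array([0.8, 0.6])      # embedding of an overlapping detection
center = np.array([1.0, 0.0])   # class center for the anchor's true action

d_center = 1.0 - anchor @ center  # cosine distance to the true center: 0.4
d_pair = 1.0 - anchor @ ref       # cosine distance between the pair: 0.04

loss = max(0.0, margin + d_center - d_pair)
print(loss)  # 0.96: the pair sits too close relative to the center margin
```
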
b2821e252c94e69a8e4c84bc04d5b2f793b836f8 | c10f20abec372f81dbd6468ead208543f60940f1 | /learning/22.LDA/22.3.reuters.py | 9fcc780c9a33d39ab337bd60e3a9bc6a7ec0b357 | []
| no_license | alenzhd/meachineLearning | 64876e7a6c0b8b39a63a9eb586d306a3489b4447 | 1b66ce2f73b226548f07e45c8537b8286635a048 | refs/heads/master | 2021-08-24T10:55:52.056439 | 2017-12-09T10:26:37 | 2017-12-09T10:26:37 | 112,688,163 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,864 | py | # !/usr/bin/python
# -*- coding:utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import matplotlib as mpl
import lda
import lda.datasets
from pprint import pprint
if __name__ == "__main__":
# document-term matrix
X = lda.datasets.load_reuters()
print("type(X): {}".format(type(X)))
print("shape: {}\n".format(X.shape))
print(X[:10, :10])
# the vocab
vocab = lda.datasets.load_reuters_vocab()
print("type(vocab): {}".format(type(vocab)))
print("len(vocab): {}\n".format(len(vocab)))
print(vocab[:10])
# titles for each story
titles = lda.datasets.load_reuters_titles()
print("type(titles): {}".format(type(titles)))
print("len(titles): {}\n".format(len(titles)))
pprint(titles[:10])
print ('LDA start ----')
topic_num = 20
model = lda.LDA(n_topics=topic_num, n_iter=800, random_state=1)
model.fit(X)
# topic-word
topic_word = model.topic_word_
print("type(topic_word): {}".format(type(topic_word)))
print("shape: {}".format(topic_word.shape))
print(vocab[:5])
print(topic_word[:, :5])
# Print Topic distribution
n = 7
for i, topic_dist in enumerate(topic_word):
topic_words = np.array(vocab)[np.argsort(topic_dist)][:-(n + 1):-1]
print('*Topic {}\n- {}'.format(i, ' '.join(topic_words)))
# Document - topic
doc_topic = model.doc_topic_
print("type(doc_topic): {}".format(type(doc_topic)))
print("shape: {}".format(doc_topic.shape))
for i in range(10):
topic_most_pr = doc_topic[i].argmax()
print(u"文档: {} 主题: {} value: {}".format(i, topic_most_pr, doc_topic[i][topic_most_pr]))
mpl.rcParams['font.sans-serif'] = [u'SimHei']
mpl.rcParams['axes.unicode_minus'] = False
# Topic - word
plt.figure(figsize=(8, 9))
# f, ax = plt.subplots(5, 1, sharex=True)
for i, k in enumerate([0, 5, 9, 14, 19]):
ax = plt.subplot(5, 1, i+1)
ax.plot(topic_word[k, :], 'r-')
ax.set_xlim(-50, 4350) # [0,4258]
ax.set_ylim(0, 0.08)
ax.set_ylabel(u"概率")
ax.set_title(u"主题 {}".format(k))
plt.xlabel(u"词", fontsize=14)
plt.tight_layout()
plt.suptitle(u'主题的词分布', fontsize=18)
plt.subplots_adjust(top=0.9)
plt.show()
# Document - Topic
plt.figure(figsize=(8, 9))
# f, ax= plt.subplots(5, 1, figsize=(8, 6), sharex=True)
for i, k in enumerate([1, 3, 4, 8, 9]):
ax = plt.subplot(5, 1, i+1)
ax.stem(doc_topic[k, :], linefmt='g-', markerfmt='ro')
ax.set_xlim(-1, topic_num+1)
ax.set_ylim(0, 1)
ax.set_ylabel(u"概率")
ax.set_title(u"文档 {}".format(k))
plt.xlabel(u"主题", fontsize=14)
plt.suptitle(u'文档的主题分布', fontsize=18)
plt.tight_layout()
plt.subplots_adjust(top=0.9)
plt.show()
| [
"[email protected]"
]
| |
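
A quick sanity check on the fitted matrices used above: every row of `topic_word` and `doc_topic` is a probability distribution, so it should sum to 1 (pure NumPy, reusing the variables already defined):

```python
import numpy as np

assert np.allclose(topic_word.sum(axis=1), 1.0)  # one distribution per topic
assert np.allclose(doc_topic.sum(axis=1), 1.0)   # one distribution per document

# Vectorized top-1 topic per document, equivalent to the loop above.
print(doc_topic.argmax(axis=1)[:10])
```
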
b248e867f89bea426fd4105ea1bc0119dbf2cc49 | 56bcae383daea12cc1818a19a6415e0d9b58bd0c | /month01/day10/exercise04.py | be84b5e64c084983ff26f235609fb67556cb5474 | []
| no_license | wpy-111/python | 97ede872cf6b17f8c229cee9ecfb7df25363a37a | afbd0e081763c53833617a4892d03043e644d641 | refs/heads/main | 2023-08-03T18:26:25.656984 | 2021-09-26T08:47:21 | 2021-09-26T08:47:21 | 323,897,439 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,119 | py | class Student:
def __init__(self, name, old, achement, gender):
self.name = name
self.old = old
self.achement = achement
self.gender = gender
def print_personal_info(self):
print("学生姓名:", self.name, "年龄:", self.old, "成绩:", self.achement, "性别:", self.gender)
list_student = [
Student("悟空", 27, 100, "男"),
Student("八戒", 30, 60, "男"),
Student("沙僧", 33, 70, "男"),
Student("唐僧", 20, 65, "女")
]
# 5.根据年龄,并按照升序将学生列表进行排序
def ascending():
for i in range(0, len(list_student) - 1):
for m in range(i + 1, len(list_student)):
if list_student[i].old > list_student[m].old:
list_student[i], list_student[m] = list_student[m], list_student[i]
ascending()
for item in list_student:
item.print_personal_info()
# 6.将学上列表中所有男生删除
def del_nan():
for item in list_student[::-1]:
if item.gender=="男":
list_student.remove(item)
del_nan()
for i in list_student:
i.print_personal_info() | [
"[email protected]"
]
| |
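
The manual bubble sort and reverse-iteration removal above both work; idiomatic equivalents, sketched with the same `Student` objects:

```python
# Sort in place by age (ascending).
list_student.sort(key=lambda s: s.old)

# Build a new list instead of removing while iterating.
list_student = [s for s in list_student if s.gender != "male"]
```
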
2b6f97889be157eea94f461089f319b502ea9cb5 | d4c74a8001451840f3efb87f15856cdb9d5e9eb6 | /tools/bin/s19toslm-new.py | d67635c2fd33f93ce452d69cbae1f9f87e361ddf | [
"Apache-2.0"
]
| permissive | danieldennett/gap_sdk | 3e4b4d187f03a28a761b08aed36a5e6a06f48e8d | 5667c899025a3a152dbf91e5c18e5b3e422d4ea6 | refs/heads/master | 2020-12-19T19:17:40.083131 | 2020-02-27T14:51:48 | 2020-02-27T14:51:48 | 235,814,026 | 1 | 0 | Apache-2.0 | 2020-01-23T14:39:59 | 2020-01-23T14:39:58 | null | UTF-8 | Python | false | false | 10,343 | py | #!/usr/bin/python
# ////////////////////////////////////////////////////////////////////////////////
# // Company: Multitherman Laboratory @ DEIS - University of Bologna //
# // Viale Risorgimento 2 40136 //
# // Bologna - fax 0512093785 - //
# // //
# // Engineer: Davide Rossi - [email protected] //
# // //
# // Additional contributions by: //
# // Andreas Traber - [email protected] //
# // //
# // Create Date: 05/04/2013 //
# // Design Name: ULPSoC //
# // Project Name: ULPSoC //
# // Language: tcl, now python //
# // //
# // Description: s19 to slm conversion tool for stxp70 cluster compilation //
# // //
# // Revision: //
# // Revision v0.1 - File Created //
# // Revision v0.2 - Modification: Compiler does now generate little endian //
# // directly. revert bytes! //
# // Revision v0.3 - Moved from 128 bit s19 to 8 bit s19 file. This solves our //
# // problems with misaligned addresses in s19 files. //
# // Revision v0.4 - Added TCDM memory initialization //
# // Revision v0.5 - Rewrote the whole thing in python as tcl cannot handle //
# // long file names properly
# ////////////////////////////////////////////////////////////////////////////////
import sys
import math
class MemoryArea(object):
def __init__(self):
self.size = 0
self.base = None
def regWord(self, addr):
if self.base == None:
self.base = addr
self.size += 4
return True
elif addr == self.base + self.size:
self.size += 4
return True
elif addr == self.base - 4:
self.base -= 4
self.size += 4
return True
return False
class Stim_file(object):
def __init__(self, path, base, size):
self.path = path
self.file = None
self.base = base
self.size = size
def getFile(self):
if self.file == None and self.path != None:
self.file = open(self.path, 'w')
return self.file
def regWord(self, addr, data):
if addr >= self.base and addr < self.base + self.size:
if self.getFile() != None:
self.getFile().write("@%08X %s\n" % ((addr - self.base) >> 2, data))
return True
return False
def close(self):
if self.file != None:
self.file.close()
class FlashStimFile(object):
def __init__(self, path, archi=None, core=None):
self.file = open(path, 'w')
self.blockSize = 4096
self.nbAreas = 0
if archi == 'patronus' and core == 'secure':
self.entry = 0x1c000100
else:
self.entry = 0x1c000080
self.bootaddr = 0x1c000000
def close(self):
self.file.close()
def writesArea(self, areas):
flashOffset = 0
print ("Generating flash stimuli")
print (" Nb areas: %d" % len(areas))
# First compute areas flash information
flashOffset = 4 + 4 + 4 + len(areas) * 4 * 4
index = 0
for area in areas:
area.nbBlocks = (area.size + self.blockSize - 1) / self.blockSize
area.offset = flashOffset
flashOffset += area.nbBlocks * 4096
print (" Area %d: offset: 0x%x, base: 0x%x, size: 0x%x, nbBlocks: %d" % (index, area.offset, area.base, area.size, area.nbBlocks))
index += 1
# Then write the header containing memory areas declaration
flashOffset = 0
flashOffset += dump_bytes(self.file, flashOffset, "%08X"%len(areas))
flashOffset += dump_bytes(self.file, flashOffset, "%08X"%self.entry)
flashOffset += dump_bytes(self.file, flashOffset, "%08X"%self.bootaddr)
for area in areas:
flashOffset += dump_bytes(self.file, flashOffset, "%08X"%area.offset)
flashOffset += dump_bytes(self.file, flashOffset, "%08X"%area.base)
flashOffset += dump_bytes(self.file, flashOffset, "%08X"%area.size)
flashOffset += dump_bytes(self.file, flashOffset, "%08X"%area.nbBlocks)
# Finally write the data
for area in areas:
flashOffset = area.offset
for addr in range(area.base, area.base + area.size, 4):
flashOffset += dump_bytes(self.file, flashOffset, slm_dict[addr>>2])
class Stimuli(object):
def __init__(self):
self.stimFileRanges = []
self.areas = []
self.flashFile = None
def regFile(self, path, base, size):
file = Stim_file(path, base, size)
if base != None:
self.stimFileRanges.append(file)
def regFlashFile(self, path, archi=None, core=None):
self.flashFile = FlashStimFile(path, archi, core)
def regWord(self, addr, data):
found = False
for area in self.areas:
if area.regWord(addr):
found = True
break
if not found:
area = MemoryArea()
area.regWord(addr)
self.areas.append(area)
for stimFile in self.stimFileRanges:
if stimFile.regWord(addr, data):
return True
return False
def writeFlash(self):
self.flashFile.writesArea(self.areas)
def close(self):
if self.flashFile != None: self.flashFile.close()
for stimFile in self.stimFileRanges:
stimFile.close()
stimuli = Stimuli()
###############################################################################
# Function to dump single bytes of a string to a file
###############################################################################
def dump_bytes( filetoprint, addr, data_s):
for i in xrange(0,4,1):
filetoprint.write("@%08X %s\n" % ( addr+i, data_s[(3-i)*2:((3-i)+1)*2] ))
return 4
###############################################################################
# Read s19 file and put data bytes into a dictionary
###############################################################################
def s19_parse(filename, s19_dict):
s19_file = open(filename, 'r')
for line in s19_file:
rec_field = line[:2]
prefix = line[:4]
if rec_field == "S0" or prefix == "S009" or prefix == "S505" or prefix == "S705" or prefix == "S017" or prefix == "S804" or line == "":
continue
data = line[-6:-4] # extract data byte
str_addr = line[4:-6]
addr = int("0x%s" % str_addr, 0)
s19_dict[addr] = data
s19_file.close()
###############################################################################
# arrange bytes in words
###############################################################################
def bytes_to_words(byte_dict, word_dict):
for addr in byte_dict:
wordaddr = addr >> 2
data = "00000000"
if wordaddr in word_dict:
data = word_dict[wordaddr]
byte = addr % 4
byte0 = data[0:2]
byte1 = data[2:4]
byte2 = data[4:6]
byte3 = data[6:8]
new = byte_dict[addr]
if byte == 0:
data = "%s%s%s%s" % (byte0, byte1, byte2, new)
elif byte == 1:
data = "%s%s%s%s" % (byte0, byte1, new, byte3)
elif byte == 2:
data = "%s%s%s%s" % (byte0, new, byte2, byte3)
elif byte == 3:
data = "%s%s%s%s" % (new, byte1, byte2, byte3)
word_dict[wordaddr] = data
###############################################################################
# Start of file
###############################################################################
if len(sys.argv) < 2:
    print "Usage: s19toslm.py FILENAME [ARCHI [CORE]]"
    quit()
archi = None
core = None
if len(sys.argv) > 2: archi = sys.argv[2]
if len(sys.argv) > 3: core = sys.argv[3]
###############################################################################
# Parse s19 file
###############################################################################
s19_dict = {}
slm_dict = {}
s19_parse(sys.argv[1], s19_dict)
bytes_to_words(s19_dict, slm_dict)
###############################################################################
# open files
###############################################################################
stimuli.regFile("tcdm.slm", 0x10000000, 128 * 1024)
stimuli.regFile("fcTcdm.slm", 0x1B000000, 64 * 1024)
stimuli.regFile("l2_stim.slm", 0x1C000000, 512 * 1024)
stimuli.regFile(None , 0x1A000000, 64 * 1024)
if archi != 'GAP':
stimuli.regFlashFile("flash_stim.slm", archi, core)
###############################################################################
# write the stimuli
###############################################################################
for addr in sorted(slm_dict.keys()):
fullAddr = addr << 2
if not stimuli.regWord(fullAddr, slm_dict[addr]):
raise Exception("Got s19 address from unknown memory range: 0x%x" % fullAddr)
###############################################################################
# write flash
###############################################################################
if archi != 'GAP':
stimuli.writeFlash()
###############################################################################
# close all files
###############################################################################
stimuli.close()
| [
"[email protected]"
]
| |
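
The `bytes_to_words` helper above packs per-byte hex strings into 32-bit words keyed by word address, with byte 0 landing in the low byte. A tiny standalone illustration; the addresses are made up:

```python
byte_dict = {0x1C000000: "78", 0x1C000001: "56",
             0x1C000002: "34", 0x1C000003: "12"}
word_dict = {}
bytes_to_words(byte_dict, word_dict)
print(word_dict[0x1C000000 >> 2])  # "12345678": little-endian bytes, high byte printed first
```
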
bf732d79c691374c2a1fbe7b8c8e18e5e53ec00e | ac2bc51f88ca0d966968d3aa28331ac715d9a1e2 | /pymeshio/pmd/reader.py | d68a767ff889e6d5f4668235e16b345b35f6905f | []
| no_license | zhouhang95/pymeshio | 5b29a864253d75bbcc23e7514ea4f9c86338031c | 256dd9146103abc3e2e300de9ae09dcde057b534 | refs/heads/master | 2020-06-14T23:30:34.727047 | 2020-04-18T14:05:04 | 2020-04-18T14:05:04 | 195,154,815 | 0 | 0 | null | 2019-07-04T02:21:55 | 2019-07-04T02:21:55 | null | UTF-8 | Python | false | false | 7,910 | py | #coding: utf-8
"""
pmd reader
"""
import io
from .. import common
from .. import pmd
class Reader(common.BinaryReader):
"""pmx reader
"""
def __init__(self, ios, version):
super(Reader, self).__init__(ios)
self.version=version
def read_text(self, size):
"""read cp932 text
"""
src=self.unpack("%ds" % size, size)
assert(type(src)==bytes)
pos = src.find(b"\x00")
if pos==-1:
return src
else:
return src[:pos]
def read_vertex(self):
return pmd.Vertex(
self.read_vector3(),
self.read_vector3(),
self.read_vector2(),
self.read_uint(2),
self.read_uint(2),
self.read_uint(1),
self.read_uint(1))
def read_material(self):
return pmd.Material(
diffuse_color=self.read_rgb(),
alpha=self.read_float(),
specular_factor=self.read_float(),
specular_color=self.read_rgb(),
ambient_color=self.read_rgb(),
toon_index=self.read_int(1),
edge_flag=self.read_uint(1),
vertex_count=self.read_uint(4),
texture_file=self.read_text(20)
)
def read_bone(self):
name=self.read_text(20)
parent_index=self.read_uint(2)
tail_index=self.read_uint(2)
bone=pmd.createBone(name, self.read_uint(1))
bone.parent_index=parent_index
bone.tail_index=tail_index
bone.ik_index = self.read_uint(2)
bone.pos = self.read_vector3()
return bone
def read_ik(self):
ik=pmd.IK(self.read_uint(2), self.read_uint(2))
ik.length = self.read_uint(1)
ik.iterations = self.read_uint(2)
ik.weight = self.read_float()
ik.children=[self.read_uint(2) for _ in range(ik.length)]
return ik
def read_morph(self):
morph=pmd.Morph(self.read_text(20))
morph_size = self.read_uint(4)
morph.type = self.read_uint(1)
for j in range(morph_size):
morph.indices.append(self.read_uint(4))
morph.pos_list.append(self.read_vector3())
return morph
def read_rigidbody(self):
return pmd.RigidBody(
name=self.read_text(20),
bone_index=self.read_int(2),
collision_group=self.read_int(1),
no_collision_group=self.read_int(2),
shape_type=self.read_uint(1),
shape_size=self.read_vector3(),
shape_position=self.read_vector3(),
shape_rotation=self.read_vector3(),
mass=self.read_float(),
linear_damping=self.read_float(),
angular_damping=self.read_float(),
restitution=self.read_float(),
friction=self.read_float(),
mode=self.read_uint(1)
)
def read_joint(self):
return pmd.Joint(
name=self.read_text(20),
rigidbody_index_a=self.read_uint(4),
rigidbody_index_b=self.read_uint(4),
position=self.read_vector3(),
rotation=self.read_vector3(),
translation_limit_min=self.read_vector3(),
translation_limit_max=self.read_vector3(),
rotation_limit_min=self.read_vector3(),
rotation_limit_max=self.read_vector3(),
spring_constant_translation=self.read_vector3(),
spring_constant_rotation=self.read_vector3())
def __read(reader, model):
# model info
model.name=reader.read_text(20)
model.comment=reader.read_text(256)
# model data
model.vertices=[reader.read_vertex()
for _ in range(reader.read_uint(4))]
model.indices=[reader.read_uint(2)
for _ in range(reader.read_uint(4))]
model.materials=[reader.read_material()
for _ in range(reader.read_uint(4))]
model.bones=[reader.read_bone()
for _ in range(reader.read_uint(2))]
model.ik_list=[reader.read_ik()
for _ in range(reader.read_uint(2))]
model.morphs=[reader.read_morph()
for _ in range(reader.read_uint(2))]
model.morph_indices=[reader.read_uint(2)
for _ in range(reader.read_uint(1))]
model.bone_group_list=[pmd.BoneGroup(reader.read_text(50))
for _ in range(reader.read_uint(1))]
model.bone_display_list=[(reader.read_uint(2), reader.read_uint(1))
for _i in range(reader.read_uint(4))]
if reader.is_end():
# EOF
return True
############################################################
# extend1: english name
############################################################
if reader.read_uint(1)==1:
#return True
model.english_name=reader.read_text(20)
model.english_comment=reader.read_text(256)
for bone in model.bones:
bone.english_name=reader.read_text(20)
for morph in model.morphs:
if morph.name==b'base':
continue
morph.english_name=reader.read_text(20)
for g in model.bone_group_list:
g.english_name=reader.read_text(50)
############################################################
# extend2: toon_textures
############################################################
if reader.is_end():
# EOF
return True
model.toon_textures=[reader.read_text(100)
for _ in range(10)]
############################################################
# extend2: rigidbodies and joints
############################################################
if reader.is_end():
# EOF
return True
model.rigidbodies=[reader.read_rigidbody()
for _ in range(reader.read_uint(4))]
model.joints=[reader.read_joint()
for _ in range(reader.read_uint(4))]
return True
def read_from_file(path):
"""
read from file path, then return the pymeshio.pmd.Model.
:Parameters:
path
file path
>>> import pymeshio.pmd.reader
>>> m=pymeshio.pmd.reader.read_from_file('resources/初音ミクVer2.pmd')
>>> print(m)
<pmd-2.0 "Miku Hatsune" 12354vertices>
"""
pmd=read(io.BytesIO(common.readall(path)))
pmd.path=path
return pmd
def read(ios: io.IOBase):
"""
read from ios, then return the pymeshio.pmd.Model.
:Parameters:
ios
input stream (in io.IOBase)
>>> import pymeshio.pmd.reader
>>> m=pymeshio.pmd.reader.read(io.open('resources/初音ミクVer2.pmd', 'rb'))
>>> print(m)
<pmd-2.0 "Miku Hatsune" 12354vertices>
"""
assert(isinstance(ios, io.IOBase))
reader=common.BinaryReader(ios)
# header
signature=reader.unpack("3s", 3)
if signature!=b"Pmd":
raise common.ParseException(
"invalid signature: {0}".format(signature))
version=reader.read_float()
model=pmd.Model(version)
reader=Reader(reader.ios, version)
if(__read(reader, model)):
# check eof
if not reader.is_end():
#print("can not reach eof.")
pass
# build bone tree
for i, child in enumerate(model.bones):
child.index=i
if child.parent_index==0xFFFF:
# no parent
model.no_parent_bones.append(child)
child.parent=None
else:
# has parent
parent=model.bones[child.parent_index]
child.parent=parent
parent.children.append(child)
# 後位置
if child.hasChild():
child.tail=model.bones[child.tail_index].pos
return model
| [
"[email protected]"
]
| |
1fa6918b789095c70ac0a9b29a5bf35351e768ff | 45de3aa97525713e3a452c18dcabe61ac9cf0877 | /src/bases/anaconf/fichier_configuration.py | 3c393fe67c215a743548a07ae83628cef8b5bacf | [
"BSD-3-Clause"
]
| permissive | stormi/tsunami | 95a6da188eadea3620c70f7028f32806ee2ec0d1 | bdc853229834b52b2ee8ed54a3161a1a3133d926 | refs/heads/master | 2020-12-26T04:27:13.578652 | 2015-11-17T21:32:38 | 2015-11-17T21:32:38 | 25,606,146 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,208 | py | # -*-coding:Utf-8 -*
# Copyright (c) 2010 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Ce fichier décrit la classe FichierConfiguration, détaillée plus bas."""
import re
import textwrap
from .exceptions import *
class FichierConfiguration:
"""Cette classe définit un fichier de configuration.
Le fichier créé par cette classe est déjà ouvert. La classe se contente
de l'analyser et de placer les données dans un dictionnaire.
Elle est également en charge de mettre un jour un fichier en tenant
compte d'un autre fichier (mettre à jour un modèle en tenant compte
des données configurées, ici).
"""
def __init__(self, nom, chaine, logger):
"""Constructeur d'un fichier de configuration.
On lui passe la chaîne lue dans le fichier, non analysée.
Cette chaîne contient donc les données brutes, il faut l'analyser.
"""
self.nom = nom
self.fichier = chaine
self.donnees = {}
self.lignes = {}
self.logger = logger
        # Parse the string
        t_contenu = chaine.split("\n")
        contenu = chaine
        delimiteurs = ('\\', ',', '[', '{', '(')
        # Read the data
i = 0
while i < len(t_contenu):
ligne = t_contenu[i]
if ligne.strip() == "":
i += 1
continue
elif ligne.lstrip().startswith("#"):
i += 1
continue
elif "=" not in ligne:
self.logger.warning("[{}:{}]: le signe '=' n'a pas été " \
"trouvé ('{}')".format(self.nom, i + 1, ligne))
i += 1
else:
nom_donnee = ligne.split("=")[0].strip()
donnee = "=".join(ligne.split("=")[1:]).lstrip()
                # If the line continues, keep reading the following lines
                # (guarding against blank lines, which would break the [-1] lookup)
                ligne_debut = i
                while (ligne.rstrip() and ligne.rstrip()[-1] in delimiteurs) or \
                        ligne.lstrip().startswith("#"):
i += 1
if i >= len(t_contenu):
break
ligne = t_contenu[i]
donnee += "\n" + ligne
ligne_fin = i
self.lignes[nom_donnee] = (ligne_debut, ligne_fin)
self.donnees[nom_donnee] = donnee
i += 1
def mettre_a_jour(self, autre_fichier):
"""Met à jour l'attribut 'chaine' en fonction d'un autre fichier.
On parcourt les données de cet autre fichier.
* Si la donnée est présente dans self.donnees, on la réécrit
sans savoir si elle est identique ou non, on l'écrase)
* Sinon on ne la réécrit pas.
"""
t_contenu = self.fichier.split("\n")
        # Walk the replacements from the bottom of the file upward, so the
        # recorded line ranges stay valid even when a new value spans a
        # different number of lines than the old one
        items = sorted(autre_fichier.donnees.items(),
                key=lambda item: self.lignes.get(item[0], (-1, -1))[0],
                reverse=True)
        for nom_don, val_don in items:
            if nom_don in self.donnees.keys(): # the item exists
                # Update it
self.donnees[nom_don] = val_don
                if nom_don not in self.lignes:
                    # The item's line range was not found
                    raise ErreurInterpretation("the item {} was not " \
                            "found in the file to be " \
                            "updated".format(nom_don))
debut, fin = self.lignes[nom_don]
nv_val = nom_don + " = " + val_don
nv_val = nv_val.split("\n")
t_contenu = t_contenu[:debut] + nv_val + t_contenu[fin + 1:]
self.fichier = "\n".join(t_contenu)
| [
"[email protected]"
]
| |
7f5bf99bf96136efc47b1ad93ed0722deb7c4c24 | 663365d4c1c4068dab79a4b24cf6c96888b0862d | /Functions/migrations/0021_contribute_upload_file.py | 173945ddf3192fbd6275812a40ffb533ccbbbafa | []
| no_license | gitanjali1077/UnistashFinal | e7052b26db70d3ed728f7cddd90da31f6d4f0a50 | 31251e441c8759ca3d6c4b0cb274902293fd38a7 | refs/heads/master | 2021-07-02T07:01:10.919891 | 2017-09-23T11:55:03 | 2017-09-23T11:55:03 | 104,563,813 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-08-28 18:28
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('Functions', '0020_contribute'),
]
operations = [
migrations.AddField(
model_name='contribute',
name='upload_file',
field=models.FileField(default='abc.jpg', upload_to='documents/'),
preserve_default=False,
),
]
| [
"[email protected]"
]
| |
c705ad27d574a1c68f53e7ea69f8e302c6e3bd45 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03360/s908615245.py | 1bd71f84ec89bec0620034eb77b6014bd7f9078f | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | lis = list(map(int,input().split()))
a = int(input())
lis.sort(reverse = True)
for i in range(a):
lis[0] *= 2
print(sum(lis)) | [
"[email protected]"
]
| |
81dee0a2004d73370b2d84df410b1771eb0f2555 | 1fd8e5db25f8ebc7cc4506cbb07ba98f717b667e | /blackjack_2.py | 7f3f9103277f58cbeedc43f4453994139b518033 | []
| no_license | flatplanet/Intro-To-TKinter-Youtube-Course | 6103410435fc3b977fb44a4b08d045950ba10380 | cf988099fc358e52ed773273cb2e7ddb9d37d995 | refs/heads/master | 2022-10-06T10:02:38.689302 | 2022-07-18T18:11:12 | 2022-07-18T18:11:12 | 174,183,345 | 524 | 426 | null | 2021-10-10T16:16:44 | 2019-03-06T16:44:03 | Python | UTF-8 | Python | false | false | 9,301 | py | from tkinter import *
import random
from PIL import Image, ImageTk
from tkinter import messagebox
root = Tk()
root.title('Codemy.com - Card Deck')
root.iconbitmap('c:/gui/codemy.ico')
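# Note: the .ico path above is Windows-specific; on other platforms (or if the
# file is missing) iconbitmap raises a TclError, so adjust or drop that line.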
root.geometry("1200x800")
root.configure(background="green")
# Test for blackjack on shuffle
def blackjack_shuffle(player):
if player == "dealer":
if len(dealer_score) == 2:
if dealer_score[0] + dealer_score[1] == 21:
messagebox.showinfo("Dealer Wins!", "Blackjack! Dealer Wins!")
# Disable buttons
card_button.config(state="disabled")
stand_button.config(state="disabled")
if player == "player":
if len(player_score) == 2:
if player_score[0] + player_score[1] == 21:
messagebox.showinfo("Player Wins!", "Blackjack! Player Wins!")
# Disable buttons
card_button.config(state="disabled")
stand_button.config(state="disabled")
# Resize Cards
def resize_cards(card):
# Open the image
our_card_img = Image.open(card)
# Resize The Image
our_card_resize_image = our_card_img.resize((150, 218))
# output the card
global our_card_image
our_card_image = ImageTk.PhotoImage(our_card_resize_image)
# Return that card
return our_card_image
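# Keeping a module-level reference to each PhotoImage (and returning it) is
# deliberate: Tkinter does not hold its own Python reference to the image, so
# a garbage-collected PhotoImage leaves its label blank.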
# Shuffle The Cards
def shuffle():
# Enable buttons
card_button.config(state="normal")
stand_button.config(state="normal")
# Clear all the old cards from previous games
dealer_label_1.config(image='')
dealer_label_2.config(image='')
dealer_label_3.config(image='')
dealer_label_4.config(image='')
dealer_label_5.config(image='')
player_label_1.config(image='')
player_label_2.config(image='')
player_label_3.config(image='')
player_label_4.config(image='')
player_label_5.config(image='')
# Define Our Deck
suits = ["diamonds", "clubs", "hearts", "spades"]
values = range(2, 15)
# 11 = Jack, 12=Queen, 13=King, 14 = Ace
global deck
deck =[]
for suit in suits:
for value in values:
deck.append(f'{value}_of_{suit}')
# Create our players
global dealer, player, dealer_spot, player_spot, dealer_score, player_score
dealer = []
player = []
dealer_score = []
player_score = []
dealer_spot = 0
player_spot = 0
	# Deal two starting cards each to the dealer and the player
dealer_hit()
dealer_hit()
player_hit()
player_hit()
# Put number of remaining cards in title bar
root.title(f'Codemy.com - {len(deck)} Cards Left')
def dealer_hit():
global dealer_spot
if dealer_spot < 5:
try:
# Get the player Card
dealer_card = random.choice(deck)
# Remove Card From Deck
deck.remove(dealer_card)
# Append Card To Dealer List
dealer.append(dealer_card)
# Append to dealer score list and convert facecards to 10 or 11
dcard = int(dealer_card.split("_", 1)[0])
if dcard == 14:
dealer_score.append(11)
elif dcard == 11 or dcard == 12 or dcard == 13:
dealer_score.append(10)
else:
dealer_score.append(dcard)
# Output Card To Screen
global dealer_image1, dealer_image2, dealer_image3, dealer_image4, dealer_image5
if dealer_spot == 0:
# Resize Card
dealer_image1 = resize_cards(f'images/cards/{dealer_card}.png')
# Output Card To Screen
dealer_label_1.config(image=dealer_image1)
# Increment our player spot counter
dealer_spot += 1
elif dealer_spot == 1:
# Resize Card
dealer_image2 = resize_cards(f'images/cards/{dealer_card}.png')
# Output Card To Screen
dealer_label_2.config(image=dealer_image2)
# Increment our player spot counter
dealer_spot += 1
elif dealer_spot == 2:
# Resize Card
dealer_image3 = resize_cards(f'images/cards/{dealer_card}.png')
# Output Card To Screen
dealer_label_3.config(image=dealer_image3)
# Increment our player spot counter
dealer_spot += 1
elif dealer_spot == 3:
# Resize Card
dealer_image4 = resize_cards(f'images/cards/{dealer_card}.png')
# Output Card To Screen
dealer_label_4.config(image=dealer_image4)
# Increment our player spot counter
dealer_spot += 1
elif dealer_spot == 4:
# Resize Card
dealer_image5 = resize_cards(f'images/cards/{dealer_card}.png')
# Output Card To Screen
dealer_label_5.config(image=dealer_image5)
# Increment our player spot counter
dealer_spot += 1
# Put number of remaining cards in title bar
root.title(f'Codemy.com - {len(deck)} Cards Left')
except:
root.title(f'Codemy.com - No Cards In Deck')
# Check for blackjack
blackjack_shuffle("dealer")
def player_hit():
global player_spot
if player_spot < 5:
try:
# Get the player Card
player_card = random.choice(deck)
# Remove Card From Deck
deck.remove(player_card)
# Append Card To Dealer List
player.append(player_card)
# Append to dealer score list and convert facecards to 10 or 11
pcard = int(player_card.split("_", 1)[0])
if pcard == 14:
player_score.append(11)
elif pcard == 11 or pcard == 12 or pcard == 13:
player_score.append(10)
else:
player_score.append(pcard)
# Output Card To Screen
global player_image1, player_image2, player_image3, player_image4, player_image5
if player_spot == 0:
# Resize Card
player_image1 = resize_cards(f'images/cards/{player_card}.png')
# Output Card To Screen
player_label_1.config(image=player_image1)
# Increment our player spot counter
player_spot += 1
elif player_spot == 1:
# Resize Card
player_image2 = resize_cards(f'images/cards/{player_card}.png')
# Output Card To Screen
player_label_2.config(image=player_image2)
# Increment our player spot counter
player_spot += 1
elif player_spot == 2:
# Resize Card
player_image3 = resize_cards(f'images/cards/{player_card}.png')
# Output Card To Screen
player_label_3.config(image=player_image3)
# Increment our player spot counter
player_spot += 1
elif player_spot == 3:
# Resize Card
player_image4 = resize_cards(f'images/cards/{player_card}.png')
# Output Card To Screen
player_label_4.config(image=player_image4)
# Increment our player spot counter
player_spot += 1
elif player_spot == 4:
# Resize Card
player_image5 = resize_cards(f'images/cards/{player_card}.png')
# Output Card To Screen
player_label_5.config(image=player_image5)
# Increment our player spot counter
player_spot += 1
# Put number of remaining cards in title bar
root.title(f'Codemy.com - {len(deck)} Cards Left')
except:
root.title(f'Codemy.com - No Cards In Deck')
# Check for blackjack
blackjack_shuffle("player")
# Deal Out Cards (leftover from an earlier part of the tutorial: it references
# single dealer_label/player_label widgets that no longer exist in this file,
# and no button is wired to call it)
def deal_cards():
try:
		# Get the dealer card
card = random.choice(deck)
# Remove Card From Deck
deck.remove(card)
# Append Card To Dealer List
dealer.append(card)
# Output Card To Screen
global dealer_image
dealer_image = resize_cards(f'images/cards/{card}.png')
dealer_label.config(image=dealer_image)
#dealer_label.config(text=card)
# Get the player Card
card = random.choice(deck)
# Remove Card From Deck
deck.remove(card)
# Append Card To Dealer List
player.append(card)
# Output Card To Screen
global player_image
player_image = resize_cards(f'images/cards/{card}.png')
player_label.config(image=player_image)
#player_label.config(text=card)
# Put number of remaining cards in title bar
root.title(f'Codemy.com - {len(deck)} Cards Left')
except:
root.title(f'Codemy.com - No Cards In Deck')
my_frame = Frame(root, bg="green")
my_frame.pack(pady=20)
# Create Frames For Cards
dealer_frame = LabelFrame(my_frame, text="Dealer", bd=0)
dealer_frame.pack(padx=20, ipadx=20)
player_frame = LabelFrame(my_frame, text="Player", bd=0)
player_frame.pack(ipadx=20, pady=10)
# Put Dealer cards in frames
dealer_label_1 = Label(dealer_frame, text='')
dealer_label_1.grid(row=0, column=0, pady=20, padx=20)
dealer_label_2 = Label(dealer_frame, text='')
dealer_label_2.grid(row=0, column=1, pady=20, padx=20)
dealer_label_3 = Label(dealer_frame, text='')
dealer_label_3.grid(row=0, column=2, pady=20, padx=20)
dealer_label_4 = Label(dealer_frame, text='')
dealer_label_4.grid(row=0, column=3, pady=20, padx=20)
dealer_label_5 = Label(dealer_frame, text='')
dealer_label_5.grid(row=0, column=4, pady=20, padx=20)
# Put Player cards in frames
player_label_1 = Label(player_frame, text='')
player_label_1.grid(row=1, column=0, pady=20, padx=20)
player_label_2 = Label(player_frame, text='')
player_label_2.grid(row=1, column=1, pady=20, padx=20)
player_label_3 = Label(player_frame, text='')
player_label_3.grid(row=1, column=2, pady=20, padx=20)
player_label_4 = Label(player_frame, text='')
player_label_4.grid(row=1, column=3, pady=20, padx=20)
player_label_5 = Label(player_frame, text='')
player_label_5.grid(row=1, column=4, pady=20, padx=20)
# Create Button Frame
button_frame = Frame(root, bg="green")
button_frame.pack(pady=20)
# Create a couple buttons
shuffle_button = Button(button_frame, text="Shuffle Deck", font=("Helvetica", 14), command=shuffle)
shuffle_button.grid(row=0, column=0)
card_button = Button(button_frame, text="Hit Me!", font=("Helvetica", 14), command=player_hit)
card_button.grid(row=0, column=1, padx=10)
stand_button = Button(button_frame, text="Stand!", font=("Helvetica", 14))
stand_button.grid(row=0, column=2)
# Shuffle Deck On Start
shuffle()
root.mainloop() | [
"[email protected]"
]
| |
396afefec4925ee05474afb55fca1e1c01268612 | 627fda699b0dc401a19b0cbbc17ea6632f315c8c | /baltimore.py | 123f209bc531b868c7a5fed241650c20e9899e8a | []
| no_license | Romulus83/python | 31db444e766ceff39813162f3e64edbb53bfdfdf | f5dc170ccd2b98f84a17f8bd8d8f5d8bd9a51f60 | refs/heads/master | 2023-02-16T16:07:18.947227 | 2021-01-16T17:08:40 | 2021-01-16T17:08:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,175 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Mar 3 18:14:48 2020
@author: user
"""
"""
remove the dollar signs in the AnnualSalary field and assign it as an int
"""
import pandas as pd
df4 = pd.read_csv("Baltimore_City_Employee_Salaries_FY2014.csv")
df4["AnnualSalary"] = df4["AnnualSalary"].astype("int64")
"""
Group the data on JobTitle and AnnualSalary, and aggregate with sum, mean, etc.
Sort the data and display to show who gets the highest salary
"""
df4[["AnnualSalary","JobTitle"]]
df4["AnnualSalary"].agg(['sum','mean'])
a = sorted(df4["AnnualSalary"])
df4["AnnualSalary"].max()
df4["AnnualSalary"].min()
"""
Try to group on JobTitle only and sort the data and display
"""
sorted(df4["JobTitle"])
"""
How many employees are there for each job title? Graph it
"""
import matplotlib.pyplot as plt
# value_counts() is sorted by frequency, so take the labels from its own index
# to keep each slice paired with the right job title
counts = df4["JobTitle"].value_counts()
plt.pie(counts, labels=counts.index, autopct="%.2f", radius=3)
plt.show()
"""
Graph and show which Job Title spends the most
"""
# Total salary spend per job title; plot the ten largest
spend = df4.groupby("JobTitle")["AnnualSalary"].sum().sort_values(ascending=False).head(10)
plt.pie(spend, labels=spend.index, autopct="%.2f", radius=3)
plt.show()
| [
"[email protected]"
]
| |
c501b83c089a167048db9394457c00320da924b2 | 786de89be635eb21295070a6a3452f3a7fe6712c | /CalibManager/tags/V00-00-68/src/ConfigParametersForApp.py | f7d3a9f5c4bce80fcb97df832afdda9a10bccf88 | []
| no_license | connectthefuture/psdmrepo | 85267cfe8d54564f99e17035efe931077c8f7a37 | f32870a987a7493e7bf0f0a5c1712a5a030ef199 | refs/heads/master | 2021-01-13T03:26:35.494026 | 2015-09-03T22:22:11 | 2015-09-03T22:22:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 40,587 | py | #--------------------------------------------------------------------------
# File and Version Information:
# $Id$
#
# Description:
# Module ConfigParametersForApp
#
#------------------------------------------------------------------------
"""ConfigParametersForApp - class supporting configuration parameters for specific application.
This software was developed for the SIT project. If you use all or
part of it, please give an appropriate acknowledgment.
@see RelatedModule
@version $Id$
@author Mikhail S. Dubrovin
"""
#------------------------------
# Module's version from SVN --
#------------------------------
__version__ = "$Revision$"
__author__ = "Mikhail S. Dubrovin"
# $Source$
#----------------------
# Import of modules --
#----------------------
import sys
from Logger import logger
from ConfigParameters import *
from PyQt4 import QtGui, QtCore
import AppDataPath as apputils # for icons
#-----------------------------
class ConfigParametersForApp ( ConfigParameters ) :
"""Is intended as a storage for configuration parameters for CorAna project.
#@see BaseClass ConfigParameters
#@see OtherClass Parameters
"""
name = 'ConfigParametersForApp'
list_pars = []
#char_expand = ' *' # down-head triangle
#char_shrink = ' ^' # down-head triangle
char_expand = u' \u25BC' # down-head triangle
char_shrink = u' \u25B2' # solid up-head triangle
#char_shrink = u'\u25B6' # solid right-head triangle
#char_shrink = u'\u25B7' # open right-head triangle
list_of_queues = ['psnehq', 'psfehq', 'psanacsq']
list_of_instr = ['AMO', 'SXR', 'XPP', 'XCS', 'CXI', 'MEC']
list_of_show_runs = ['in range', 'dark', 'all']
list_of_show_dets = ['any', 'selected any', 'selected all']
par01 = 'MPA1SdCp7h18m'
par02 = __author__.split()[2].lower()
dict_bjpeds = {} # dictionary of run_num:BatchJobPedestals objects
dict_guidarklistitem = {} # dictionary of run_num:GUIDarkListItem objects
def __init__ ( self, fname=None ) :
"""Constructor.
@param fname the file name with configuration parameters, if not specified then it will be set to the default value at declaration.
"""
ConfigParameters.__init__(self)
self.fname_cp = 'confpars-calibman.txt' # Re-define default config file name
self.declareAllParameters()
self.readParametersFromFile (fname)
self.initRunTimeParameters()
self.defineStyles()
def initRunTimeParameters( self ) :
self.iconsAreLoaded = False
#self.char_expand = u' \u25BE' # down-head triangle
self.guilogger = None
self.guimain = None
self.guidark = None
self.guidarklist = None
self.guitabs = None
self.guistatus = None
self.guiinsexpdirdet = None
self.guifilebrowser = None
self.blsp = None
self.guidarkcontrolbar = None
self.guigeometry = None
self.guimetrology = None
self.dark_list = None
self.guifilemanager = None
self.guifilemanagersingle = None
self.guifilemanagersinglecontrol = None
self.guifilemanagergroup = None
self.guifilemanagergroupcontrol = None
self.guiexpcalibdir = None
self.guidirtree = None
self.dirtreemodel = None
#self.thread_check_new_xtc_files = None
#-----------------------------
def setIcons(self) :
if self.iconsAreLoaded : return
self.iconsAreLoaded = True
#path = './icons/'
#path = "%s/icons/" % os.path.dirname(sys.argv[0])
#print 'path to icons:', pat
#logger.info('Load icons from directory: '+path, self.name)
path_icon_contents = apputils.AppDataPath('CalibManager/icons/contents.png' ).path()
path_icon_mail_forward = apputils.AppDataPath('CalibManager/icons/mail-forward.png' ).path()
path_icon_button_ok = apputils.AppDataPath('CalibManager/icons/button_ok.png' ).path()
path_icon_button_cancel = apputils.AppDataPath('CalibManager/icons/button_cancel.png').path()
path_icon_exit = apputils.AppDataPath('CalibManager/icons/exit.png' ).path()
path_icon_home = apputils.AppDataPath('CalibManager/icons/home.png' ).path()
path_icon_redo = apputils.AppDataPath('CalibManager/icons/redo.png' ).path()
path_icon_undo = apputils.AppDataPath('CalibManager/icons/undo.png' ).path()
path_icon_reload = apputils.AppDataPath('CalibManager/icons/reload.png' ).path()
path_icon_save = apputils.AppDataPath('CalibManager/icons/save.png' ).path()
path_icon_save_cfg = apputils.AppDataPath('CalibManager/icons/fileexport.png' ).path()
path_icon_edit = apputils.AppDataPath('CalibManager/icons/edit.png' ).path()
path_icon_browser = apputils.AppDataPath('CalibManager/icons/fileopen.png' ).path()
path_icon_monitor = apputils.AppDataPath('CalibManager/icons/icon-monitor.png' ).path()
path_icon_unknown = apputils.AppDataPath('CalibManager/icons/icon-unknown.png' ).path()
path_icon_plus = apputils.AppDataPath('CalibManager/icons/icon-plus.png' ).path()
path_icon_minus = apputils.AppDataPath('CalibManager/icons/icon-minus.png' ).path()
path_icon_logviewer = apputils.AppDataPath('CalibManager/icons/logviewer.png' ).path()
path_icon_lock = apputils.AppDataPath('CalibManager/icons/locked-icon.png' ).path()
path_icon_unlock = apputils.AppDataPath('CalibManager/icons/unlocked-icon.png').path()
path_icon_convert = apputils.AppDataPath('CalibManager/icons/icon-convert.png' ).path()
path_icon_table = apputils.AppDataPath('CalibManager/icons/table.gif' ).path()
path_icon_folder_open = apputils.AppDataPath('CalibManager/icons/folder_open.gif' ).path()
path_icon_folder_closed = apputils.AppDataPath('CalibManager/icons/folder_closed.gif').path()
self.icon_contents = QtGui.QIcon(path_icon_contents )
self.icon_mail_forward = QtGui.QIcon(path_icon_mail_forward )
self.icon_button_ok = QtGui.QIcon(path_icon_button_ok )
self.icon_button_cancel = QtGui.QIcon(path_icon_button_cancel)
self.icon_exit = QtGui.QIcon(path_icon_exit )
self.icon_home = QtGui.QIcon(path_icon_home )
self.icon_redo = QtGui.QIcon(path_icon_redo )
self.icon_undo = QtGui.QIcon(path_icon_undo )
self.icon_reload = QtGui.QIcon(path_icon_reload )
self.icon_save = QtGui.QIcon(path_icon_save )
self.icon_save_cfg = QtGui.QIcon(path_icon_save_cfg )
self.icon_edit = QtGui.QIcon(path_icon_edit )
self.icon_browser = QtGui.QIcon(path_icon_browser )
self.icon_monitor = QtGui.QIcon(path_icon_monitor )
self.icon_unknown = QtGui.QIcon(path_icon_unknown )
self.icon_plus = QtGui.QIcon(path_icon_plus )
self.icon_minus = QtGui.QIcon(path_icon_minus )
self.icon_logviewer = QtGui.QIcon(path_icon_logviewer )
self.icon_lock = QtGui.QIcon(path_icon_lock )
self.icon_unlock = QtGui.QIcon(path_icon_unlock )
self.icon_convert = QtGui.QIcon(path_icon_convert )
self.icon_table = QtGui.QIcon(path_icon_table )
self.icon_folder_open = QtGui.QIcon(path_icon_folder_open )
self.icon_folder_closed = QtGui.QIcon(path_icon_folder_closed)
#self.icon_contents = QtGui.QIcon(path + 'contents.png' )
#self.icon_mail_forward = QtGui.QIcon(path + 'mail-forward.png' )
#self.icon_button_ok = QtGui.QIcon(path + 'button_ok.png' )
#self.icon_button_cancel = QtGui.QIcon(path + 'button_cancel.png' )
#self.icon_exit = QtGui.QIcon(path + 'exit.png' )
#self.icon_home = QtGui.QIcon(path + 'home.png' )
#self.icon_redo = QtGui.QIcon(path + 'redo.png' )
#self.icon_undo = QtGui.QIcon(path + 'undo.png' )
#self.icon_reload = QtGui.QIcon(path + 'reload.png' )
#self.icon_save = QtGui.QIcon(path + 'save.png' )
#self.icon_save_cfg = QtGui.QIcon(path + 'fileexport.png' )
#self.icon_edit = QtGui.QIcon(path + 'edit.png' )
#self.icon_browser = QtGui.QIcon(path + 'fileopen.png' )
#self.icon_monitor = QtGui.QIcon(path + 'icon-monitor.png' )
#self.icon_unknown = QtGui.QIcon(path + 'icon-unknown.png' )
#self.icon_logviewer = QtGui.QIcon(path + 'logviewer.png' )
#self.icon_lock = QtGui.QIcon(path + 'locked-icon.png' )
#self.icon_unlock = QtGui.QIcon(path + 'unlocked-icon.png' )
#base_dir = '/usr/share/icons/Bluecurve/24x24/'
#self.icon_contents = QtGui.QIcon(base_dir + 'actions/contents.png')
#self.icon_mail_forward = QtGui.QIcon(base_dir + '../../gnome/24x24/actions/mail-forward.png')
#self.icon_button_ok = QtGui.QIcon(base_dir + 'actions/button_ok.png')
#self.icon_button_cancel = QtGui.QIcon(base_dir + 'actions/button_cancel.png')
#self.icon_exit = QtGui.QIcon(base_dir + 'actions/exit.png')
#self.icon_home = QtGui.QIcon(base_dir + 'actions/gohome.png')
#self.icon_redo = QtGui.QIcon(base_dir + 'actions/redo.png')
#self.icon_undo = QtGui.QIcon(base_dir + 'actions/undo.png')
#self.icon_reload = QtGui.QIcon(base_dir + 'actions/reload.png')
#self.icon_stop = QtGui.QIcon(base_dir + 'actions/stop.png')
#self.icon_save_cfg = QtGui.QIcon(base_dir + 'actions/fileexport.png')
#self.icon_save = QtGui.QIcon(base_dir + 'stock/stock-save.png')
#self.icon_edit = QtGui.QIcon(base_dir + 'actions/edit.png')
#self.icon_browser = QtGui.QIcon(base_dir + 'actions/fileopen.png')
#self.icon_monitor = QtGui.QIcon(base_dir + 'apps/icon-monitor.png')
#self.icon_unknown = QtGui.QIcon(base_dir + 'apps/icon-unknown.png')
#self.icon_logviewer = QtGui.QIcon(base_dir + '../32x32/apps/logviewer.png')
self.icon_logger = self.icon_edit
self.icon_help = self.icon_unknown
self.icon_reset = self.icon_reload
#-----------------------------
def declareAllParameters( self ) :
# Possible typs for declaration : 'str', 'int', 'long', 'float', 'bool'
# GUILogger.py
self.log_level = self.declareParameter( name='LOG_LEVEL_OF_MSGS', val_def='info', type='str' )
self.log_file = self.declareParameter( name='LOG_FILE_FOR_LEVEL', val_def='./log_for_level.txt', type='str' )
#self.log_file_total = self.declareParameter( name='LOG_FILE_TOTAL', val_def='./log_total.txt', type='str' )
self.save_log_at_exit = self.declareParameter( name='SAVE_LOG_AT_EXIT', val_def=True, type='bool')
# GUIMain.py (10, 25, 800, 700)
self.main_win_width = self.declareParameter( name='MAIN_WIN_WIDTH', val_def=800, type='int' )
self.main_win_height = self.declareParameter( name='MAIN_WIN_HEIGHT', val_def=700, type='int' )
self.main_win_pos_x = self.declareParameter( name='MAIN_WIN_POS_X', val_def=5, type='int' )
self.main_win_pos_y = self.declareParameter( name='MAIN_WIN_POS_Y', val_def=5, type='int' )
# GUIInsExpDirDet.py
self.instr_dir = self.declareParameter( name='INSTRUMENT_DIR', val_def='/reg/d/psdm', type='str' )
self.instr_name = self.declareParameter( name='INSTRUMENT_NAME', val_def='Select', type='str' ) # 'CXI'
self.exp_name = self.declareParameter( name='EXPERIMENT_NAME', val_def='Select', type='str' ) # 'cxitut13'
self.det_but_title = self.declareParameter( name='DETECTOR_BUT_TITLE',val_def='Select', type='str' ) # 'Select' or 'Selected:N'
self.det_name = self.declareParameter( name='DETECTOR_NAMES', val_def='', type='str' ) # 'CSPAD'
self.calib_dir = self.declareParameter( name='CALIB_DIRECTORY', val_def='Select', type='str' ) # '/reg/d/psdm/CXI/cxitut13/calib'
# GUIExpCalibDet.py
self.calib_dir_src = self.declareParameter( name='CALIB_DIRECTORY_SRC', val_def='Select', type='str' ) # '/reg/d/psdm/CXI/cxitut13/calib'
self.exp_name_src = self.declareParameter( name='EXPERIMENT_NAME_SRC', val_def='Select', type='str' ) # 'cxitut13'
# FileDeployer.py
self.fname_history = self.declareParameter( name='HISTORY_FILE_NAME', val_def='HISTORY', type='str' )
# GUITabs.py
self.current_tab = self.declareParameter( name='CURRENT_TAB' , val_def='Status', type='str' )
# GUIConfig.py
self.current_config_tab = self.declareParameter( name='CURRENT_CONFIG_TAB', val_def='Configuration File', type='str' )
# GUIFileManager.py
self.current_fmanager_tab = self.declareParameter( name='CURRENT_FILE_MANAGER_TAB', val_def='Single File', type='str' )
# GUIMainSplit.py
####self.fname_cp = self.declareParameter( name='FNAME_CONFIG_PARS', val=fname, val_def='confpars.txt', type='str' )
# GUIConfigPars.py
self.dir_work = self.declareParameter( name='DIRECTORY_WORK', val_def='./work', type='str' )
self.dir_results = self.declareParameter( name='DIRECTORY_RESULTS', val_def='./results', type='str' )
self.fname_prefix = self.declareParameter( name='FILE_NAME_PREFIX', val_def='clb-', type='str' )
self.save_cp_at_exit = self.declareParameter( name='SAVE_CONFIG_AT_EXIT', val_def=True, type='bool')
# GUIGeometry.py
self.current_geometry_tab = self.declareParameter( name='CURRENT_GEOMETRY_TAB', val_def='Metrology', type='str' )
self.fname_metrology_xlsx = self.declareParameter( name='FNAME_METROLOGY_XLSX', val_def='*.xlsx', type='str' )
self.fname_metrology_text = self.declareParameter( name='FNAME_METROLOGY_TEXT', val_def='metrology.txt', type='str' )
# GUIDark.py
self.dark_more_opts = self.declareParameter( name='DARK_MORE_OPTIONS', val_def=True, type='bool')
# GUIDarkRunGo.py
self.str_run_number = self.declareParameter( name='STRING_RUN_NUMBER', val_def='None', type='str' )
self.str_run_from = self.declareParameter( name='STRING_RUN_FROM', val_def='0000', type='str' )
self.str_run_to = self.declareParameter( name='STRING_RUN_TO', val_def='end', type='str' )
# GUIDarkControlBar.py
self.dark_list_show_runs = self.declareParameter( name='DARK_LIST_SHOW_RUNS', val_def=self.list_of_show_runs[0], type='str' )
self.dark_list_show_dets = self.declareParameter( name='DARK_LIST_SHOW_DETS', val_def=self.list_of_show_dets[0], type='str' )
self.dark_deploy_hotpix = self.declareParameter( name='DARK_DEPLOY_HOTPIX', val_def=True, type='bool')
self.dark_list_run_min = self.declareParameter( name='DARK_LIST_RUN_MIN', val_def=1, type='int' )
self.dark_list_run_max = self.declareParameter( name='DARK_LIST_RUN_MAX', val_def=10, type='int' )
#PlotImgSpeWidget.py
self.plot_intens_min = self.declareParameter( name='PLOT_INTENSITY_MIN', val_def='', type='str' )
self.plot_intens_max = self.declareParameter( name='PLOT_INTENSITY_MAX', val_def='', type='str' )
# GUIGrabSubmitELog.py
#self.cbx_more_options = self.declareParameter( name='CBX_SHOW_MORE_OPTIONS', val_def=False, type='bool' )
#self.img_infname = self.declareParameter( name='IMG_INPUT_FNAME', val_def='./img-1.ppm', type='str' )
#self.img_oufname = self.declareParameter( name='IMG_OUTPUT_FNAME', val_def='./img-1.ppm', type='str' )
#self.elog_post_des = self.declareParameter( name='ELOG_POST_DESCRIPTION', val_def='Image', type='str' )
#self.elog_post_tag = self.declareParameter( name='ELOG_POST_TAG', val_def='SCREENSHOT', type='str' )
#self.elog_post_ins = self.declareParameter( name='ELOG_POST_INSTRUMENT', val_def='AMO', type='str' )
#self.elog_post_exp = self.declareParameter( name='ELOG_POST_EXPERIMENT', val_def='amodaq09', type='str' )
#self.elog_post_in2 = self.declareParameter( name='ELOG_POST_INSTRUMENT_2', val_def='NEH', type='str' )
#self.elog_post_ex2 = self.declareParameter( name='ELOG_POST_EXPERIMENT_2', val_def='CXI Instrument', type='str' )
#self.elog_post_run = self.declareParameter( name='ELOG_POST_RUN', val_def='', type='str' )
#self.elog_post_res = self.declareParameter( name='ELOG_POST_RESPONCE', val_def='', type='str' )
#self.elog_post_msg = self.declareParameter( name='ELOG_POST_MESSAGE', val_def='', type='str' )
#self.elog_post_usr = self.declareParameter( name='ELOG_POST_USER_NAME', val_def='Unknown', type='str' )
#self.elog_post_sta = self.declareParameter( name='ELOG_POST_STATION', val_def='', type='str' )
#self.elog_post_url = self.declareParameter( name='ELOG_POST_URL', val_def='', type='str' )
#self.elog_post_cmd = self.declareParameter( name='ELOG_POST_CHILD_COMMAND', val_def='', type='str' )
# GUIDark.py
#self.use_dark_xtc_all = self.declareParameter( name='USE_DARK_XTC_ALL_CHUNKS', val_def=True, type='bool' )
#self.in_dir_dark = self.declareParameter( name='IN_DIRECTORY_DARK', val_def='/reg/d/ana12/xcs/xcsi0112/xtc',type='str' )
#self.in_file_dark = self.declareParameter( name='IN_FILE_NAME_DARK', val_def='e167-r0020-s00-c00.xtc',type='str' )
#self.bat_dark_total = self.declareParameter( name='BATCH_DARK_TOTAL', val_def=-1, type='int' )
self.bat_dark_start = self.declareParameter( name='BATCH_DARK_START', val_def= 1, type='int' )
self.bat_dark_end = self.declareParameter( name='BATCH_DARK_END', val_def=1000, type='int' )
self.bat_dark_scan = self.declareParameter( name='BATCH_DARK_SCAN', val_def=10, type='int' )
self.bat_det_info = self.declareParameter( name='BATCH_DET_INFO', val_def='DetInfo(:Princeton)', type='str' )
self.bat_img_rec_mod = self.declareParameter( name='BATCH_IMG_REC_MODULE', val_def='ImgAlgos.PrincetonImageProducer', type='str' )
self.mask_rms_thr = self.declareParameter( name='MASK_HOT_PIX_ADU_THR_RMS', val_def= 0, type='float' )
self.mask_min_thr = self.declareParameter( name='MASK_HOT_PIX_ADU_THR_MIN', val_def= 2, type='float' )
self.mask_max_thr = self.declareParameter( name='MASK_HOT_PIX_ADU_THR_MAX', val_def=65000, type='float' )
self.mask_hot_is_used = self.declareParameter( name='MASK_HOT_PIX_IS_USED', val_def= True, type='bool' )
# For batch jobs
self.bat_queue = self.declareParameter( name='BATCH_QUEUE', val_def='psanacsq', type='str' )
self.bat_submit_interval_sec = self.declareParameter( name='BATCH_SUBMIT_INTERVAL_SEC', val_def=30, type='int' )
# GUIMaskEditor.py
self.path_mask_img = self.declareParameter( name='PATH_TO_MASK_IMAGE', val_def='./work/*.txt', type='str' )
# GUIFileManagerSingleControl.py
#self.path_fm_selected = self.declareParameter( name='PATH_FM_SELECTED', val_def='./work/*.txt', type='str' )
# CommandLineCalib.py
self.xtc_dir_non_std = self.declareParameter( name='XTC_DIR_NON_STD', val_def='', type='str' ) # './my/xtc'
#-----------------------------
self.list_of_dets = ['CSPAD', 'CSPAD2x2', 'Princeton', 'pnCCD', 'Tm6740', \
'Opal1000', 'Opal2000', 'Opal4000', 'Opal8000', \
'OrcaFl40', 'Epix', 'Epix10k', 'Epix100a', 'Fccd960', 'Acqiris']
self.list_of_dets_lower = [det.lower() for det in self.list_of_dets]
self.list_of_data_types = ['CsPad::DataV',
'CsPad2x2::ElementV',
'Princeton::FrameV',
'PNCCD::FullFrameV',
'Camera::FrameV',
'Camera::FrameV',
'Camera::FrameV',
'Camera::FrameV',
'Camera::FrameV',
'Camera::FrameV',
'Epix::ElementV',
'Epix::ElementV',
'Epix::ElementV',
'Camera::FrameV',
'Acqiris::DataDesc']
self.dict_of_det_data_types = dict( zip(self.list_of_dets, self.list_of_data_types) )
#self.print_dict_of_det_data_types()
self.list_of_calib_types = ['CsPad::CalibV1',
'CsPad2x2::CalibV1',
'Princeton::CalibV1',
'PNCCD::CalibV1',
'Camera::CalibV1',
'Camera::CalibV1',
'Camera::CalibV1',
'Camera::CalibV1',
'Camera::CalibV1',
'Camera::CalibV1',
'Epix::CalibV1',
'Epix10k::CalibV1',
'Epix100a::CalibV1',
'Fccd960::CalibV1',
'Acqiris::CalibV1']
self.dict_of_det_calib_types = dict( zip(self.list_of_dets, self.list_of_calib_types) )
#self.print_dict_of_det_calib_types()
#-----------------------------
det_cbx_states = [ (False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool'), \
(False, False ,'bool') ]
self.det_cbx_states_list = self.declareListOfPars( 'DETECTOR_CBX_STATE', det_cbx_states )
#-----------------------------
self.const_types_cspad = [
'center'
,'center_global'
,'offset'
,'offset_corr'
,'marg_gap_shift'
,'quad_rotation'
,'quad_tilt'
,'rotation'
,'tilt'
,'pedestals'
,'pixel_status'
,'common_mode'
,'filter'
,'pixel_gain'
,'pixel_rms'
,'geometry'
#,'beam_vector'
#,'beam_intersect'
]
self.const_types_cspad2x2 = [
'center'
,'tilt'
,'pedestals'
,'pixel_status'
,'common_mode'
,'filter'
,'pixel_gain'
,'pixel_rms'
]
self.const_types_princeton = [
'pedestals'
,'pixel_status'
,'pixel_gain'
,'pixel_rms'
,'common_mode'
]
self.const_types_pnccd = [
'pedestals'
,'pixel_status'
,'pixel_gain'
,'pixel_rms'
,'common_mode'
]
self.const_types_camera = [
'pedestals'
,'pixel_status'
,'pixel_gain'
,'pixel_rms'
,'common_mode'
]
self.const_types_orcafl40 = [
'pedestals'
,'pixel_status'
,'pixel_gain'
,'pixel_rms'
,'common_mode'
]
self.const_types_epix = [
'pedestals'
,'pixel_status'
,'pixel_gain'
,'pixel_rms'
,'common_mode'
]
self.const_types_fccd960 = [
'pedestals'
,'pixel_status'
,'pixel_gain'
,'pixel_rms'
,'common_mode'
]
self.const_types_acqiris = [
'pedestals'
]
self.dict_of_det_const_types = dict( zip(self.list_of_dets, [ self.const_types_cspad
,self.const_types_cspad2x2
,self.const_types_princeton
,self.const_types_pnccd
,self.const_types_camera
,self.const_types_camera
,self.const_types_camera
,self.const_types_camera
,self.const_types_camera
,self.const_types_orcafl40
,self.const_types_epix
,self.const_types_epix
,self.const_types_epix
,self.const_types_fccd960
,self.const_types_acqiris
]) )
self.srcs_cspad = [
'CxiDs1.0:Cspad.0'
,'CxiDs2.0:Cspad.0'
,'CxiDsd.0:Cspad.0'
,'MecTargetChamber.0:Cspad.0'
,'XcsEndstation.0:Cspad.0'
,'XppGon.0:Cspad.0'
]
self.srcs_cspad2x2 = [
'CxiDg2.0:Cspad2x2.0'
,'CxiDg2.0:Cspad2x2.1'
,'CxiSc1.0:Cspad2x2.0'
,'CxiSc2.0:Cspad2x2.0'
,'CxiSc2.0:Cspad2x2.1'
,'CxiSc2.0:Cspad2x2.2'
,'CxiSc2.0:Cspad2x2.3'
,'CxiSc2.0:Cspad2x2.4'
,'CxiSc2.0:Cspad2x2.5'
,'CxiSc2.0:Cspad2x2.6'
,'CxiSc2.0:Cspad2x2.7'
,'MecEndstation.0:Cspad2x2.6'
,'MecTargetChamber.0:Cspad2x2.0'
,'MecTargetChamber.0:Cspad2x2.1'
,'MecTargetChamber.0:Cspad2x2.2'
,'MecTargetChamber.0:Cspad2x2.3'
,'MecTargetChamber.0:Cspad2x2.4'
,'MecTargetChamber.0:Cspad2x2.5'
,'SxrBeamline.0:Cspad2x2.2'
,'SxrBeamline.0:Cspad2x2.3'
,'XcsEndstation.0:Cspad2x2.0'
,'XcsEndstation.0:Cspad2x2.1'
,'XppGon.0:Cspad2x2.0'
,'XppGon.0:Cspad2x2.1'
,'XppGon.0:Cspad2x2.2'
,'XppGon.0:Cspad2x2.3'
]
self.srcs_princeton = [
'CxiEndstation.0:Princeton.0'
,'MecTargetChamber.0:Princeton.0'
,'MecTargetChamber.0:Princeton.1'
,'MecTargetChamber.0:Princeton.2'
,'MecTargetChamber.0:Princeton.3'
,'MecTargetChamber.0:Princeton.4'
,'MecTargetChamber.0:Princeton.5'
,'SxrEndstation.0:Princeton.0'
,'XcsBeamline.0:Princeton.0'
]
self.srcs_pnccd = [
'Camp.0:pnCCD.0'
,'Camp.0:pnCCD.1'
,'SxrEndstation.0:pnCCD.0'
,'XcsEndstation.0:pnCCD.0'
]
self.srcs_tm6740 = [
'CxiDg1.0:Tm6740.0'
,'CxiDg2.0:Tm6740.0'
,'CxiDg4.0:Tm6740.0'
,'CxiDsd.0:Tm6740.0'
,'CxiDsu.0:Tm6740.0'
,'CxiKb1.0:Tm6740.0'
,'CxiSc1.0:Tm6740.0'
,'CxiSc2.0:Tm6740.0'
,'CxiSc2.0:Tm6740.1'
,'XcsBeamline.1:Tm6740.4'
,'XcsBeamline.1:Tm6740.5'
,'XppEndstation.1:Tm6740.1'
,'XppMonPim.1:Tm6740.1'
,'XppSb3Pim.1:Tm6740.1'
,'XppSb4Pim.1:Tm6740.1'
]
self.srcs_opal1000 = [
'AmoBPS.0:Opal1000.0'
,'AmoBPS.0:Opal1000.1'
,'AmoEndstation.0:Opal1000.0'
,'AmoEndstation.1:Opal1000.0'
,'AmoEndstation.2:Opal1000.0'
,'AmoVMI.0:Opal1000.0'
,'CxiDg3.0:Opal1000.0'
,'CxiEndstation.0:Opal1000.1'
,'CxiEndstation.0:Opal1000.2'
,'MecTargetChamber.0:Opal1000.1'
,'SxrBeamline.0:Opal1000.0'
,'SxrBeamline.0:Opal1000.1'
,'SxrBeamline.0:Opal1000.100'
,'SxrEndstation.0:Opal1000.0'
,'SxrEndstation.0:Opal1000.1'
,'SxrEndstation.0:Opal1000.2'
,'SxrEndstation.0:Opal1000.3'
,'XcsEndstation.0:Opal1000.0'
,'XcsEndstation.0:Opal1000.1'
,'XcsEndstation.1:Opal1000.1'
,'XcsEndstation.1:Opal1000.2'
,'XppEndstation.0:Opal1000.0'
,'XppEndstation.0:Opal1000.1'
,'XppEndstation.0:Opal1000.2'
]
self.srcs_opal2000 = [
'CxiEndstation.0:Opal2000.1'
,'CxiEndstation.0:Opal2000.2'
,'CxiEndstation.0:Opal2000.3'
,'MecTargetChamber.0:Opal2000.0'
,'MecTargetChamber.0:Opal2000.1'
,'MecTargetChamber.0:Opal2000.2'
]
self.srcs_opal4000 = [
'CxiEndstation.0:Opal4000.1'
,'CxiEndstation.0:Opal4000.3'
,'MecTargetChamber.0:Opal4000.0'
,'MecTargetChamber.0:Opal4000.1'
]
self.srcs_opal8000 = [
'MecTargetChamber.0:Opal8000.0'
,'MecTargetChamber.0:Opal8000.1'
]
self.srcs_orcafl40 = [
'XcsEndstation.0:OrcaFl40.0'
,'XppEndstation.0:OrcaFl40.0'
]
self.srcs_epix = [
'NoDetector.0:Epix.0'
,'XcsEndstation.0:Epix.0'
,'XcsEndstation.0:Epix.1'
]
self.srcs_epix10k = [
'NoDetector.0:Epix10k.0'
]
self.srcs_epix100a = [
'NoDetector.0:Epix100a.0'
]
self.srcs_fccd960 = [
'XcsEndstation.0:Fccd960.0'
]
self.srcs_acqiris = [
'AmoETOF.0:Acqiris.0'
,'AmoITOF.0:Acqiris.0'
,'Camp.0:Acqiris.0'
,'CxiEndstation.0:Acqiris.0'
,'CxiSc1.0:Acqiris.0'
,'MecTargetChamber.0:Acqiris.0'
,'SxrEndstation.0:Acqiris.0'
,'SxrEndstation.0:Acqiris.1'
,'SxrEndstation.0:Acqiris.2'
,'SxrEndstation.0:Acqiris.3'
,'SxrEndstation.0:Acqiris.4'
,'XcsBeamline.0:Acqiris.0'
,'XppLas.0:Acqiris.0'
]
self.dict_of_det_sources = dict( zip(self.list_of_dets, [ self.srcs_cspad
,self.srcs_cspad2x2
,self.srcs_princeton
,self.srcs_pnccd
,self.srcs_tm6740
,self.srcs_opal1000
,self.srcs_opal2000
,self.srcs_opal4000
,self.srcs_opal8000
,self.srcs_orcafl40
,self.srcs_epix
,self.srcs_epix10k
,self.srcs_epix100a
,self.srcs_fccd960
,self.srcs_acqiris
]) )
self.dict_of_metrology_scripts = dict( zip(self.list_of_dets, [ ['CSPADV1', 'CSPADV2']
,['CSPAD2X2V1']
,['PRINCETONV1']
,['PNCCDV1']
,['TM6740V1']
,['OPAL1000V1']
,['OPAL2000V1']
,['OPAL4000V1']
,['OPAL8000V1']
,['ORCAFL40V1']
,['EPIXV1']
,['EPIX10KV1']
,['EPIX100AV1']
,['FCCD960V1']
,['ACQIRISV1']
]) )
#-----------------------------
self.list_of_det_pars = zip(self.list_of_dets, self.list_of_data_types, self.det_cbx_states_list)
#-----------------------------
def list_of_dets_selected( self ) :
return [det for det,state in zip(self.list_of_dets,self.det_cbx_states_list) if state.value()]
#-----------------------------
def print_dict_of_det_data_types ( self ) :
print 'List of detector names and associated types:'
for det, type in self.dict_of_det_data_types.items():
print '%10s : %s' % (det, type)
def print_dict_of_det_calib_types ( self ) :
print 'List of detector names and associated calibration types:'
for det, type in self.dict_of_det_calib_types.items():
print '%10s : %s' % (det, type)
#-----------------------------
def defineStyles( self ) :
self.styleYellowish = "background-color: rgb(255, 255, 220); color: rgb(0, 0, 0);" # Yellowish
self.stylePink = "background-color: rgb(255, 200, 220); color: rgb(0, 0, 0);" # Pinkish
self.styleYellowBkg = "background-color: rgb(240, 240, 100); color: rgb(0, 0, 0);" # YellowBkg
self.styleGreenMy = "background-color: rgb(150, 250, 230); color: rgb(0, 0, 0);" # My
self.styleGray = "background-color: rgb(230, 240, 230); color: rgb(0, 0, 0);" # Gray
self.styleGreenish = "background-color: rgb(100, 240, 200); color: rgb(0, 0, 0);" # Greenish
self.styleGreenPure = "background-color: rgb(150, 255, 150); color: rgb(0, 0, 0);" # Green
self.styleBluish = "background-color: rgb(220, 220, 250); color: rgb(0, 0, 0);" # Bluish
self.styleWhite = "background-color: rgb(255, 255, 255); color: rgb(0, 0, 0);"
self.styleRedBkgd = "background-color: rgb(255, 0, 0); color: rgb(0, 0, 0);" # Red background
self.styleReddish = "background-color: rgb(220, 0, 0); color: rgb(0, 0, 0);" # Reddish background
self.styleTransp = "background-color: rgb(255, 0, 0, 100);"
#self.styleDefault = "background-color: rgb(239, 235, 231, 255); color: rgb(0, 0, 0);" # Gray bkgd
self.styleDefault = ""
#self.styleTitle = "color: rgb(150, 160, 100);"
self.styleBlue = "color: rgb(100, 0, 150);"
self.styleBuriy = "color: rgb(150, 100, 50);"
self.styleRed = "color: rgb(255, 0, 0);"
self.styleGreen = "color: rgb(0, 150, 0);"
self.styleYellow = "color: rgb(0, 150, 150);"
#self.styleBkgd = self.styleGreenMy # styleYellowish
self.styleBkgd = self.styleDefault
self.styleTitle = self.styleBuriy
self.styleLabel = self.styleBlue
self.styleEdit = self.styleWhite
self.styleEditInfo = self.styleBkgd # self.styleGreenish
#self.styleEditInfo = self.styleGreenish # Bluish
self.styleEditBad = self.styleRedBkgd
self.styleButton = self.styleGray
self.styleButtonLeft = self.styleButton + 'text-align: left;'
self.styleButtonOn = self.styleBluish
self.styleButtonClose = self.stylePink
self.styleButtonWarning= self.styleYellowBkg
self.styleButtonGood = self.styleGreenPure
#self.styleButtonBad = self.stylePink
self.styleButtonBad = self.styleReddish
self.styleBox = self.styleGray
self.styleCBox = self.styleYellowish
self.styleStatusGood = self.styleGreen
self.styleStatusWarning= self.styleYellow
self.styleStatusAlarm = self.styleRed
self.styleTitleBold = self.styleTitle + 'font-size: 18pt; font-family: Courier; font-weight: bold;'
self.styleWhiteFixed = self.styleWhite + 'font-family: Fixed;'
self.colorEditInfo = QtGui.QColor(100, 255, 200)
self.colorEditBad = QtGui.QColor(255, 0, 0)
self.colorEdit = QtGui.QColor('white')
self.colorTabItem = QtGui.QColor('white')
self.styleTitleInFrame = self.styleWhite # self.styleDefault # self.styleWhite # self.styleGray
def printParsDirectly( self ) :
logger.info('Direct use of parameter:' + self.fname_ped.name() + ' ' + self.fname_ped.value(), self.name )
logger.info('Direct use of parameter:' + self.fname_dat.name() + ' ' + self.fname_dat.value(), self.name )
def close( self ) :
if self.save_cp_at_exit.value() :
fname = self.fname_cp
logger.info('save configuration parameters in file: %s' % fname, __name__)
self.saveParametersInFile( fname )
#-----------------------------
confpars = ConfigParametersForApp ()
cp = confpars
#-----------------------------
def test_ConfigParametersForApp() :
confpars.printParameters()
#confpars.printParsDirectly()
confpars.saveParametersInFile()
#-----------------------------
if __name__ == "__main__" :
test_ConfigParametersForApp()
sys.exit (0)
#-----------------------------
| [
"[email protected]@b967ad99-d558-0410-b138-e0f6c56caec7"
]
| [email protected]@b967ad99-d558-0410-b138-e0f6c56caec7 |
ffaa5f02e39b29398daa68fa9fb34f9b4ddb956e | 9b11e49cbb9120f3f7e69a8884c0cee42896566d | /Hyperbola_search_part1.py | 7c39171ec6408d181e5a721ab55f1b089259ad66 | []
| no_license | albusdemens/Backscattering_3DND | e6966cd8c39342181183de20028b959227a5f570 | f911f1f9f9bf863daffe2bcc85bd7d7a94e5b3c7 | refs/heads/master | 2021-01-21T13:53:17.469879 | 2016-06-27T10:51:45 | 2016-06-27T10:51:45 | 44,485,363 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,527 | py | # Alberto Cereser, 4 Feb 2014
# [email protected], Technical University of Denmark
# For each point, this script looks for its successor
# For the location of the cutouts, the code follows the approach described in
# http://scikit-image.org/docs/dev/auto_examples/plot_template.html
import sys
#import pandas as pd
import numpy as np
from numpy import ndarray
import math
import matplotlib.pyplot as plt
from matplotlib.pyplot import *
from skimage import data
from skimage.feature import match_template
import os
if __name__ == '__main__':
# I start reading in the list of diffraction spots data made using Peak_analyzer_simple_neighbours.py
input_filename = "Fe_PSI_spt_refined.txt"
input_file = open(input_filename, 'r')
# Here are the variables to change, depending on the number of projections considered
# and on the lines of the txt file (Fe_PSI_spt_refined.txt)
Number_of_projections = 181
Number_of_lines_in_txt = 3493
counter_array = []
correlation_threshold_value = 0.7
    # I make a square mask, so as to ignore the central black region
X_mask = 1135
Y_mask = 1251
output_filename = ("/Users/Alberto/Documents/Data_analysis/ICON_Aug2013/Data_analysis/Python_code/Fe_PSI_spt_tagged_15apr.txt")
for i in range(2, (Number_of_projections + 1)):
cmd = 'python Hyperbola_search_part2.py %i %s %s %i %i %f' % (i, input_filename, output_filename, Number_of_projections, Number_of_lines_in_txt, correlation_threshold_value)
os.system(cmd)
# filename_cutouts_combined = ("/Users/Alberto/Documents/Data_analysis/ICON_Aug2013/Data_analysis/cutouts_combined/cutouts_combined_%03i.txt" % (i))
# filename_cutouts_combined_tag = ("/Users/Alberto/Documents/Data_analysis/ICON_Aug2013/Data_analysis/cutouts_combined/cutouts_combined_tag_%03i.txt" % (i))
# image = np.loadtxt(filename_cutouts_combined)
# image_tagged = np.loadtxt(filename_cutouts_combined_tag)
# for line in input_file:
# line_elements = line.split()
# Angle_number = int(line_elements[0])
# Omega = float(line_elements[1])
# Intensity = float(line_elements[2])
# X = float(line_elements[3])
# Y = float(line_elements[4])
# Address = str(line_elements[5])
# ID = int(line_elements[6])
# print i, Angle_number
#if ((Angle_number + 1) == i): # we map cutouts in the following
#cutout = np.loadtxt(Address)
#index = Angle_number #+ 1
#array_cutout = np.array(cutout)
#array_image = np.array(image)
#correlation = match_template(image, cutout)
#ij = np.unravel_index(np.argmax(correlation), correlation.shape)
#x, y = ij[::-1]
#ran = array_cutout.shape
#ran_y = ran[1]
#ran_x = ran[0]
#x_center = x + (ran_x/2)
#y_center = y + (ran_y/2)
#print x_center, y_center
# To do: insert case when spot in central square
# We now calculate the distance between the cutout center (in Omega) and the point we found
# (in Omega + 1)
#distance = math.sqrt((x_center - X)**2 + (y_center - Y)**2)
#print i
#if distance < 200: # We search that the two points are not too far away
# if (np.amax(correlation) > correlation_threshold_value):
# # We need now to find the cutout which is closer to the point where we located
# # The center of the cutout
# tag = image_tagged[y_center, x_center]
# else:
# tag = 0
# print distance, tag
#f = open(output_file, "a+")
#f.write("%i %f %f %f %f %s %f %i %i\n" % (Angle_number, Omega, Intensity, X, Y, Address, np.amax(correlation), ID, int(tag)))
#f.close
input_file.close() | [
"[email protected]"
]
| |
1568ebac3d05d96cad143aed5faa28b55aed2fbf | 6b6e20004b46165595f35b5789e7426d5289ea48 | /data/archivedlogs.py | 0172c74c8c3e951453a1aadc5a5a85a31982dc97 | [
"Apache-2.0"
]
| permissive | anwarchk/quay | 2a83d0ab65aff6a1120fbf3a45dd72f42211633b | 23c5120790c619174e7d36784ca5aab7f4eece5c | refs/heads/master | 2020-09-12T18:53:21.093606 | 2019-11-15T19:29:02 | 2019-11-15T19:29:02 | 222,517,145 | 0 | 0 | Apache-2.0 | 2019-11-18T18:32:35 | 2019-11-18T18:32:35 | null | UTF-8 | Python | false | false | 1,038 | py | import logging
from util.registry.gzipinputstream import GzipInputStream
from flask import send_file, abort
from data.userfiles import DelegateUserfiles, UserfilesHandlers
JSON_MIMETYPE = 'application/json'
logger = logging.getLogger(__name__)
class LogArchive(object):
def __init__(self, app=None, distributed_storage=None):
self.app = app
if app is not None:
self.state = self.init_app(app, distributed_storage)
else:
self.state = None
def init_app(self, app, distributed_storage):
location = app.config.get('LOG_ARCHIVE_LOCATION')
path = app.config.get('LOG_ARCHIVE_PATH', None)
handler_name = 'web.logarchive'
log_archive = DelegateUserfiles(app, distributed_storage, location, path,
handler_name=handler_name)
# register extension with app
app.extensions = getattr(app, 'extensions', {})
app.extensions['log_archive'] = log_archive
return log_archive
def __getattr__(self, name):
return getattr(self.state, name, None)
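# Minimal wiring sketch (the Flask app and storage objects are assumed, and so
# is the exact DelegateUserfiles API that attribute access falls through to):
#
#   log_archive = LogArchive(app, distributed_storage)
#   log_archive.store_file(...)  # resolved on the DelegateUserfiles state via __getattr__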
| [
"[email protected]"
]
| |
5b47ee2fdfef62a928d08f196124943124e29eaf | f0c402d3858f0643561886797578b1e64655b1b3 | /py/riscv/exception_handlers/EnvironmentCallHandler.py | f7d7320626d9fcd3c808a7a8d10bd79f12fbd112 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | Leo-Wang-JL/force-riscv | 39ad2a72abd814df4b63879ce9825b6b06a9391a | deee6acaaee092eb90ac2538de122303334e5be3 | refs/heads/master | 2023-01-28T00:06:58.135651 | 2020-11-18T02:54:10 | 2020-11-18T02:54:10 | 271,873,013 | 0 | 0 | NOASSERTION | 2020-06-28T00:51:26 | 2020-06-12T19:15:26 | C++ | UTF-8 | Python | false | false | 7,073 | py | #
# Copyright (C) [2020] Futurewei Technologies, Inc.
#
# FORCE-RISCV is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR
# FIT FOR A PARTICULAR PURPOSE.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from base.exception_handlers.ReusableSequence import ReusableSequence
from riscv.PrivilegeLevel import PrivilegeLevelRISCV
from riscv.exception_handlers.ExceptionHandlerContext import RegisterCallRole
class EnvironmentCallHandlerRISCV(ReusableSequence):
def __init__(self, aGenThread, aFactory, aStack):
super().__init__(aGenThread, aFactory, aStack)
self.mDataBlockAddrRegIndex = None
self.mActionCodeRegIndex = None
def generateHandler(self, **kwargs):
try:
handler_context = kwargs['handler_context']
except KeyError:
self.error('INTERNAL ERROR: one or more arguments to EnvironmentCallHandlerRISCV generate method missing.')
self.debug('[EnvironmentCallHandlerRISCV] generate handler address: 0x%x' % self.getPEstate('PC'))
self.mAssemblyHelper.clearLabels('EnvironmentCallHandlerRISCV')
(_, self.mActionCodeRegIndex) = handler_context.getScratchRegisterIndices(RegisterCallRole.ARGUMENT, 2)
priv_level_reg_index = handler_context.getScratchRegisterIndices(RegisterCallRole.PRIV_LEVEL_VALUE)
scratch_reg_index = handler_context.getScratchRegisterIndices(RegisterCallRole.TEMPORARY, 1)
# Action Code 1: Return to S Mode
self.mAssemblyHelper.genMoveImmediate(scratch_reg_index, 1)
self.mAssemblyHelper.genConditionalBranchToLabel(self.mActionCodeRegIndex, scratch_reg_index, 8, 'EQ', 'RETURN_TO_S_MODE')
# Action Code 2: Load From Data Block
self.mAssemblyHelper.genMoveImmediate(scratch_reg_index, 2)
self.mAssemblyHelper.genConditionalBranchToLabel(self.mActionCodeRegIndex, scratch_reg_index, 48, 'EQ', 'LOAD_FROM_DATA_BLOCK')
# All other action codes: Skip instruction and return
self.mAssemblyHelper.genRelativeBranchToLabel(78, 'SKIP_INSTRUCTION')
self.mAssemblyHelper.addLabel('RETURN_TO_S_MODE')
self._genReturnToSMode(handler_context)
self.mAssemblyHelper.addLabel('LOAD_FROM_DATA_BLOCK')
self._genLoadRegistersFromDataBlock(handler_context)
self.mAssemblyHelper.addLabel('SKIP_INSTRUCTION')
self.mAssemblyHelper.genIncrementExceptionReturnAddress(scratch_reg_index, priv_level_reg_index)
self.mAssemblyHelper.addLabel('RETURN')
self.mAssemblyHelper.genReturn()
## Generate instructions to return to S Mode using the first data block entry as the return
# address.
#
# @param aHandlerContext The exception handler context from which register indices can be
# retrieved by role.
def _genReturnToSMode(self, aHandlerContext):
(self.mDataBlockAddrRegIndex, _) = aHandlerContext.getScratchRegisterIndices(RegisterCallRole.ARGUMENT, 2)
priv_level_reg_index = aHandlerContext.getScratchRegisterIndices(RegisterCallRole.PRIV_LEVEL_VALUE)
(scratch_reg_index, xstatus_reg_index, inverse_mask_reg_index) = aHandlerContext.getScratchRegisterIndices(RegisterCallRole.TEMPORARY, 3)
for priv_level in self.mAssemblyHelper.genPrivilegeLevelInstructions(aPrivLevels=tuple(PrivilegeLevelRISCV)[1:], aInstrCountPerLevel=9, aScratchRegIndex=scratch_reg_index, aPrivLevelRegIndex=priv_level_reg_index):
self.mAssemblyHelper.genReadSystemRegister(xstatus_reg_index, ('%sstatus' % priv_level.name.lower()))
self.mAssemblyHelper.genMoveImmediate(scratch_reg_index, 1)
if priv_level == PrivilegeLevelRISCV.S:
self.mAssemblyHelper.genShiftLeftImmediate(scratch_reg_index, 8)
elif priv_level == PrivilegeLevelRISCV.M:
self.mAssemblyHelper.genShiftLeftImmediate(scratch_reg_index, 11)
self.mAssemblyHelper.genNotRegister(inverse_mask_reg_index, aSrcRegIndex=scratch_reg_index)
self.mAssemblyHelper.genAndRegister(xstatus_reg_index, inverse_mask_reg_index)
self.mAssemblyHelper.genOrRegister(xstatus_reg_index, scratch_reg_index)
self.mAssemblyHelper.genWriteSystemRegister(('%sstatus' % priv_level.name.lower()), xstatus_reg_index)
self.genInstruction('LD##RISCV', {'rd': scratch_reg_index, 'rs1': self.mDataBlockAddrRegIndex, 'simm12': 0, 'NoRestriction': 1})
self.mAssemblyHelper.genWriteSystemRegister(('%sepc' % priv_level.name.lower()), scratch_reg_index)
self.mAssemblyHelper.genRelativeBranchToLabel(52, 'RETURN')
## Generate instructions to load CSRs using values from the data block.
#
# @param aHandlerContext The exception handler context from which register indices can be
# retrieved by role.
def _genLoadRegistersFromDataBlock(self, aHandlerContext):
# The data block should hold values for the following sequence of registers: xstatus, xepc,
# satp, action code register, data block address register
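        # Doubleword layout consumed below (byte offsets):
        #   0: xstatus   8: xepc   16: satp   24: next action code   32: next data block address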
(self.mDataBlockAddrRegIndex, _) = aHandlerContext.getScratchRegisterIndices(RegisterCallRole.ARGUMENT, 2)
priv_level_reg_index = aHandlerContext.getScratchRegisterIndices(RegisterCallRole.PRIV_LEVEL_VALUE)
scratch_reg_index = aHandlerContext.getScratchRegisterIndices(RegisterCallRole.TEMPORARY, 1)
for priv_level in self.mAssemblyHelper.genPrivilegeLevelInstructions(aPrivLevels=tuple(PrivilegeLevelRISCV)[1:], aInstrCountPerLevel=4, aScratchRegIndex=scratch_reg_index, aPrivLevelRegIndex=priv_level_reg_index):
self.genInstruction('LD##RISCV', {'rd': scratch_reg_index, 'rs1': self.mDataBlockAddrRegIndex, 'simm12': 0, 'NoRestriction': 1})
self.mAssemblyHelper.genWriteSystemRegister(('%sstatus' % priv_level.name.lower()), scratch_reg_index)
self.genInstruction('LD##RISCV', {'rd': scratch_reg_index, 'rs1': self.mDataBlockAddrRegIndex, 'simm12': 8, 'NoRestriction': 1})
self.mAssemblyHelper.genWriteSystemRegister(('%sepc' % priv_level.name.lower()), scratch_reg_index)
self.genInstruction('LD##RISCV', {'rd': scratch_reg_index, 'rs1': self.mDataBlockAddrRegIndex, 'simm12': 16, 'NoRestriction': 1})
self.mAssemblyHelper.genWriteSystemRegister('satp', scratch_reg_index)
self.genInstruction('LD##RISCV', {'rd': self.mActionCodeRegIndex, 'rs1': self.mDataBlockAddrRegIndex, 'simm12': 24, 'NoRestriction': 1})
self.genInstruction('LD##RISCV', {'rd': self.mDataBlockAddrRegIndex, 'rs1': self.mDataBlockAddrRegIndex, 'simm12': 32, 'NoRestriction': 1})
self.mAssemblyHelper.genRelativeBranchToLabel(20, 'RETURN')
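# Reference sketch (inferred from the LD offsets used above; an assumption, not
# part of the generated handler): the data block consumed by
# _genLoadRegistersFromDataBlock is laid out as five 8-byte slots --
#   offset  0: <x>status value
#   offset  8: <x>epc value
#   offset 16: satp value
#   offset 24: action code register value
#   offset 32: next data block address register value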
| [
"[email protected]"
]
| |
c67f1cf56af12a5f1b00dfeda2daebc7d6a86dd6 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/network/get_express_route_circuit_peering.py | 325f529ed85bffafc77adce6623c016101deda54 | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,085 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
__all__ = [
'GetExpressRouteCircuitPeeringResult',
'AwaitableGetExpressRouteCircuitPeeringResult',
'get_express_route_circuit_peering',
'get_express_route_circuit_peering_output',
]
@pulumi.output_type
class GetExpressRouteCircuitPeeringResult:
"""
Peering in an ExpressRouteCircuit resource.
"""
def __init__(__self__, azure_asn=None, connections=None, etag=None, express_route_connection=None, gateway_manager_etag=None, id=None, ipv6_peering_config=None, last_modified_by=None, microsoft_peering_config=None, name=None, peer_asn=None, peered_connections=None, peering_type=None, primary_azure_port=None, primary_peer_address_prefix=None, provisioning_state=None, route_filter=None, secondary_azure_port=None, secondary_peer_address_prefix=None, shared_key=None, state=None, stats=None, type=None, vlan_id=None):
if azure_asn and not isinstance(azure_asn, int):
raise TypeError("Expected argument 'azure_asn' to be a int")
pulumi.set(__self__, "azure_asn", azure_asn)
if connections and not isinstance(connections, list):
raise TypeError("Expected argument 'connections' to be a list")
pulumi.set(__self__, "connections", connections)
if etag and not isinstance(etag, str):
raise TypeError("Expected argument 'etag' to be a str")
pulumi.set(__self__, "etag", etag)
if express_route_connection and not isinstance(express_route_connection, dict):
raise TypeError("Expected argument 'express_route_connection' to be a dict")
pulumi.set(__self__, "express_route_connection", express_route_connection)
if gateway_manager_etag and not isinstance(gateway_manager_etag, str):
raise TypeError("Expected argument 'gateway_manager_etag' to be a str")
pulumi.set(__self__, "gateway_manager_etag", gateway_manager_etag)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if ipv6_peering_config and not isinstance(ipv6_peering_config, dict):
raise TypeError("Expected argument 'ipv6_peering_config' to be a dict")
pulumi.set(__self__, "ipv6_peering_config", ipv6_peering_config)
if last_modified_by and not isinstance(last_modified_by, str):
raise TypeError("Expected argument 'last_modified_by' to be a str")
pulumi.set(__self__, "last_modified_by", last_modified_by)
if microsoft_peering_config and not isinstance(microsoft_peering_config, dict):
raise TypeError("Expected argument 'microsoft_peering_config' to be a dict")
pulumi.set(__self__, "microsoft_peering_config", microsoft_peering_config)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if peer_asn and not isinstance(peer_asn, float):
raise TypeError("Expected argument 'peer_asn' to be a float")
pulumi.set(__self__, "peer_asn", peer_asn)
if peered_connections and not isinstance(peered_connections, list):
raise TypeError("Expected argument 'peered_connections' to be a list")
pulumi.set(__self__, "peered_connections", peered_connections)
if peering_type and not isinstance(peering_type, str):
raise TypeError("Expected argument 'peering_type' to be a str")
pulumi.set(__self__, "peering_type", peering_type)
if primary_azure_port and not isinstance(primary_azure_port, str):
raise TypeError("Expected argument 'primary_azure_port' to be a str")
pulumi.set(__self__, "primary_azure_port", primary_azure_port)
if primary_peer_address_prefix and not isinstance(primary_peer_address_prefix, str):
raise TypeError("Expected argument 'primary_peer_address_prefix' to be a str")
pulumi.set(__self__, "primary_peer_address_prefix", primary_peer_address_prefix)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if route_filter and not isinstance(route_filter, dict):
raise TypeError("Expected argument 'route_filter' to be a dict")
pulumi.set(__self__, "route_filter", route_filter)
if secondary_azure_port and not isinstance(secondary_azure_port, str):
raise TypeError("Expected argument 'secondary_azure_port' to be a str")
pulumi.set(__self__, "secondary_azure_port", secondary_azure_port)
if secondary_peer_address_prefix and not isinstance(secondary_peer_address_prefix, str):
raise TypeError("Expected argument 'secondary_peer_address_prefix' to be a str")
pulumi.set(__self__, "secondary_peer_address_prefix", secondary_peer_address_prefix)
if shared_key and not isinstance(shared_key, str):
raise TypeError("Expected argument 'shared_key' to be a str")
pulumi.set(__self__, "shared_key", shared_key)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if stats and not isinstance(stats, dict):
raise TypeError("Expected argument 'stats' to be a dict")
pulumi.set(__self__, "stats", stats)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if vlan_id and not isinstance(vlan_id, int):
raise TypeError("Expected argument 'vlan_id' to be a int")
pulumi.set(__self__, "vlan_id", vlan_id)
@property
@pulumi.getter(name="azureASN")
def azure_asn(self) -> Optional[int]:
"""
The Azure ASN.
"""
return pulumi.get(self, "azure_asn")
@property
@pulumi.getter
def connections(self) -> Optional[Sequence['outputs.ExpressRouteCircuitConnectionResponse']]:
"""
The list of circuit connections associated with Azure Private Peering for this circuit.
"""
return pulumi.get(self, "connections")
@property
@pulumi.getter
def etag(self) -> str:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="expressRouteConnection")
def express_route_connection(self) -> Optional['outputs.ExpressRouteConnectionIdResponse']:
"""
The ExpressRoute connection.
"""
return pulumi.get(self, "express_route_connection")
@property
@pulumi.getter(name="gatewayManagerEtag")
def gateway_manager_etag(self) -> Optional[str]:
"""
The GatewayManager Etag.
"""
return pulumi.get(self, "gateway_manager_etag")
@property
@pulumi.getter
def id(self) -> Optional[str]:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="ipv6PeeringConfig")
def ipv6_peering_config(self) -> Optional['outputs.Ipv6ExpressRouteCircuitPeeringConfigResponse']:
"""
The IPv6 peering configuration.
"""
return pulumi.get(self, "ipv6_peering_config")
@property
@pulumi.getter(name="lastModifiedBy")
def last_modified_by(self) -> str:
"""
Who was the last to modify the peering.
"""
return pulumi.get(self, "last_modified_by")
@property
@pulumi.getter(name="microsoftPeeringConfig")
def microsoft_peering_config(self) -> Optional['outputs.ExpressRouteCircuitPeeringConfigResponse']:
"""
The Microsoft peering configuration.
"""
return pulumi.get(self, "microsoft_peering_config")
@property
@pulumi.getter
def name(self) -> Optional[str]:
"""
The name of the resource that is unique within a resource group. This name can be used to access the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="peerASN")
def peer_asn(self) -> Optional[float]:
"""
The peer ASN.
"""
return pulumi.get(self, "peer_asn")
@property
@pulumi.getter(name="peeredConnections")
def peered_connections(self) -> Sequence['outputs.PeerExpressRouteCircuitConnectionResponse']:
"""
The list of peered circuit connections associated with Azure Private Peering for this circuit.
"""
return pulumi.get(self, "peered_connections")
@property
@pulumi.getter(name="peeringType")
def peering_type(self) -> Optional[str]:
"""
The peering type.
"""
return pulumi.get(self, "peering_type")
@property
@pulumi.getter(name="primaryAzurePort")
def primary_azure_port(self) -> Optional[str]:
"""
The primary port.
"""
return pulumi.get(self, "primary_azure_port")
@property
@pulumi.getter(name="primaryPeerAddressPrefix")
def primary_peer_address_prefix(self) -> Optional[str]:
"""
The primary address prefix.
"""
return pulumi.get(self, "primary_peer_address_prefix")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning state of the express route circuit peering resource.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="routeFilter")
def route_filter(self) -> Optional['outputs.SubResourceResponse']:
"""
The reference to the RouteFilter resource.
"""
return pulumi.get(self, "route_filter")
@property
@pulumi.getter(name="secondaryAzurePort")
def secondary_azure_port(self) -> Optional[str]:
"""
The secondary port.
"""
return pulumi.get(self, "secondary_azure_port")
@property
@pulumi.getter(name="secondaryPeerAddressPrefix")
def secondary_peer_address_prefix(self) -> Optional[str]:
"""
The secondary address prefix.
"""
return pulumi.get(self, "secondary_peer_address_prefix")
@property
@pulumi.getter(name="sharedKey")
def shared_key(self) -> Optional[str]:
"""
The shared key.
"""
return pulumi.get(self, "shared_key")
@property
@pulumi.getter
def state(self) -> Optional[str]:
"""
The peering state.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter
def stats(self) -> Optional['outputs.ExpressRouteCircuitStatsResponse']:
"""
The peering stats of express route circuit.
"""
return pulumi.get(self, "stats")
@property
@pulumi.getter
def type(self) -> str:
"""
Type of the resource.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="vlanId")
def vlan_id(self) -> Optional[int]:
"""
The VLAN ID.
"""
return pulumi.get(self, "vlan_id")
class AwaitableGetExpressRouteCircuitPeeringResult(GetExpressRouteCircuitPeeringResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetExpressRouteCircuitPeeringResult(
azure_asn=self.azure_asn,
connections=self.connections,
etag=self.etag,
express_route_connection=self.express_route_connection,
gateway_manager_etag=self.gateway_manager_etag,
id=self.id,
ipv6_peering_config=self.ipv6_peering_config,
last_modified_by=self.last_modified_by,
microsoft_peering_config=self.microsoft_peering_config,
name=self.name,
peer_asn=self.peer_asn,
peered_connections=self.peered_connections,
peering_type=self.peering_type,
primary_azure_port=self.primary_azure_port,
primary_peer_address_prefix=self.primary_peer_address_prefix,
provisioning_state=self.provisioning_state,
route_filter=self.route_filter,
secondary_azure_port=self.secondary_azure_port,
secondary_peer_address_prefix=self.secondary_peer_address_prefix,
shared_key=self.shared_key,
state=self.state,
stats=self.stats,
type=self.type,
vlan_id=self.vlan_id)
def get_express_route_circuit_peering(circuit_name: Optional[str] = None,
peering_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetExpressRouteCircuitPeeringResult:
"""
Peering in an ExpressRouteCircuit resource.
API Version: 2020-11-01.
:param str circuit_name: The name of the express route circuit.
:param str peering_name: The name of the peering.
:param str resource_group_name: The name of the resource group.
"""
__args__ = dict()
__args__['circuitName'] = circuit_name
__args__['peeringName'] = peering_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:network:getExpressRouteCircuitPeering', __args__, opts=opts, typ=GetExpressRouteCircuitPeeringResult).value
return AwaitableGetExpressRouteCircuitPeeringResult(
azure_asn=__ret__.azure_asn,
connections=__ret__.connections,
etag=__ret__.etag,
express_route_connection=__ret__.express_route_connection,
gateway_manager_etag=__ret__.gateway_manager_etag,
id=__ret__.id,
ipv6_peering_config=__ret__.ipv6_peering_config,
last_modified_by=__ret__.last_modified_by,
microsoft_peering_config=__ret__.microsoft_peering_config,
name=__ret__.name,
peer_asn=__ret__.peer_asn,
peered_connections=__ret__.peered_connections,
peering_type=__ret__.peering_type,
primary_azure_port=__ret__.primary_azure_port,
primary_peer_address_prefix=__ret__.primary_peer_address_prefix,
provisioning_state=__ret__.provisioning_state,
route_filter=__ret__.route_filter,
secondary_azure_port=__ret__.secondary_azure_port,
secondary_peer_address_prefix=__ret__.secondary_peer_address_prefix,
shared_key=__ret__.shared_key,
state=__ret__.state,
stats=__ret__.stats,
type=__ret__.type,
vlan_id=__ret__.vlan_id)
@_utilities.lift_output_func(get_express_route_circuit_peering)
def get_express_route_circuit_peering_output(circuit_name: Optional[pulumi.Input[str]] = None,
peering_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetExpressRouteCircuitPeeringResult]:
"""
Peering in an ExpressRouteCircuit resource.
API Version: 2020-11-01.
:param str circuit_name: The name of the express route circuit.
:param str peering_name: The name of the peering.
:param str resource_group_name: The name of the resource group.
"""
...
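# Hypothetical usage sketch (the circuit, peering and resource-group names are
# illustrative assumptions, not values from this SDK):
#
#   peering = get_express_route_circuit_peering(
#       circuit_name='example-circuit',
#       peering_name='AzurePrivatePeering',
#       resource_group_name='example-rg')
#   pulumi.export('peeringState', peering.state)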
| [
"[email protected]"
]
| |
e00fbc256f3ed06f3244641137c2c4a0e5b73e33 | 05d692469305dd1adb9ebc46080525bb4515b424 | /Exception handling/aritherror2.py | c75466829af775917e307f139c966ffebef05188 | []
| no_license | rajdharmkar/pythoncode | 979805bc0e672f123ca1460644a4bd71d7854fd5 | 15b758d373f27da5680a711bf12c07e86758c447 | refs/heads/master | 2020-08-07T18:30:55.575632 | 2019-10-14T12:46:09 | 2019-10-14T12:46:09 | 213,551,766 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | try:
    a = 0 / 19  # evaluates to 0 -- dividing zero by a nonzero number raises nothing
    print a
except ArithmeticError:
    print "This statement is raising an exception"
else:
    print "Welcome"  # runs because the try block completed without an exception
#syntax
#
# try:
# #run code
# except exception/error name1:
# #run code
# except exception/error name2:
# # run code
# else:
# # run code | [
"[email protected]"
]
| |
98cd0f98537d4de5abe64aee34a9cc391d8459f8 | 106ddccf8f19ca2dcdde9bc455a230f144222493 | /remoview/settings.py | 63e6d00fb8b857afbe3a1a4d5facf7637551f70d | []
| no_license | Simeon2001/dsc-backend-project | b7cea249bf0855af53fd1e189371474bfeeec590 | 96069df96c22973ce00ace9d043475ff326086ab | refs/heads/main | 2023-01-09T08:57:04.846997 | 2020-11-12T16:38:16 | 2020-11-12T16:38:16 | 312,234,502 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,352 | py | """
Django settings for remoview project.
Generated by 'django-admin startproject' using Django 3.0.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'fetw8yeqk_m&738-5s^#3+h2x*!1yag@%8&9xw3l)1^9b*o(&)'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1','removieww.herokuapp.com']
# Application definition
INSTALLED_APPS = [
'movie',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'remoview.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'remoview.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'movie/static'),
)
if os.getcwd() == '/app':
    DEBUG = False | [
"[email protected]"
]
| |
4bb6112541cc85b424d4faf0558bc75faaa26289 | 670f4ba8ded99b420c3454c6ae35789667880cc8 | /tobiko/openstack/openstackclient/_port.py | b08f02ffe1daafec91e5b874d0db5f8185c53ddc | [
"Apache-2.0"
]
| permissive | FedericoRessi/tobiko | 892db522198ab48380892138459d801c4bd00efa | ce2a8734f8b4203ec38078207297062263c49f6f | refs/heads/master | 2022-07-26T22:52:10.273883 | 2022-07-20T20:04:43 | 2022-07-20T20:04:43 | 145,856,925 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,725 | py | # Copyright (c) 2020 Red Hat, Inc.
#
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from tobiko.openstack.openstackclient import _client
def port_list(*args, **kwargs):
cmd = 'openstack port list {params}'
kwargs['format'] = 'json'
return _client.execute(cmd, *args, **kwargs)
def port_show(port, *args, **kwargs):
cmd = f'openstack port show {{params}} {port}'
kwargs['format'] = 'json'
return _client.execute(cmd, *args, **kwargs)
def port_create(port_name, network_name, *args, **kwargs):
cmd = f'openstack port create {{params}} --network {network_name} '\
f'{port_name}'
kwargs['format'] = 'json'
return _client.execute(cmd, *args, **kwargs)
def port_delete(ports, *args, **kwargs):
cmd = f'openstack port delete {{params}} {" ".join(ports)}'
return _client.execute(cmd, *args, **kwargs)
def port_set(port, *args, **kwargs):
cmd = f'openstack port set {{params}} {port}'
return _client.execute(cmd, *args, **kwargs)
def port_unset(port, *args, **kwargs):
cmd = f'openstack port unset {{params}} {port}'
return _client.execute(cmd, *args, **kwargs)
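# Hypothetical usage sketch (names are illustrative assumptions; every helper
# above shells out to the openstack CLI through _client.execute):
#
#   port = port_create('example-port', 'example-net')
#   details = port_show(port['id'])
#   port_delete([port['id']])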
| [
"[email protected]"
]
| |
d124e275a16bc01dc86309b27e0b3fe2746089f4 | 9aa5c69ec4ae4c1a54028f6add3327929d81b929 | /nodes/camnode | c8c7ba947819137cd6d9dda5703fa04f085ab012 | []
| no_license | strawlab/ros_flydra | 818fed6cdc9ed8afd17950ea5de7595e91a0483e | 0a922c24235d971b665461db6151f5867eee5870 | refs/heads/master | 2021-01-01T16:06:00.097118 | 2018-01-19T16:52:20 | 2018-01-19T16:52:20 | 4,662,078 | 0 | 2 | null | 2015-04-17T11:46:43 | 2012-06-14T10:40:47 | null | UTF-8 | Python | false | false | 635 | #!/usr/bin/env python
import threading
try:
import flydra.camnode
except ImportError:
import sys
import os.path
sys.path.insert(0, os.path.expanduser("~/flydra.git"))
import flydra.camnode
import roslib; roslib.load_manifest('rospy')
import rospy
def main():
rospy.init_node('flydra_camnode')
spinthread = threading.Thread(target=rospy.spin)
spinthread.setDaemon(True)
spinthread.start()
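    # Note (added commentary): rospy.spin() only dispatches ROS callbacks, so
    # running it in a daemon thread keeps message handling alive while
    # flydra.camnode.main() blocks in the main thread below.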
flydra.camnode.main(
rospy_init_node=False, #we have already done that
cmdline_args=rospy.myargv()[1:]
)
rospy.signal_shutdown("quit")
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| ||
15713a4aa17b0af607f4967edc241d1f1688c313 | b2403817f9221ee3550130572a808194ef4f3fda | /OOP/Polymorphism/SuperMethod.py | 660f0501f83896f9d29e77f3e7dd6ac1d1370e5a | []
| no_license | xaviergoby/Python-Data-Structure | e962444ef5b1313c3facbf1fcc315af182b73a26 | eaaf31ea98d63e812a75c1d6ecb8722b9c0cf142 | refs/heads/master | 2020-04-13T00:24:40.896592 | 2018-11-27T11:51:36 | 2018-11-27T11:51:36 | 162,844,732 | 1 | 0 | null | 2018-12-22T21:46:29 | 2018-12-22T21:46:29 | null | UTF-8 | Python | false | false | 873 | py | class SomeBaseClass(object):
def __init__(self):
print('SomeBaseClass.__init__(self) called')
class UnsuperChild(SomeBaseClass):
    def __init__(self):
        print('Child.__init__(self) called')
        # Hard-coding the base class here bypasses the MRO, so cooperative
        # multiple inheritance breaks for subclasses of this class.
        SomeBaseClass.__init__(self)
class SuperChild(SomeBaseClass):
    def __init__(self):
        print('SuperChild.__init__(self) called')
        # Delegating through super() follows the MRO, so a sibling class
        # (InjectMe below) can be injected before SomeBaseClass.
        super(SuperChild, self).__init__()
s = SuperChild()
print s
u = UnsuperChild()
# print "**"
print u
# print "****"
class InjectMe(SomeBaseClass):
def __init__(self):
print('InjectMe.__init__(self) called')
super(InjectMe, self).__init__()
class UnsuperInjector(UnsuperChild, InjectMe): pass
class SuperInjector(SuperChild, InjectMe): pass
print "-----------------"
x = SuperInjector()
#x.mro
y = UnsuperInjector()
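# Expected-behaviour note (CPython, C3 linearisation): both injector classes
# place InjectMe between the child class and SomeBaseClass in their MRO (see
# the prints below), but only SuperInjector actually runs InjectMe.__init__,
# because UnsuperChild.__init__ calls SomeBaseClass.__init__ directly.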
print "MRO.."
print SuperInjector.mro()
print UnsuperInjector.mro() | [
"[email protected]"
]
| |
168db45b1fc971c12d5b647b5f82a75bda4f9473 | f10053d489f102cd6eb81d7ab4ca5f1875570ebd | /data.py | 27e371d23d6ea353cd6106be94b19b932004145a | [
"MIT"
]
| permissive | dragonlong/DR_blend | 5aa30e4045052c2b5bc663e92780abd00325cf16 | 7eef0b55b0dcfdc45c740597721d147de3415413 | refs/heads/master | 2021-04-27T00:20:19.830628 | 2018-03-04T14:31:10 | 2018-03-04T14:31:10 | 123,796,475 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,260 | py | """IO and data augmentation.
The code for data augmentation originally comes from
https://github.com/benanne/kaggle-ndsb/blob/master/data.py
"""
from __future__ import division, print_function
from collections import Counter
import os
from glob import glob
import h5py
import numpy as np
import pandas as pd
from PIL import Image
import skimage
import skimage.transform
from skimage.transform._warps_cy import _warp_fast
from sklearn.utils import shuffle
from sklearn import cross_validation
RANDOM_STATE = 9
FEATURE_DIR = 'data/features'
# channel standard deviations
STD = np.array([70.53946096, 51.71475228, 43.03428563], dtype=np.float32)
# channel means
MEAN = np.array([108.64628601, 75.86886597, 54.34005737], dtype=np.float32)
# set of resampling weights that yields balanced classes
BALANCE_WEIGHTS = np.array([1.3609453700116234, 14.378223495702006,
6.637566137566138, 40.235967926689575,
49.612994350282484])
# for color augmentation, computed with make_pca.py
U = np.array([[-0.56543481, 0.71983482, 0.40240142],
[-0.5989477, -0.02304967, -0.80036049],
[-0.56694071, -0.6935729, 0.44423429]], dtype=np.float32)
EV = np.array([1.65513492, 0.48450358, 0.1565086], dtype=np.float32)
no_augmentation_params = {
'zoom_range': (1.0, 1.0),
'rotation_range': (0, 0),
'shear_range': (0, 0),
'translation_range': (0, 0),
'do_flip': False,
'allow_stretch': False,
}
def fast_warp(img, tf, output_shape, mode='constant', order=0):
"""
This wrapper function is faster than skimage.transform.warp
"""
m = tf.params
t_img = np.zeros((img.shape[0],) + output_shape, img.dtype)
for i in range(t_img.shape[0]):
t_img[i] = _warp_fast(img[i], m, output_shape=output_shape,
mode=mode, order=order)
return t_img
def build_centering_transform(image_shape, target_shape):
rows, cols = image_shape
trows, tcols = target_shape
shift_x = (cols - tcols) / 2.0
shift_y = (rows - trows) / 2.0
return skimage.transform.SimilarityTransform(translation=(shift_x, shift_y))
def build_center_uncenter_transforms(image_shape):
"""
These are used to ensure that zooming and rotation happens around the center of the image.
Use these transforms to center and uncenter the image around such a transform.
"""
center_shift = np.array(
[image_shape[1], image_shape[0]]) / 2.0 - 0.5 # need to swap rows and cols here apparently! confusing!
tform_uncenter = skimage.transform.SimilarityTransform(translation=-center_shift)
tform_center = skimage.transform.SimilarityTransform(translation=center_shift)
return tform_center, tform_uncenter
def build_augmentation_transform(zoom=(1.0, 1.0), rotation=0, shear=0, translation=(0, 0), flip=False):
if flip:
shear += 180
rotation += 180
# shear by 180 degrees is equivalent to rotation by 180 degrees + flip.
# So after that we rotate it another 180 degrees to get just the flip.
tform_augment = skimage.transform.AffineTransform(scale=(1 / zoom[0], 1 / zoom[1]), rotation=np.deg2rad(rotation),
shear=np.deg2rad(shear), translation=translation)
return tform_augment
def random_perturbation_transform(zoom_range, rotation_range, shear_range, translation_range, do_flip=True,
allow_stretch=False, rng=np.random):
shift_x = rng.uniform(*translation_range)
shift_y = rng.uniform(*translation_range)
translation = (shift_x, shift_y)
rotation = rng.uniform(*rotation_range)
shear = rng.uniform(*shear_range)
if do_flip:
flip = (rng.randint(2) > 0) # flip half of the time
else:
flip = False
# random zoom
log_zoom_range = [np.log(z) for z in zoom_range]
if isinstance(allow_stretch, float):
log_stretch_range = [-np.log(allow_stretch), np.log(allow_stretch)]
zoom = np.exp(rng.uniform(*log_zoom_range))
stretch = np.exp(rng.uniform(*log_stretch_range))
zoom_x = zoom * stretch
zoom_y = zoom / stretch
elif allow_stretch is True: # avoid bugs, f.e. when it is an integer
zoom_x = np.exp(rng.uniform(*log_zoom_range))
zoom_y = np.exp(rng.uniform(*log_zoom_range))
else:
zoom_x = zoom_y = np.exp(rng.uniform(*log_zoom_range))
# the range should be multiplicatively symmetric, so [1/1.1, 1.1] instead of [0.9, 1.1] makes more sense.
return build_augmentation_transform((zoom_x, zoom_y), rotation, shear, translation, flip)
def perturb(img, augmentation_params, target_shape, rng=np.random):
# # DEBUG: draw a border to see where the image ends up
# img[0, :] = 0.5
# img[-1, :] = 0.5
# img[:, 0] = 0.5
# img[:, -1] = 0.5
shape = img.shape[1:]
tform_centering = build_centering_transform(shape, target_shape)
tform_center, tform_uncenter = build_center_uncenter_transforms(shape)
tform_augment = random_perturbation_transform(rng=rng, **augmentation_params)
tform_augment = tform_uncenter + tform_augment + tform_center # shift to center, augment, shift back (for the rotation/shearing)
return fast_warp(img, tform_centering + tform_augment,
output_shape=target_shape,
mode='constant')
# for test-time augmentation
def perturb_fixed(img, tform_augment, target_shape=(50, 50)):
shape = img.shape[1:]
tform_centering = build_centering_transform(shape, target_shape)
tform_center, tform_uncenter = build_center_uncenter_transforms(shape)
tform_augment = tform_uncenter + tform_augment + tform_center # shift to center, augment, shift back (for the rotation/shearing)
return fast_warp(img, tform_centering + tform_augment,
output_shape=target_shape, mode='constant')
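# Minimal sketch (an assumption, not part of the original pipeline) of how a
# fixed set of test-time augmentation transforms could be built for
# perturb_fixed: the identity and a horizontal flip, each at 0 and 180 degrees.
def make_tta_transforms():
    tforms = []
    for flip in (False, True):
        for rotation in (0, 180):
            tforms.append(build_augmentation_transform(rotation=rotation, flip=flip))
    return tforms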
def load_perturbed(fname):
    # `util` was never imported and perturb() has no default parameters here;
    # assuming the local load_image helper and a same-size no-op perturbation.
    img = load_image(fname).astype(np.float32)
    return perturb(img, no_augmentation_params, target_shape=img.shape[1:])
def augment_color(img, sigma=0.1, color_vec=None):
if color_vec is None:
if not sigma > 0.0:
color_vec = np.zeros(3, dtype=np.float32)
else:
color_vec = np.random.normal(0.0, sigma, 3)
alpha = color_vec.astype(np.float32) * EV
noise = np.dot(U, alpha.T)
return img + noise[:, np.newaxis, np.newaxis]
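# For reference: augment_color implements AlexNet-style "fancy PCA" colour
# noise -- it samples one value per principal component from N(0, sigma),
# scales by the eigenvalues EV, and adds U.dot(alpha) as a constant RGB offset
# to every pixel (U and EV come from make_pca.py, see the top of this file).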
def load_augment(fname, w, h, aug_params=no_augmentation_params,
transform=None, sigma=0.0, color_vec=None):
"""Load augmented image with output shape (w, h).
Default arguments return non augmented image of shape (w, h).
To apply a fixed transform (color augmentation) specify transform
(color_vec).
To generate a random augmentation specify aug_params and sigma.
"""
img = load_image(fname)
if transform is None:
img = perturb(img, augmentation_params=aug_params, target_shape=(w, h))
else:
img = perturb_fixed(img, tform_augment=transform, target_shape=(w, h))
np.subtract(img, MEAN[:, np.newaxis, np.newaxis], out=img)
np.divide(img, STD[:, np.newaxis, np.newaxis], out=img)
img = augment_color(img, sigma=sigma, color_vec=color_vec)
return img.transpose(1, 2, 0)
def compute_mean(files, batch_size=128):
"""Load images in files in batches and compute mean."""
m = np.zeros(3)
for i in range(0, len(files), batch_size):
images = load_image(files[i: i + batch_size])
m += images.sum(axis=(0, 2, 3))
return (m / len(files)).astype(np.float32)
def std(files, batch_size=128):
s = np.zeros(3)
s2 = np.zeros(3)
shape = None
for i in range(0, len(files), batch_size):
print("done with {:>3} / {} images".format(i, len(files)))
        # `load_image_uint` is not defined in this module; assuming the
        # load_image helper defined below was intended.
        images = np.array(load_image(files[i: i + batch_size]),
                          dtype=np.float64)
shape = images.shape
s += images.sum(axis=(0, 2, 3))
s2 += np.power(images, 2).sum(axis=(0, 2, 3))
n = len(files) * shape[2] * shape[3]
var = (s2 - s ** 2.0 / n) / (n - 1)
return np.sqrt(var)
def get_labels(names, labels=None, label_file='data/trainLabels.csv',
per_patient=False):
if labels is None:
labels = pd.read_csv(label_file,
index_col=0).loc[names].values.flatten()
if per_patient:
left = np.array(['left' in n for n in names])
return np.vstack([labels[left], labels[~left]]).T
else:
return labels
def get_image_files(datadir, left_only=False):
fs = glob('{}/*'.format(datadir))
if left_only:
fs = [f for f in fs if 'left' in f]
return np.array(sorted(fs))
def get_names(files):
return [os.path.basename(x).split('.')[0] for x in files]
def load_image(fname):
if isinstance(fname, str):
#return np.array(Image.open(fname), dtype=np.float32).transpose(2, 1, 0)
return np.array(h5py.File(fname)['image'].value, dtype=np.float32).transpose(2, 1, 0)
else:
return np.array([load_image(f) for f in fname])
def balance_shuffle_indices(y, random_state=None, weight=BALANCE_WEIGHTS):
y = np.asarray(y)
counter = Counter(y)
    max_count = np.max(list(counter.values()))  # list() keeps this working under Python 3 as well
indices = []
for cls, count in counter.items():
ratio = weight * max_count / count + (1 - weight)
idx = np.tile(np.where(y == cls)[0],
np.ceil(ratio).astype(int))
np.random.shuffle(idx)
indices.append(idx[:max_count])
return shuffle(np.hstack(indices), random_state=random_state)
def balance_per_class_indices(y, weights=BALANCE_WEIGHTS):
y = np.array(y)
weights = np.array(weights, dtype=float)
p = np.zeros(len(y))
for i, weight in enumerate(weights):
p[y == i] = weight
return np.random.choice(np.arange(len(y)), size=len(y), replace=True,
p=np.array(p) / p.sum())
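# Worked sketch with assumed toy labels: for y = [0, 0, 1] and the default
# BALANCE_WEIGHTS, p is proportional to [1.36, 1.36, 14.38] before
# normalisation, so the single class-1 sample is drawn roughly ten times as
# often as either class-0 sample.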
def get_weights(y, weights=BALANCE_WEIGHTS):
y = np.array(y)
weights = np.array(weights, dtype=float)
p = np.zeros(len(y))
for i, weight in enumerate(weights):
p[y == i] = weight
return p / np.sum(p) * len(p)
def split_indices(files, labels, test_size=0.1, random_state=RANDOM_STATE):
names = get_names(files)
labels = get_labels(names, per_patient=True)
spl = cross_validation.StratifiedShuffleSplit(labels[:, 0],
test_size=test_size,
random_state=random_state,
n_iter=1)
tr, te = next(iter(spl))
tr = np.hstack([tr * 2, tr * 2 + 1])
te = np.hstack([te * 2, te * 2 + 1])
return tr, te
def split(files, labels, test_size=0.1, random_state=RANDOM_STATE):
train, test = split_indices(files, labels, test_size, random_state)
return files[train], files[test], labels[train], labels[test]
def per_patient_reshape(X, X_other=None):
X_other = X if X_other is None else X_other
right_eye = np.arange(0, X.shape[0])[:, np.newaxis] % 2
n = len(X)
left_idx = np.arange(n)
right_idx = left_idx + np.sign(2 * ((left_idx + 1) % 2) - 1)
return np.hstack([X[left_idx], X_other[right_idx],
right_eye]).astype(np.float32)
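# Pairing sketch (assumed 4-row input ordered left0, right0, left1, right1):
# left_idx = [0, 1, 2, 3] yields right_idx = [1, 0, 3, 2], so every row is
# concatenated with its partner eye's features plus a 0/1 flag marking whether
# the row itself is a right eye.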
def load_features(fnames, test=False):
if test:
fnames = [os.path.join(os.path.dirname(f),
os.path.basename(f).replace('train', 'test'))
for f in fnames]
data = [np.load(f) for f in fnames]
data = [X.reshape([X.shape[0], -1]) for X in data]
return np.hstack(data)
def parse_blend_config(cnf):
return {run: [os.path.join(FEATURE_DIR, f) for f in files]
for run, files in cnf.items()}
'''
w= 224
h= 224
aug_params= {
'zoom_range': (1 / 1.15, 1.15),
'rotation_range': (0, 360),
'shear_range': (0, 0),
'translation_range': (-20, 20),
'do_flip': True,
'allow_stretch': True,
}
sigma= 0.25
fname = '/data/jeffery/kg/sample_256/31916_right.h5'
img = load_augment(fname, w, h, aug_params=aug_params, transform=None, sigma=0.0, color_vec=None)
print(type(img))
print(img.shape)
print ("adf")
'''
| [
"[email protected]"
]
| |
694bd1b545d6e74fa955877bac704efbbedcc3d4 | 141b42d9d72636c869ff2ce7a2a9f7b9b24f508b | /myvenv/Lib/site-packages/phonenumbers/shortdata/region_JO.py | 10652f5720029f060d27895417cf44fbe7fee355 | [
"BSD-3-Clause"
]
| permissive | Fa67/saleor-shop | 105e1147e60396ddab6f006337436dcbf18e8fe1 | 76110349162c54c8bfcae61983bb59ba8fb0f778 | refs/heads/master | 2021-06-08T23:51:12.251457 | 2018-07-24T08:14:33 | 2018-07-24T08:14:33 | 168,561,915 | 1 | 0 | BSD-3-Clause | 2021-04-18T07:59:12 | 2019-01-31T17:00:39 | Python | UTF-8 | Python | false | false | 860 | py | """Auto-generated file, do not edit by hand. JO metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_JO = PhoneMetadata(id='JO', country_code=None, international_prefix=None,
general_desc=PhoneNumberDesc(national_number_pattern='[19]\\d{2,4}', possible_length=(3, 5)),
emergency=PhoneNumberDesc(national_number_pattern='1(?:12|9[127])|911', example_number='112', possible_length=(3,)),
short_code=PhoneNumberDesc(national_number_pattern='1(?:09|1[0-2]|9[0-24-79])|9(?:0903|11|8788)', example_number='111', possible_length=(3, 5)),
carrier_specific=PhoneNumberDesc(national_number_pattern='9(?:0903|8788)', example_number='90903', possible_length=(5,)),
sms_services=PhoneNumberDesc(national_number_pattern='9(?:0903|8788)', example_number='90903', possible_length=(5,)),
short_data=True)
| [
"[email protected]"
]
| |
d7764b7bc2f3043d08cfc2717d91e64ea6735c41 | 321b4ed83b6874eeb512027eaa0b17b0daf3c289 | /142/142.linked-list-cycle-ii.234403149.Accepted.leetcode.py | f353efaede5f020953c36a7ea4c336d0ce14af66 | []
| no_license | huangyingw/submissions | 7a610613bdb03f1223cdec5f6ccc4391149ca618 | bfac1238ecef8b03e54842b852f6fec111abedfa | refs/heads/master | 2023-07-25T09:56:46.814504 | 2023-07-16T07:38:36 | 2023-07-16T07:38:36 | 143,352,065 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 443 | py | class Solution(object):
def detectCycle(self, head):
if not head or not head.next:
return None
q1 = head
q2 = head.next
while q1 != q2:
if not q2 or not q2.next:
return None
q1 = q1.next
q2 = q2.next.next
res = head
q1 = q1.next
while res != q1:
res = res.next
q1 = q1.next
return res
| [
"[email protected]"
]
| |
01d83774e476624b33ddf3a10ae333bcda615ea8 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /WsHSAEBQW6F7nsMjr_3.py | e76b9be89606cf93b3d7c28044740984c736df0f | []
| no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117 | py |
def flatten_the_curve(lst):
return [round(sum(n for n in lst)/len(lst),1) for n in lst] if len(lst) > 0 else lst
| [
"[email protected]"
]
| |
9c4afa38b67e13163c77c102951dc681f27676dc | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/F/Fabien/wikipedia_power_plants_2.py | b1aa08a3ed31016e75e650d95cb5a313cb461888 | []
| no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,388 | py | import csv
from lxml import etree
import re
import scraperwiki
from StringIO import StringIO
from urllib import urlencode
#code inspired by https://scraperwiki.com/scrapers/dbpedia-us-hospitals/edit/
#code build using yql library doesn't seem to work, get a SSLHandshakeError when using code based on https://scraperwiki.com/scrapers/ouseful-two-step-linked-data-test/edit/
#this gets the url that is used to run the sparql query and return results in csv
def csv_url(sparql):
params = {
'default-graph-uri': 'http://dbpedia.org',
'query': sparql,
'format': 'text/csv',
}
return 'http://live.dbpedia.org/sparql?%s' % urlencode(params)
#it seems that category traversals are not possible using the main dbpedia endpoint, but are possible using the live endpoint
#return 'http://dbpedia.org/sparql?%s' % urlencode(params)
#run the sparql query and get the results in csv
def parse_csv(url):
print('creating data object')
data = StringIO()
print('about to run query')
data.write(scraperwiki.scrape(url))
print('just got the data')
data.seek(0)
data_csv = csv.DictReader(data)
print('about to iterate over the data')
for row in data_csv:
rowResult = dict()
#show what's in here
for key in row.keys():
if(row[key]): #only add if string is not empty
rowResult[key] = row[key]
print(rowResult)
#calculate the latitude and longitude, these appear in several forms
rowResult["latitude"] = ""
rowResult["longitude"] = ""
#values for geoLat and geoLong are preferred
if ("geoLat" in rowResult) and ("geoLong" in rowResult) :
rowResult["latitude"] = float(rowResult["geoLat"])
rowResult["longitude"] = float(rowResult["geoLong"])
elif ("latDMS" in rowResult) and ("longDMS" in rowResult) :
#otherwise use values for latDMS and longDMS
rowResult["latitude"] = float(rowResult["latDMS"])
rowResult["longitude"] = float(rowResult["longDMS"])
scraperwiki.sqlite.save(unique_keys=['plant'], data=rowResult)
#click on "clear data" and this will recreate the table with columns in the right order, there's no other way to re-order them
scraperwiki.sqlite.execute("create table if not exists swdata (plant text, categories text, latitude real, longitude real, country text, locale text, owner text, primaryFuel text, secondaryFuel text, installedCapacity text, status text, generationUnits text, averageAnnualGen text, commissionned text, geoLat real, geoLong real, latDMS real, longDMS real)")
#Below is an absolutely epic sparql query.
#For anyone reading this who wants to understand how it works,
#refer to the "Traversing Ontologies and (Sub)Classes; all subclasses of Person down the hierarchy"
#section of http://webr3.org/blog/linked-data/virtuoso-6-sparqlgeo-and-linked-data/
#for a much more simple example
#Some of these powerplants can have multiple matching categories, especially if they run on two different fuel types
#Some of them also have several coordinates that could be combined in a number of ways if using a flat SPARQL query
#By using "group by" we make sure to keep only one row per power plant
#Note: using coalesce to give precedence to geolat/geolon directly in sparql leads to a strange error
#In running a test over a minimal query based on the one used on the wikipedia_oil_refineries scraper, it seems that "group by"
#isn't enough to ensure only one row per power plant, but the use of sql:sample and sql:group_digest does the trick.
#It doesn't seem clear if this is causing fewer records to be retrieved since the database uses a primary key based on the plant name.
#According to this scraper's history, it looks like fewer results are being returned.
#Run succeeded: - ran 6 times, most recently for 335 seconds (1 scraped pages, 2654 records) 07:41, 25 October 2012
#Run succeeded: - ran 2 times, most recently for 973 seconds (1 scraped pages, 3116 records) 22:35, 18 October 2012
#Looking at the scraper's history it looks like the record number dropped while there were no more edits to the scraper's code
#"run_ended": "2012-10-20T21:17:22", "run_started": "2012-10-20T21:09:56", "records_produced": 3136
#"run_ended": "2012-10-21T21:27:36", "run_started": "2012-10-21T21:25:04", "records_produced": 2649
#So, something might have happened on DBPedia's side. And indeed there are now 6 more categories with encoding issue on colon...
queryString = """PREFIX skos:<http://www.w3.org/2004/02/skos/core#>
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX dc:<http://purl.org/dc/terms/>
PREFIX geo:<http://www.w3.org/2003/01/geo/wgs84_pos#>
PREFIX dbpprop:<http://dbpedia.org/property/>
SELECT ?plant
(sql:group_digest(replace(replace(str(?category),"http://dbpedia.org/resource/Category:",""),'_',' '),", ",255,1)
as ?categories)
(sql:sample(bif:either(str(?latNs)="S",-1,1)*(?latD+xsd:double(?latM)/60+xsd:double(?latS)/3600)) as ?latDMS)
(sql:sample(bif:either(str(?longEw)="W",-1,1)*(?longD+xsd:double(?longM)/60+xsd:double(?longS)/3600)) as ?longDMS)
(sql:sample(?geoLat) as ?geoLat)
(sql:sample(?geoLong) as ?geoLong)
(sql:sample(replace(replace(str(?country),"http://dbpedia.org/resource/",""),"_"," ")) as ?country)
(sql:sample(replace(replace(str(?locale),"http://dbpedia.org/resource/",""),"_"," ")) as ?locale)
(sql:sample(replace(replace(str(?owner),"http://dbpedia.org/resource/",""),"_"," ")) as ?owner)
(sql:sample(replace(replace(str(?primaryFuel),"http://dbpedia.org/resource/",""),"_"," ")) as ?primaryFuel)
(sql:sample(replace(replace(str(?secondaryFuel),"http://dbpedia.org/resource/",""),"_"," ")) as ?secondaryFuel)
(sql:group_digest(?installedCapacity," - ",255,1) as ?installedCapacity)
(sql:sample(?status) as ?status)
(sql:sample(?generationUnits) as ?generationUnits)
(sql:sample(?averageAnnualGen) as ?averageAnnualGen)
(min(?commissionned) as ?commissionned)
WHERE {
{ SELECT ?category ?y WHERE {
?category skos:broader ?y .
?category rdfs:label ?categoryName .
FILTER (regex(?categoryName, "power station", "i") ||
regex(?categoryName, "power plant", "i") ||
regex(?categoryName, "CHP plants", "i") ||
regex(?categoryName, "Wave farms", "i") ||
regex(?categoryName, "Wind farms", "i")) .
} }
OPTION ( TRANSITIVE, T_DISTINCT, t_in(?category), t_out(?y), t_step('path_id') as ?path, t_step(?category) as ?route, t_step('step_no') AS ?jump, T_DIRECTION 2 )
FILTER ( ?y = <http://dbpedia.org/resource/Category:Power_stations_by_country> ) .
?plant dc:subject ?category .
OPTIONAL{?plant dbpprop:latD ?latD} .
OPTIONAL{?plant dbpprop:latM ?latM} .
OPTIONAL{?plant dbpprop:latS ?latS} .
OPTIONAL{?plant dbpprop:latNs ?latNs} .
OPTIONAL{?plant dbpprop:longEw ?longEw} .
OPTIONAL{?plant dbpprop:longD ?longD} .
OPTIONAL{?plant dbpprop:longM ?longM} .
OPTIONAL{?plant dbpprop:longS ?longS} .
OPTIONAL{?plant geo:lat ?geoLat} .
OPTIONAL{?plant geo:long ?geoLong} .
OPTIONAL{?plant dbpprop:country ?country} .
OPTIONAL{?plant dbpprop:locale ?locale} .
OPTIONAL{?plant dbpprop:owner ?owner} .
OPTIONAL{?plant dbpprop:primaryFuel ?primaryFuel} .
OPTIONAL{?plant dbpprop:secondaryFuel ?secondaryFuel} .
OPTIONAL{?plant dbpprop:installedCapacity ?installedCapacity} .
OPTIONAL{?plant dbpprop:status ?status} .
OPTIONAL{?plant dbpprop:generationUnits ?generationUnits} .
OPTIONAL{?plant dbpprop:averageAnnualGen ?averageAnnualGen} .
OPTIONAL{?plant dbpprop:status ?status} .
OPTIONAL{?plant dbpprop:commissionned ?commissionned} .
OPTIONAL{?plant dbpprop:decommissionned ?decommissionned} .
OPTIONAL{?plant dbpprop:technology ?technology} .
OPTIONAL{?plant dbpprop:coolingWater ?coolingWater} .
} group by ?plant"""
#queryString = "select * where {?x ?y ?z} limit 10"
parse_csv(csv_url(queryString))
#If 10000 results are returned, the the query above needs to be modified to have LIMIT 10000 and then have values of OFFSET defined in increments of 10000import csv
from lxml import etree
import re
import scraperwiki
from StringIO import StringIO
from urllib import urlencode
#code inspired by https://scraperwiki.com/scrapers/dbpedia-us-hospitals/edit/
#code build using yql library doesn't seem to work, get a SSLHandshakeError when using code based on https://scraperwiki.com/scrapers/ouseful-two-step-linked-data-test/edit/
#this gets the url that is used to run the sparql query and return results in csv
def csv_url(sparql):
params = {
'default-graph-uri': 'http://dbpedia.org',
'query': sparql,
'format': 'text/csv',
}
return 'http://live.dbpedia.org/sparql?%s' % urlencode(params)
#it seems that category traversals are not possible using the main dbpedia endpoint, but are possible using the live endpoint
#return 'http://dbpedia.org/sparql?%s' % urlencode(params)
#run the sparql query and get the results in csv
def parse_csv(url):
print('creating data object')
data = StringIO()
print('about to run query')
data.write(scraperwiki.scrape(url))
print('just got the data')
data.seek(0)
data_csv = csv.DictReader(data)
print('about to iterate over the data')
for row in data_csv:
rowResult = dict()
#show what's in here
for key in row.keys():
if(row[key]): #only add if string is not empty
rowResult[key] = row[key]
print(rowResult)
#calculate the latitude and longitude, these appear in several forms
rowResult["latitude"] = ""
rowResult["longitude"] = ""
#values for geoLat and geoLong are preferred
if ("geoLat" in rowResult) and ("geoLong" in rowResult) :
rowResult["latitude"] = float(rowResult["geoLat"])
rowResult["longitude"] = float(rowResult["geoLong"])
elif ("latDMS" in rowResult) and ("longDMS" in rowResult) :
#otherwise use values for latDMS and longDMS
rowResult["latitude"] = float(rowResult["latDMS"])
rowResult["longitude"] = float(rowResult["longDMS"])
scraperwiki.sqlite.save(unique_keys=['plant'], data=rowResult)
#click on "clear data" and this will recreate the table with columns in the right order, there's no other way to re-order them
scraperwiki.sqlite.execute("create table if not exists swdata (plant text, categories text, latitude real, longitude real, country text, locale text, owner text, primaryFuel text, secondaryFuel text, installedCapacity text, status text, generationUnits text, averageAnnualGen text, commissionned text, geoLat real, geoLong real, latDMS real, longDMS real)")
#Below is an absolutely epic sparql query.
#For anyone reading this who wants to understand how it works,
#refer to the "Traversing Ontologies and (Sub)Classes; all subclasses of Person down the hierarchy"
#section of http://webr3.org/blog/linked-data/virtuoso-6-sparqlgeo-and-linked-data/
#for a much more simple example
#Some of these powerplants can have multiple matching categories, especially if they run on two different fuel types
#Some of them also have several coordinates that could be combined in a number of ways if using a flat SPARQL query
#By using "group by" we make sure to keep only one row per power plant
#Note: using coalesce to give precedence to geolat/geolon directly in sparql leads to a strange error
#In running a test over a minimal query based on the one used on the wikipedia_oil_refineries scraper, it seems that "group by"
#isn't enough to ensure only one row per power plant, but the use of sql:sample and sql:group_digest does the trick.
#It doesn't seem clear if this is causing fewer records to be retrieved since the database uses a primary key based on the plant name.
#According to this scraper's history, it looks like fewer results are being returned.
#Run succeeded: - ran 6 times, most recently for 335 seconds (1 scraped pages, 2654 records) 07:41, 25 October 2012
#Run succeeded: - ran 2 times, most recently for 973 seconds (1 scraped pages, 3116 records) 22:35, 18 October 2012
#Looking at the scraper's history it looks like the record number dropped while there were no more edits to the scraper's code
#"run_ended": "2012-10-20T21:17:22", "run_started": "2012-10-20T21:09:56", "records_produced": 3136
#"run_ended": "2012-10-21T21:27:36", "run_started": "2012-10-21T21:25:04", "records_produced": 2649
#So, something might have happened on DBPedia's side. And indeed there are now 6 more categories with encoding issue on colon...
queryString = """PREFIX skos:<http://www.w3.org/2004/02/skos/core#>
PREFIX rdfs:<http://www.w3.org/2000/01/rdf-schema#>
PREFIX dc:<http://purl.org/dc/terms/>
PREFIX geo:<http://www.w3.org/2003/01/geo/wgs84_pos#>
PREFIX dbpprop:<http://dbpedia.org/property/>
SELECT ?plant
(sql:group_digest(replace(replace(str(?category),"http://dbpedia.org/resource/Category:",""),'_',' '),", ",255,1)
as ?categories)
(sql:sample(bif:either(str(?latNs)="S",-1,1)*(?latD+xsd:double(?latM)/60+xsd:double(?latS)/3600)) as ?latDMS)
(sql:sample(bif:either(str(?longEw)="W",-1,1)*(?longD+xsd:double(?longM)/60+xsd:double(?longS)/3600)) as ?longDMS)
(sql:sample(?geoLat) as ?geoLat)
(sql:sample(?geoLong) as ?geoLong)
(sql:sample(replace(replace(str(?country),"http://dbpedia.org/resource/",""),"_"," ")) as ?country)
(sql:sample(replace(replace(str(?locale),"http://dbpedia.org/resource/",""),"_"," ")) as ?locale)
(sql:sample(replace(replace(str(?owner),"http://dbpedia.org/resource/",""),"_"," ")) as ?owner)
(sql:sample(replace(replace(str(?primaryFuel),"http://dbpedia.org/resource/",""),"_"," ")) as ?primaryFuel)
(sql:sample(replace(replace(str(?secondaryFuel),"http://dbpedia.org/resource/",""),"_"," ")) as ?secondaryFuel)
(sql:group_digest(?installedCapacity," - ",255,1) as ?installedCapacity)
(sql:sample(?status) as ?status)
(sql:sample(?generationUnits) as ?generationUnits)
(sql:sample(?averageAnnualGen) as ?averageAnnualGen)
(min(?commissionned) as ?commissionned)
WHERE {
{ SELECT ?category ?y WHERE {
?category skos:broader ?y .
?category rdfs:label ?categoryName .
FILTER (regex(?categoryName, "power station", "i") ||
regex(?categoryName, "power plant", "i") ||
regex(?categoryName, "CHP plants", "i") ||
regex(?categoryName, "Wave farms", "i") ||
regex(?categoryName, "Wind farms", "i")) .
} }
OPTION ( TRANSITIVE, T_DISTINCT, t_in(?category), t_out(?y), t_step('path_id') as ?path, t_step(?category) as ?route, t_step('step_no') AS ?jump, T_DIRECTION 2 )
FILTER ( ?y = <http://dbpedia.org/resource/Category:Power_stations_by_country> ) .
?plant dc:subject ?category .
OPTIONAL{?plant dbpprop:latD ?latD} .
OPTIONAL{?plant dbpprop:latM ?latM} .
OPTIONAL{?plant dbpprop:latS ?latS} .
OPTIONAL{?plant dbpprop:latNs ?latNs} .
OPTIONAL{?plant dbpprop:longEw ?longEw} .
OPTIONAL{?plant dbpprop:longD ?longD} .
OPTIONAL{?plant dbpprop:longM ?longM} .
OPTIONAL{?plant dbpprop:longS ?longS} .
OPTIONAL{?plant geo:lat ?geoLat} .
OPTIONAL{?plant geo:long ?geoLong} .
OPTIONAL{?plant dbpprop:country ?country} .
OPTIONAL{?plant dbpprop:locale ?locale} .
OPTIONAL{?plant dbpprop:owner ?owner} .
OPTIONAL{?plant dbpprop:primaryFuel ?primaryFuel} .
OPTIONAL{?plant dbpprop:secondaryFuel ?secondaryFuel} .
OPTIONAL{?plant dbpprop:installedCapacity ?installedCapacity} .
OPTIONAL{?plant dbpprop:status ?status} .
OPTIONAL{?plant dbpprop:generationUnits ?generationUnits} .
OPTIONAL{?plant dbpprop:averageAnnualGen ?averageAnnualGen} .
OPTIONAL{?plant dbpprop:status ?status} .
OPTIONAL{?plant dbpprop:commissionned ?commissionned} .
OPTIONAL{?plant dbpprop:decommissionned ?decommissionned} .
OPTIONAL{?plant dbpprop:technology ?technology} .
OPTIONAL{?plant dbpprop:coolingWater ?coolingWater} .
} group by ?plant"""
#queryString = "select * where {?x ?y ?z} limit 10"
parse_csv(csv_url(queryString))
#If 10000 results are returned, the the query above needs to be modified to have LIMIT 10000 and then have values of OFFSET defined in increments of 10000 | [
"[email protected]"
]
| |
a542c28168e1d849dfb3e5a94a62c3bf549828d5 | 92b8b1b0914a1bb4f6571bf7c3597ac33dbc58aa | /MLG-Crypto_90/solution.py | ad3cf3c7935532127e6bd9a25f91be9243680449 | []
| no_license | leogemetric/cryptoctf-2016 | 0cb5e5179125d88f8ad14bc3c29ff5aeaeac7787 | 69b02f0bbfa941d04a2f9af101d1420a096f2437 | refs/heads/master | 2021-01-19T07:18:53.908002 | 2016-05-27T22:33:02 | 2016-05-27T22:33:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 874 | py | lines = open("mlg_crypto.txt", "r").readlines()[1:-1]
subs = {}
for line in lines:
line = line.strip()
for word in line.split("_"):
if word in subs:
subs[word] += 1
else:
subs[word] = 1
print len(subs)
print subs
space = max(subs, key=lambda x: subs[x])
del subs[space]
total = "\n".join(lines)
total = total.replace(space, " ") # Most common character is " "
# Do some bs substitutions
alphabet = "abcdefghijklmnopqrstuvwxyz"
i = 0
for sub in subs:
total = total.replace(sub, alphabet[i])
i += 1
total = total.replace("_", "").replace("\n\n", "\n")
print total
# Given the hint, we need to crack the substitution cipher on the ciphertext.
# This script will assign each word a letter and print it out
# Plugging the output into quipqiup, we kinda decode the message and the flag:
# flag{leet_smoked_memes_bro}
| [
"[email protected]"
]
| |
9d7f40b1e1cb4a8d9e542a673113d94d0418e724 | c43fbcb4442428e85616f664964d1e27ca396070 | /runs/malte/snr_study/simparamsec.py | 7a5a04938a59f165f1b98855287c2f20abe47592 | []
| no_license | megalut/megalut | ddac89a0dca70e13979d31b80d52233226233ade | 63bd4bec8000ad13f4963d464d7b7b4d470a36ab | refs/heads/master | 2020-04-15T00:33:42.815988 | 2018-09-11T08:45:48 | 2018-09-11T08:45:48 | 20,882,727 | 2 | 1 | null | 2018-09-11T08:45:49 | 2014-06-16T11:39:14 | Python | UTF-8 | Python | false | false | 3,043 | py | import megalut.sim
import numpy as np
import random # np.random.choice is only available for newer numpys...
import itertools
class Simple1(megalut.sim.params.Params):
"""
No PSF, just round Gaussians, but with Euclid zeropoint and sky level etc.
"""
def __init__(self):
megalut.sim.params.Params.__init__(self)
def stat(self):
"""
stat: called for each catalog (stat is for stationnary)
"""
return {"snc_type":1}
def draw(self, ix, iy, nx, ny):
"""
draw: called for each galaxy
"""
######### No Lensing
tru_s1 = 0.0
tru_s2 = 0.0
tru_mu = 1.0
# Params
gain = 3.1 # electrons/ADU
ron = 4.2 # electrons
skyback = 22.35 # mag per arcsec2, dominated by zodiacal light
# Don't look at sextractor outptu if you do this!
zeropoint = 24.0 + float(ny - iy)/float(ny) * 1.0 # mag. Should give SNR 10 when observing with 3 x 565 second exposures.
#zeropoint = 24.9
exptime = 3.0*565.0 # seconds
########## Noise ##########
tru_sky_level = 0.01 * (exptime/gain) * 10**(-0.4*(skyback - zeropoint)) # In ADU per pixel. 0.01 because of the pixel size of 0.1 arcsec.
tru_gain = gain
tru_read_noise = ron
######### No Lensing
tru_s1 = 0.0
tru_s2 = 0.0
tru_mu = 1.0
########## Galaxy ##########
tru_type = 0 # 0 Gaussian, 1 sersic
tru_mag = 24.5
#tru_mag = 23.0 + float(ny - iy)/float(ny) * 2.5
tru_flux = (exptime / gain) * 10**((tru_mag - zeropoint)/(-2.5))
tru_rad = 4.3/2.0
# Croppers reference galaxy has an extension of 4.3 pixels, but we don't know exactly what this extension means.
size_factor = 1.0 # scales the galaxy with respect to Croppers reference
tru_sigma = size_factor * tru_rad / 1.1774 # We take Croppers "extension of the source" as the half-light-diameter
tru_cropper_snr = (tru_flux) / np.sqrt( np.pi * (size_factor * 13.0/2.0)**2 * tru_sky_level) # For a sky-limited obs, we don't use the gain here
tru_g = 0.0
tru_theta = 0.0
(tru_g1, tru_g2) = (tru_g * np.cos(2.0 * tru_theta), tru_g * np.sin(2.0 * tru_theta))
tru_g = np.hypot(tru_g1, tru_g2)
return { # It's ugly to not directly fill this dict, but this makes it clearer what is actually returned:
"tru_type":tru_type,
"tru_flux":tru_flux,
"tru_mag":tru_mag,
"zeropoint":zeropoint,
"skyback":skyback,
"tru_rad":tru_rad,
"tru_sigma":tru_sigma,
#"tru_sersicn":tru_sersicn,
"tru_g1":tru_g1,
"tru_g2":tru_g2,
"tru_g":tru_g, # only useful for some plots
"tru_sky_level":tru_sky_level, # in ADU, just for generating noise, will not remain in the image
"tru_gain":tru_gain, # in photons/ADU. Make this negative to have no Poisson noise
"tru_read_noise":tru_read_noise, # in photons if gain > 0.0, otherwise in ADU.Set this to zero to have no flat Gaussian noise
"tru_s1":tru_s1,
"tru_s2":tru_s2,
"tru_mu":tru_mu,
#"tru_psf_sigma":2.0,
#"tru_psf_g1":0.0,
#"tru_psf_g2":0.0,
"tru_cropper_snr":tru_cropper_snr,
}
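# Minimal usage sketch (values are hypothetical; assumes the megalut package
# imported above is installed):
if __name__ == "__main__":
    params = Simple1()
    print(params.stat())
    # draw() takes the galaxy's grid position (ix, iy) and the grid size (nx, ny).
    print(params.draw(ix=0, iy=0, nx=10, ny=10))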
| [
"[email protected]"
]
| |
2ff4be473a4847f049f4cc30b5101abf4ed13ff8 | cc64d03b132b773acae845c52f41fcdcdcaee273 | /test/functional/wallet_coinbase_category.py | 15e59fad3c5890d950270402edb2f1939d3bf62f | [
"MIT"
]
| permissive | phlsolo316/vidcoin | aa9aae1e0f2215edadd2df89e1c9b6669abbce76 | d6eec232378c329ebc2a31e7d21acf58cf62368d | refs/heads/main | 2023-05-26T05:01:32.379060 | 2021-06-07T02:46:07 | 2021-06-07T02:46:07 | 373,622,056 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,299 | py | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test coinbase transactions return the correct categories.
Tests listtransactions, listsinceblock, and gettransaction.
"""
from test_framework.test_framework import VIDCoinTestFramework
from test_framework.util import (
assert_array_result
)
class CoinbaseCategoryTest(VIDCoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def assert_category(self, category, address, txid, skip):
assert_array_result(self.nodes[0].listtransactions(skip=skip),
{"address": address},
{"category": category})
assert_array_result(self.nodes[0].listsinceblock()["transactions"],
{"address": address},
{"category": category})
assert_array_result(self.nodes[0].gettransaction(txid)["details"],
{"address": address},
{"category": category})
def run_test(self):
# Generate one block to an address
address = self.nodes[0].getnewaddress()
self.nodes[0].generatetoaddress(1, address)
        block_hash = self.nodes[0].getbestblockhash()
        txid = self.nodes[0].getblock(block_hash)["tx"][0]
# Coinbase transaction is immature after 1 confirmation
self.assert_category("immature", address, txid, 0)
# Mine another 99 blocks on top
self.nodes[0].generate(99)
# Coinbase transaction is still immature after 100 confirmations
self.assert_category("immature", address, txid, 99)
# Mine one more block
self.nodes[0].generate(1)
# Coinbase transaction is now matured, so category is "generate"
self.assert_category("generate", address, txid, 100)
# Orphan block that paid to address
        self.nodes[0].invalidateblock(block_hash)
# Coinbase transaction is now orphaned
self.assert_category("orphan", address, txid, 100)
if __name__ == '__main__':
CoinbaseCategoryTest().main()
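# Run note: this is a functional test; it is normally executed directly
# (./test/functional/wallet_coinbase_category.py) or via the repo's functional
# test runner, assuming a built vidcoind is available.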
| [
"[email protected]"
]
| |
d114bd12241b8318364df59f4a6569dd709ee16c | 6131b2738a7c087dfa6907c624453576f6f0e393 | /flask_project/fish/app/spider/yushu_book.py | 027eea09c9285f9ff2cb4b25722aa5d35719a702 | []
| no_license | heheddff/myPythonProcess | 60ef240130cd02906dc500eedb397a9662c02e5a | 885a25dd2a9cd43801306d9e70b9ce89daec4406 | refs/heads/master | 2020-04-08T19:09:18.192738 | 2019-08-06T02:52:54 | 2019-08-06T02:52:54 | 159,642,468 | 4 | 5 | null | null | null | null | UTF-8 | Python | false | false | 872 | py | from app.libs.download import DOWNLOAD
from flask import current_app
class YuShuBook:
# isbn_url = 'https://api.douban.com/v2/book/isbn/{}'
# keyword_url = 'https://api.douban.com/v2/book/search?q={}&count={}&start={}'
isbn_url = 'http://t.yushu.im/v2/book/isbn/{}'
keyword_url = 'http://t.yushu.im/v2/book/search?q={}&count={}&start={}'
per_page = 15
@classmethod
def search_by_isbn(cls, isbn):
url = cls.isbn_url.format(isbn)
res = DOWNLOAD.get(url)
return res
@classmethod
def search_by_keyword(cls, keyword, page=1):
url = cls.keyword_url.format(keyword,current_app.config['PER_PAGE'],cls.calaculed_start(page))
res = DOWNLOAD.get(url)
return res
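    # Usage sketch (assumes a Flask application context, since PER_PAGE is read
    # from current_app.config, and that DOWNLOAD.get returns the parsed result):
    #     with app.app_context():
    #         books = YuShuBook.search_by_keyword("python", page=1)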
@staticmethod
    def calculated_start(page):
        return (page-1)*current_app.config['PER_PAGE'] | [
"[email protected]"
]
| |
f00aab7f8fb74e06325b57eac9054ee1eee7131a | 6ed9e72d2676447dcb68683d8823712110bb3b5e | /setup.py | b8e64f5229ce5f874831ed45e3a972b29e0365eb | []
| no_license | brandon-rhodes/python-johnhancock | 79a416e037817ee41fa384797a001ee6fcfa31b5 | 43b1128a9c217fad5ba6c6143cdbea97d6e44e51 | refs/heads/master | 2023-07-21T17:30:09.358965 | 2020-07-01T00:34:43 | 2020-07-01T00:34:43 | 6,408,682 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 308 | py | from distutils.core import setup
setup(
name='johnhancock',
version='0.1',
description='Sign a PDF using a PNG image',
author='Brandon Rhodes',
author_email='[email protected]',
#url='',
packages=['johnhancock'],
install_requires=['Pillow', 'pyPdf', 'reportlab'],
)
| [
"[email protected]"
]
| |
42bbe971b17f3ffdb89bfd9a3742a6d66de4dd62 | 85308aca1148f3b1223f39290dafde335d36d20b | /autoimpute/imputations/dataframe/single_imputer.py | f1df335daee93d17d6c984d29414930ae1879184 | [
"MIT"
]
| permissive | shabarka/autoimpute | 44ac4837e51b09f4f18e1c5de018664490d534ef | cc6f119387f9342e01126b2a8f925d116ae98488 | refs/heads/master | 2020-05-23T08:24:59.373502 | 2019-05-14T05:35:26 | 2019-05-14T05:35:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,094 | py | """This module performs one imputation of missing features in a dataset.
This module contains one class - the SingleImputer. Use this class to
impute each Series within a DataFrame one time. This class makes numerous
imputation methods available - both univariate and multivariate. Each method
runs once on its specified column. When one pass through the columns is
complete, the SingleImputer returns the single imputed dataset.
"""
import numpy as np
import pandas as pd
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils.validation import check_is_fitted
from autoimpute.utils import check_nan_columns, check_predictors_fit
from autoimpute.utils import check_strategy_fit
from autoimpute.utils.helpers import _one_hot_encode
from autoimpute.imputations.helpers import _get_observed
from .base_imputer import BaseImputer
from ..series import DefaultUnivarImputer
# pylint:disable=attribute-defined-outside-init
# pylint:disable=arguments-differ
# pylint:disable=protected-access
# pylint:disable=too-many-arguments
# pylint:disable=too-many-locals
# pylint:disable=too-many-instance-attributes
# pylint:disable=unused-argument
class SingleImputer(BaseImputer, BaseEstimator, TransformerMixin):
"""Techniques to impute Series with missing values one time.
The SingleImputer class takes a DataFrame and performs imputations on
each Series within the DataFrame. The Imputer does one pass for each
column, and it supports numerous imputation methods for each column.
The SingleImputer delegates imputation to respective SeriesImputers,
each of which maps to a specific strategy supported by the SingleImputer.
Most of the SeriesImputers are inductive (fit and transform for new data).
Transductive SeriesImputers (such as InterpolateImputer) still perform a
"mock" fit stage but do all the imputation work in the transform step. The
fit stage is performed to remain consistent with the sklearn API. The
class is a valid sklearn transformer that can be used in an sklearn
Pipeline because it inherits from the TransformerMixin and implements both
fit and transform methods.
"""
def __init__(self, strategy="default predictive", predictors="all",
imp_kwgs=None, copy=True, verbose=False, seed=None,
visit="default"):
"""Create an instance of the SingleImputer class.
As with sklearn classes, all arguments take default values. Therefore,
SingleImputer() creates a valid class instance. The instance is
used to set up a SingleImputer and perform checks on arguments.
        Args:
            strategy (str, iter, dict, optional): strategy for imputation.
                Default is "default predictive". If str, the same strategy
                is applied to every column. If iter or dict, strategies are
                mapped to specific columns.
            predictors (str, iter, dict, optional): defaults to `all`, i.e.
use all predictors. If `all`, every column will be used for
every class prediction. If a list, subset of columns used for
all predictions. If a dict, specify which columns to use as
predictors for each imputation. Columns not specified in dict
but present in `strategy` receive `all` other cols as preds.
Note predictors are IGNORED for univariate imputation methods,
so specifying is meaningless unless strategy is predictive.
imp_kwgs (dict, optional): keyword args for each SeriesImputer.
Default is None, which means default imputer created to match
specific strategy. `imp_kwgs` keys can be either columns or
strategies. If strategies, each column given that strategy is
instantiated with same arguments. When strategy is `default`,
`imp_kwgs` is ignored.
copy (bool, optional): create copy of DataFrame or operate inplace.
Default value is True. Copy created.
verbose (bool, optional): print more information to console.
Default value is False.
seed (int, optional): seed setting for reproducible results.
                Default is None. No validation, but values should be integer.
"""
BaseImputer.__init__(
self,
strategy=strategy,
imp_kwgs=imp_kwgs,
verbose=verbose,
visit=visit
)
self.strategy = strategy
self.predictors = predictors
self.copy = copy
self.seed = seed
def _fit_strategy_validator(self, X):
"""Private method to validate strategies appropriate for fit.
Checks whether strategies match with type of column they are applied
to. If not, error is raised through `check_strategy_fit` method.
"""
        # store column names; strategies and predictors are validated against them
cols = X.columns.tolist()
self._strats = check_strategy_fit(self.strategy, cols)
self._preds = check_predictors_fit(self.predictors, cols)
def _transform_strategy_validator(self, X):
"""Private method to prep and validate before transformation."""
# initial checks before transformation and check columns are the same
check_is_fitted(self, "statistics_")
X_cols = X.columns.tolist()
fit_cols = set(self._strats.keys())
diff_fit = set(fit_cols).difference(X_cols)
if diff_fit:
err = "Same columns that were fit must appear in transform."
raise ValueError(err)
@check_nan_columns
def fit(self, X, y=None):
"""Fit specified imputation methods to each column within a DataFrame.
The fit method calculates the `statistics` necessary to later
transform a dataset (i.e. perform actual imputations). Inductive
methods calculate statistic on the fit data, then impute new missing
data with that value. Most currently supported methods are inductive.
It's important to note that we have to fit X regardless of whether any
data is missing. Transform step may have missing data if new data is
used, so fit each column that appears in the given strategies.
Args:
X (pd.DataFrame): pandas DataFrame on which imputer is fit.
            y (pd.Series, pd.DataFrame, optional): response. Default is None.
                Determined internally in fit method. Arg is present to remain
compatible with sklearn Pipelines.
Returns:
self: instance of the SingleImputer class.
Raises:
ValueError: error in specification of strategies. Raised through
`check_strategy_fit`. See its docstrings for more info.
ValueError: error in specification of predictors. Raised through
`check_predictors_fit`. See its docstrings for more info.
"""
# first, prep columns we plan to use and make sure they are valid
self._fit_strategy_validator(X)
self.statistics_ = {}
# header print statement if verbose = true
if self.verbose:
ft = "FITTING IMPUTATION METHODS TO DATA..."
st = "Strategies & Predictors used to fit each column:"
print(f"{ft}\n{st}\n{'-'*len(st)}")
# perform fit on each column, depending on that column's strategy
# note that right now, operations are COLUMN-by-COLUMN, iteratively
if self.seed is not None:
np.random.seed(self.seed)
for column, method in self._strats.items():
imp = self.strategies[method]
imp_params = self._fit_init_params(column, method, self.imp_kwgs)
# try to create an instance of the imputer, given the args
try:
if imp_params is None:
imputer = imp()
else:
imputer = imp(**imp_params)
except TypeError as te:
name = imp.__name__
err = f"Invalid arguments passed to {name} __init__ method."
raise ValueError(err) from te
# print strategies if verbose
if self.verbose:
print(f"Column: {column}, Strategy: {method}")
# identify the column for imputation
ys = X[column]
# the fit depends on what type of strategy we use.
# first, fit univariate methods, which are straightforward.
if method in self.univariate_strategies:
imputer.fit(ys, None)
# now, fit on predictive methods, which are more complex.
if method in self.predictive_strategies:
preds = self._preds[column]
if preds == "all":
xs = X.drop(column, axis=1)
else:
xs = X[preds]
# fit the data on observed values only.
x_, y_ = _get_observed(xs, ys, self.verbose)
# before imputing, need to encode categoricals
x_ = _one_hot_encode(x_)
imputer.fit(x_, y_)
# finally, store imputer for each column as statistics
self.statistics_[column] = imputer
return self
@check_nan_columns
def transform(self, X):
"""Impute each column within a DataFrame using fit imputation methods.
The transform step performs the actual imputations. Given a dataset
        previously fit, `transform` imputes each column with its respective
imputed values from fit (in the case of inductive) or performs new fit
and transform in one sweep (in the case of transductive).
Args:
X (pd.DataFrame): DataFrame to impute (same as fit or new data).
Returns:
X (pd.DataFrame): imputed in place or copy of original.
Raises:
ValueError: same columns must appear in fit and transform.
Raised through _transform_strategy_validator.
"""
# copy the dataset if necessary, then prep predictors
if self.copy:
X = X.copy()
self._transform_strategy_validator(X)
if self.verbose:
trans = "PERFORMING IMPUTATIONS ON DATA BASED ON FIT..."
print(f"{trans}\n{'-'*len(trans)}")
# transformation logic
self.imputed_ = {}
if self.seed is not None:
np.random.seed(self.seed)
for column, imputer in self.statistics_.items():
imp_ix = X[column][X[column].isnull()].index
self.imputed_[column] = imp_ix.tolist()
# print to console for transformation if self.verbose
if self.verbose:
strat = imputer.statistics_["strategy"]
print(f"Transforming {column} with strategy '{strat}'")
if not imp_ix.empty:
print(f"Numer of imputations to perform: {imp_ix.size}")
else:
print(f"No imputations, moving to next column...")
# continue if there are no imputations to make
if imp_ix.empty:
continue
# implement transform logic for univariate
if imputer.strategy in self.univariate_strategies:
x_ = X[column]
# implement transform logic for predictive
if imputer.strategy in self.predictive_strategies:
preds = self._preds[column]
if preds == "all":
x_ = X.drop(column, axis=1)
else:
x_ = X[preds]
# isolate missingness
if isinstance(x_, pd.Series):
x_ = x_.to_frame()
x_ = x_.loc[imp_ix]
else:
x_ = x_.loc[imp_ix, :]
# default univariate impute for missing covariates
mis_cov = pd.isnull(x_).sum()
mis_cov = mis_cov[mis_cov > 0]
if any(mis_cov):
x_m = mis_cov.index
if self.verbose:
print(f"Missing Covariates:\n{mis_cov}\n")
print("Using single imputer for missing covariates...")
for col in x_m:
d = DefaultUnivarImputer()
d_imps = d.fit_impute(x_[col], None)
x_null = x_[col][x_[col].isnull()].index
x_.loc[x_null, col] = d_imps
# handling encoding again for prediction of imputations
x_ = _one_hot_encode(x_)
# perform imputation given the specified imputer and value for x_
X.loc[imp_ix, column] = imputer.impute(x_)
return X
def fit_transform(self, X, y=None):
"""Convenience method to fit then transform the same dataset.
Args:
X (pd.DataFrame): DataFrame used for fit and transform steps.
y (pd.DataFrame, pd.Series, Optional): response. Default is None.
Set internally by `fit` method.
Returns:
X (pd.DataFrame): imputed in place or copy of original.
"""
return self.fit(X, y).transform(X)
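# Minimal usage sketch (assumes pandas is available; the default strategy is
# chosen per column based on its dtype):
#     import pandas as pd
#     df = pd.DataFrame({"a": [1.0, None, 3.0], "b": [4.0, 5.0, None]})
#     imputed = SingleImputer().fit_transform(df)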
| [
"[email protected]"
]
| |
26bc6917e32b1470a9c5e17be693dbd5ee407aea | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2560/60580/313785.py | 220666b21e9e0726571f13ffcbd14a6e29126aca | []
| no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 497 | py | size = int(input())
a = 0
while a < size:
    b = input()  # first line of each case (unused by this solution)
    values = input().split()
    num = int(input())  # how many elements may be removed
    d = {}
    for v in values:
        d[v] = d.get(v, 0) + 1
    h = 0
    # Remove the least frequent values first (greedy), which minimizes the
    # number of distinct values that remain.
    for key, value in sorted(d.items(), key=lambda item: item[1]):
        if (num >= value):
            num = num - value
        else:
            h = h + 1
print(h)
a = a + 1 | [
"[email protected]"
]
| |
cc3722d0a6ada9953627c61b49f3cfed650d7810 | cb1d6bd2bf5edb6e38a9094b6a003f8c8d6c01e8 | /carrent/myapp/migrations/0001_initial.py | d3efa695ec10d0db57d17a7ac52e39711a0794cf | []
| no_license | wasit7/cs459_2018s | b3d8444f2c697c1211ba949634e132af1692f7bd | dee7edcefd0964e342bf958bf91314e3c27b4be6 | refs/heads/master | 2020-04-19T15:20:43.543867 | 2019-02-20T05:01:39 | 2019-02-20T05:01:39 | 168,271,186 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,074 | py | # Generated by Django 2.1.5 on 2019-02-20 04:39
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Car',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('brand', models.CharField(max_length=10)),
('price', models.DecimalField(decimal_places=2, max_digits=10)),
('purchasing_date', models.DateField()),
],
),
migrations.CreateModel(
name='Customer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('firstname', models.CharField(max_length=20)),
('lastname', models.CharField(max_length=20)),
('dob', models.DateField()),
('tel', models.CharField(max_length=10)),
],
),
]
| [
"[email protected]"
]
| |
d7a4a0f9aeec15da14a66873cdfff142ccbe3ea8 | 70e79590cd66ba1cd37ff977890164baa3d5c53c | /blog/migrations/0002_blogpage.py | cb87b9ef8a74366436525193a302f6efca545ccb | []
| no_license | thibaudcolas/codersofcolour | 7206c18f9c0e31f206f7d1dccaef2df1e46f5ecf | b085d5ec14c08d04ab2439f0a79730996cb87785 | refs/heads/master | 2022-12-02T15:06:20.984555 | 2020-06-10T18:06:02 | 2020-06-10T18:06:02 | 270,047,484 | 0 | 0 | null | 2020-07-26T14:16:37 | 2020-06-06T17:06:10 | Python | UTF-8 | Python | false | false | 963 | py | # Generated by Django 3.0.7 on 2020-06-06 16:42
from django.db import migrations, models
import django.db.models.deletion
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0045_assign_unlock_grouppagepermission'),
('blog', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='BlogPage',
fields=[
('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')),
('date', models.DateField(verbose_name='Post date')),
('intro', models.CharField(max_length=250)),
('body', wagtail.core.fields.RichTextField(blank=True)),
],
options={
'abstract': False,
},
bases=('wagtailcore.page',),
),
]
| [
"[email protected]"
]
| |
cb6eb73063e7f44c86ad29a66545b4d90f775ce6 | ae3d0e3c2fb614d96f6c787583c6e2e4cb654ad4 | /leetcode/118_generate.py | b1346f72c2348527aadf663326c7f5841837c9c0 | []
| no_license | Cjz-Y/shuati | 877c3f162ff75f764aa514076caccad1b6b43638 | 9ab35dbffed7865e41b437b026f2268d133357be | refs/heads/master | 2023-02-02T10:34:05.705945 | 2020-12-14T01:41:39 | 2020-12-14T01:41:39 | 276,884,136 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 590 | py | from typing import List
class Solution:
def generate(self, numRows: int) -> List[List[int]]:
result = []
if numRows == 0:
return result
result.append([1])
for i in range(1, numRows):
temp_array = []
for j in range(i + 1):
                temp_int = 0
                # Each entry is the sum of the (up to) two entries directly
                # above it in the previous row.
                if j - 1 >= 0:
                    temp_int += result[i - 1][j - 1]
                if j < i:
                    temp_int += result[i - 1][j]
temp_array.append(temp_int)
result.append(temp_array)
return result
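# Example: Solution().generate(4) -> [[1], [1, 1], [1, 2, 1], [1, 3, 3, 1]]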
| [
"[email protected]"
]
| |
6bb690802c91855dd0f6fa1add90a9b09c97c432 | 1e53216c58f3c7843031721305590b83dbaed3f2 | /week_four/form_practice/form_app/views.py | c648c4ca3b1982be30485cd36518e4f4e2084c59 | []
| no_license | MTaylorfullStack/python_july_20 | 991852ba12d6f06d6b93b8efc60b66ee311b5cb3 | bdfb0d9a74300f2d6743ac2d108571692ca43ad9 | refs/heads/master | 2022-12-12T18:03:00.886048 | 2020-08-27T23:53:31 | 2020-08-27T23:53:31 | 277,956,745 | 2 | 2 | null | 2023-06-30T20:06:11 | 2020-07-08T01:09:34 | Python | UTF-8 | Python | false | false | 1,469 | py | from django.shortcuts import render, redirect
import random
def index(request):
return render(request, 'form.html')
def success(request):
if "gold" not in request.session:
request.session['gold'] = 0
return render(request, "result.html")
def process(request):
print(request.POST['user_name'])
request.session['name'] = request.POST['user_name']
request.session['loc'] = request.POST['location']
request.session['lang'] = request.POST['fav_lang']
request.session['comment'] = request.POST['comment']
return redirect('/success')
def process_gold(request):
print(request.POST)
# add to some gold variable that we can view on the templates
# need to know how much gold to add, which form was submitted
if "farm" in request.POST:
gold = int(random.random() * 10 + 10)
request.session['gold'] += gold
if "cave" in request.POST:
gold = int(random.random() * 5 + 5)
request.session['gold'] += gold
if "house" in request.POST:
gold = int(random.random() * 3 + 2)
request.session['gold'] += gold
if "casino" in request.POST:
gold = int(random.random() * 50)
if int(random.random()*10) > 5:
# gain
request.session['gold'] += gold
else:
# lose
request.session['gold'] -= gold
return redirect('/success')
def reset(request):
request.session.flush()
return redirect('/')
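# Assumed urls.py wiring for these views (route names are hypothetical):
#     path('', views.index),
#     path('success', views.success),
#     path('process', views.process),
#     path('process_gold', views.process_gold),
#     path('reset', views.reset),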
| [
"[email protected]"
]
| |
b1c59fe04d1ba3b07bb1c5709673c2d7f7d2fd61 | 8eab8ab725c2132bb8d090cdb2d23a5f71945249 | /virt/Lib/site-packages/pygments/lexers/gsql.py | 6af99b27ce040871edb2972c020afb40d527b0bb | [
"MIT"
]
| permissive | JoaoSevergnini/metalpy | 6c88a413a82bc25edd9308b8490a76fae8dd76ca | c2d0098a309b6ce8c756ff840bfb53fb291747b6 | refs/heads/main | 2023-04-18T17:25:26.474485 | 2022-09-18T20:44:45 | 2022-09-18T20:44:45 | 474,773,752 | 3 | 1 | MIT | 2022-11-03T20:07:50 | 2022-03-27T22:21:01 | Python | UTF-8 | Python | false | false | 3,772 | py | """
pygments.lexers.gsql
~~~~~~~~~~~~~~~~~~~~
Lexers for TigerGraph GSQL graph query language
:copyright: Copyright 2006-2022 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups, using, this, words
from pygments.token import Keyword, Punctuation, Comment, Operator, Name,\
String, Number, Whitespace, Token
__all__ = ["GSQLLexer"]
class GSQLLexer(RegexLexer):
"""
For GSQL queries (version 3.x).
.. versionadded:: 2.10
"""
name = 'GSQL'
url = 'https://docs.tigergraph.com/dev/gsql-ref'
aliases = ['gsql']
filenames = ['*.gsql']
flags = re.MULTILINE | re.IGNORECASE
tokens = {
'root': [
include('comment'),
include('keywords'),
include('clauses'),
include('accums'),
include('relations'),
include('strings'),
include('whitespace'),
include('barewords'),
include('operators'),
],
'comment': [
(r'\#.*', Comment.Single),
(r'/\*(.|\n)*?\*/', Comment.Multiline),
],
'keywords': [
(words((
'ACCUM', 'AND', 'ANY', 'API', 'AS', 'ASC', 'AVG', 'BAG', 'BATCH', 'BETWEEN', 'BOOL', 'BOTH',
'BREAK', 'BY', 'CASE', 'CATCH', 'COALESCE', 'COMPRESS', 'CONTINUE', 'COUNT',
'CREATE', 'DATETIME', 'DATETIME_ADD', 'DATETIME_SUB', 'DELETE', 'DESC', 'DISTRIBUTED', 'DO',
'DOUBLE', 'EDGE', 'ELSE', 'END', 'ESCAPE', 'EXCEPTION', 'FALSE', 'FILE', 'FILTER', 'FLOAT', 'FOREACH', 'FOR',
'FROM', 'GRAPH', 'GROUP', 'GSQL_INT_MAX', 'GSQL_INT_MIN', 'GSQL_UINT_MAX', 'HAVING', 'IF',
'IN', 'INSERT', 'INT', 'INTERPRET', 'INTERSECT', 'INTERVAL', 'INTO', 'IS', 'ISEMPTY', 'JSONARRAY', 'JSONOBJECT', 'LASTHOP',
'LEADING', 'LIKE', 'LIMIT', 'LIST', 'LOAD_ACCUM', 'LOG', 'MAP', 'MATCH', 'MAX', 'MIN', 'MINUS', 'NOT',
'NOW', 'NULL', 'OFFSET', 'OR', 'ORDER', 'PATH', 'PER', 'PINNED', 'POST_ACCUM', 'POST-ACCUM', 'PRIMARY_ID', 'PRINT',
'QUERY', 'RAISE', 'RANGE', 'REPLACE', 'RESET_COLLECTION_ACCUM', 'RETURN', 'RETURNS', 'RUN', 'SAMPLE', 'SELECT', 'SELECT_VERTEX',
'SET', 'SRC', 'STATIC', 'STRING', 'SUM', 'SYNTAX', 'TARGET', 'TAGSTGT', 'THEN', 'TO', 'TO_CSV', 'TO_DATETIME', 'TRAILING', 'TRIM', 'TRUE',
'TRY', 'TUPLE', 'TYPEDEF', 'UINT', 'UNION', 'UPDATE', 'VALUES', 'VERTEX', 'WHEN', 'WHERE', 'WHILE', 'WITH'), prefix=r'(?<!\.)', suffix=r'\b'), Token.Keyword)
],
'clauses': [
(words(('accum', 'having', 'limit', 'order', 'postAccum', 'sample', 'where')), Name.Builtin)
],
'accums': [
(words(('andaccum', 'arrayaccum', 'avgaccum', 'bagaccum', 'bitwiseandaccum',
'bitwiseoraccum', 'groupbyaccum', 'heapaccum', 'listaccum', 'MapAccum',
'maxaccum', 'minaccum', 'oraccum', 'setaccum', 'sumaccum')), Name.Builtin),
],
'relations': [
(r'(-\s?)(\(.*\:\w?\))(\s?-)', bygroups(Operator, using(this), Operator)),
(r'->|<-', Operator),
(r'[.*{}\[\]\<\>\_]', Punctuation),
],
'strings': [
(r'"([^"\\]|\\.)*"', String),
(r'@{1,2}\w+', Name.Variable),
],
'whitespace': [
(r'\s+', Whitespace),
],
'barewords': [
(r'[a-z]\w*', Name),
(r'(\d+\.\d+|\d+)', Number),
],
'operators': [
(r'\$|[^0-9|\/|\-](\-\=|\+\=|\*\=|\\\=|\=|\=\=|\=\=\=|\+|\-|\*|\\|\+\=|\>|\<)[^\>|\/]', Operator),
(r'(\||\(|\)|\,|\;|\=|\-|\+|\*|\/|\>|\<|\:)', Operator),
],
}
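# Minimal usage sketch (standard Pygments API):
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     print(highlight("SELECT s FROM Person:s LIMIT 10", GSQLLexer(), TerminalFormatter()))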
| [
"[email protected]"
]
| |
d87f498e4a7d959b5fc54f1785b26a4afec1578f | a8c95f5152c08b487c3f85246150f9f7cdd557e0 | /torabot/frontend/admin/auth.py | 1e1558234cd037fcc769c44fbecd2bb02d103592 | [
"MIT"
]
| permissive | sorunis/torabot | b58113adab85e78551095e8f4551b0bbaf48e8f1 | 6d1a0a524f184cc33c5dfb3d7fc5e95af791a018 | refs/heads/master | 2020-12-11T07:19:54.269478 | 2014-05-04T18:15:54 | 2014-05-04T18:15:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 343 | py | from functools import wraps
from ...core.local import is_admin
from ..auth import require_session
from .errors import AdminAuthError
def require_admin(f):
@require_session
@wraps(f)
def inner(user_id, *args, **kargs):
if not is_admin:
raise AdminAuthError()
return f(*args, **kargs)
return inner
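# Usage sketch (assumes require_session resolves the session and passes the
# user_id, which require_admin consumes before calling the wrapped view):
#     @require_admin
#     def manage_queries():
#         ...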
| [
"[email protected]"
]
| |
fa66b83ea082481837e05a8ceeb01bb4f447c3b4 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02700/s830417832.py | 15b00c59d92d438ff9c194f96ea1df0e3dd49e2a | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 248 | py | a, b, c, d = map(int,input().split())
for i in range(100):
if c > b:
c -= b
elif c <= b:
print("Yes")
exit()
if a > d:
a -= d
elif a <= d:
print("No")
exit() | [
"[email protected]"
]
| |
9fb01516e4f8787ceff10bc29939e2893ff1d0d4 | 47596e586b3e21b31cf360be7cd1c7d3a5dc6163 | /Mode/TaxiExperiment.py | 9390b61dc4394538c8b4af40453059b67d5122ba | []
| no_license | jasonlingo/RoadSafety | bfef06abe0668a9cb8ead5b183008a53eabdefa2 | b20af54b915daf7635204e3b942b3ae4624887d7 | refs/heads/master | 2021-03-19T13:51:13.736277 | 2015-09-17T03:49:43 | 2015-09-17T03:49:43 | 36,019,601 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,514 | py | import sys
import os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from config import OUTPUT_DIRECTORY
from Util.kml import KmzParser
from Map.MapMatrix import MapMatrix
from Google.Direction import getDirection
from Google.Road import getRoadGPS
from Entity.Taxi import Taxi
from Entity.Crash import Crash
from Entity.Hospital import Hospital
from GPS.GPSPoint import GPSPoint
from File.Directory import createDirectory
from random import uniform
from time import sleep
import pygmaps
import webbrowser
class TaxiExperiment:
"""
    This program is for the Taxi-based EMS experiment.
    When a crash happens, this system will find the taxi that
    can arrive at the crash's location in the shortest time
    among all the taxis in the region. Then, the taxi will
send the patient from the crash's location to the nearest
hospital.
This system will record the traffic time for all patients
to be sent to a hospital and calculate the average traffic
time.
"""
def __init__(self, region_filename):
"""
Construct an experiment.
Args:
(String) region_filename: the location of the region
file from Google MAP kmz file
"""
# Parse a the region stored in a kmz file and get its GPS data stored
# in GPSPoint linked list.
self.region = KmzParser(region_filename)
# Create a MapMatrix used to store useful information for this experiment.
self.Map = MapMatrix(self.region)
# (Taxi) the location of all taxis.
taxis = None
# (Crash) the location of car creahes .
crashes = None
# (GPSPoint) list of hospital.
hospitals = None
# Send-to-hospital record, for average traffic time use.
sendHistory = []
def addHospital(self, hospital_filename):
"""
Add hospitals' locations to this region.
Args:
(String) hospital_filename: the locations of hospitals
to be added into the map
"""
# Parse the hospitals' GPS location from a kmz file and get their GPS
# linked list (GPSPoint).
self.hospitals = KmzParser(hospital_filename)
pointer = self.hospitals
while pointer != None:
# Find the sub-area that contains the GPS location of this hospital.
area = self.Map.findArea(pointer)
# Create a hospital object.
hos = Hospital(pointer.lat, pointer.lng)
# Add this hospital to the area.
area.addHospital(hos)
# Next hospital.
pointer = pointer.next
def addTaxi(self, taxis_filename):
"""
Add taxis according to the given locations.
Args:
(String) taxis_filename: the location of a list of all
the taxis in the region
"""
# Parse the locations of taxis stored in a kmz file and get their GPS
# location in a linked list (GPSPoint).
newTaxis = KmzParser(taxis_filename)
# Add each taxi to its area
pointer = newTaxis
while pointer != None:
# Find the sub-area that contains the GPS location of this taxi.
area = self.Map.findArea(pointer)
# Create a taxi object.
taxi = Taxi(pointer.lat, pointer.lng)
# Add this taxi to the area.
area.addTaxi(taxi)
# Next taxi.
pointer = pointer.next
if self.taxis != None:
# There already are taxis in this experiment region, so attach the new taxis
# to the tail of the taxis linked list
pointer = self.taxis.getTail()
pointer.next = newTaxis
else:
            # There is no taxi in this experiment region, so just replace the linked list
self.taxis = newTaxis
def addRandomTaxi(self, num):
"""
        Add taxis at random locations in the region.
Args:
(int) num: the number of taxis to be added.
"""
if self.taxis != None:
# If this region already has taxis, append the new taxi
# to the tail of the taxi list.
pointer = self.taxis.getTail()
while num > 0:
# Randomly create taxi's location.
            # uniform(a, b) will generate a number between a and b in
# a uniform distribution manner.
lat = uniform(self.Map.bottom, self.Map.top)
lng = uniform(self.Map.left, self.Map.right)
# create the taxi's GPS point.
taxiGPS = GPSPoint(lat, lng)
# Sometimes the genreated location is not on a road.
# If we want the location to be on a road, perform
# getRoadGPS() to get nearest road's GPS to that random
# location.
taxiGPS = getRoadGPS(taxiGPS)
# Check whether the taxi's GPS location is in the region.
# If the location is in the region, then add this taxi to
# this experiment.
if self.Map.isInnerPoint(taxiGPS):
num -= 1;
# Find the sub-area that contains the GPS location of this taxi.
area = self.Map.findArea(taxiGPS)
# Create two identical taxi objects in order to prevent "pass by reference".
taxi = Taxi(taxiGPS.lat, taxiGPS.lng)
taxi2 = Taxi(taxiGPS.lat, taxiGPS.lng)
taxi.next = None
area.addTaxi(taxi)
# Add this taxi to the taxis linked list.
if self.taxis == None:
self.taxis = taxi2
pointer = self.taxis
else:
pointer.next = taxi2
pointer = pointer.next
def addCrash(self, crash_filename):
"""
Add crashes according to the given locations.
Args:
(String) crash_filename: the file stores the locations
of crashes.
"""
# Parse the locations of crashes stored in a kmz file, and get
# their GPS data in a linked list (GPSPoint).
newCrashes = KmzParser(crash_filename)
# Add each crash to its area.
        pointer = newCrashes
while pointer != None:
# Find the sub-area that contains the GPS location of this crash.
area = self.Map.findArea(pointer)
# Create a crash object.
crash = Crash(pointer.lat, pointer.lng)
# Add this crash to this area.
area.addCrash(crash)
# Next crash.
pointer = pointer.next
if self.crashes != None:
# There already are crashes in this experiment region, so attach
# the new crashes to the tail of the crashes linked list.
pointer = self.crashes.getTail()
pointer.next = newCrashes
else:
            # There is no crash in this experiment region, so just replace
            # the linked list.
            self.crashes = newCrashes
def addRandomCrash(self, num):
"""
        Add crashes at random locations in the region.
        Args:
            (int) num: the number of crashes to add.
"""
if self.crashes != None:
pointer = self.crashes.getTail()
while num > 0:
# Create random crash's location
lat = uniform(self.Map.bottom, self.Map.top)
lng = uniform(self.Map.left, self.Map.right)
# Create the crash's GPS point.
crashGPS = GPSPoint(lat, lng)
# Sometimes the genreated location is not on a road.
# If we want the location to be on a road, perform
# getRoadGPS() to get nearest road's GPS to that random
# location.
#crashGPS = getRoadGPS(crashGPS)
# Check whether the taxi's GPS is in the region.
# If the location is in the region, then add this crash to
# this experiment.
if self.Map.isInnerPoint(crashGPS):
num -= 1;
# Find the sub-area that contains the GPS location of this crash.
area = self.Map.findArea(crashGPS)
# Create two identical taxi objects in order to prevent "pass by reference".
crash = Crash(crashGPS.lat, crashGPS.lng)
crash2 = Crash(crashGPS.lat, crashGPS.lng)
crash.next = None
area.addCrash(crash)
# Add this crash to the crashes linked list.
if self.crashes == None:
self.crashes = crash2
pointer = self.crashes
else:
pointer.next = crash2
pointer = pointer.next
def sendPatients(self):
"""
Send every patient to hospitals.
"""
if self.crashes == None:
# No patient
return
# Print every patient's location on screen.
self.crashes.printNode()
pointer = self.crashes
while pointer != None:
# Send every patient to the nearest hospital.
self.sendToHospital(pointer)
pointer = pointer.next
def sendToHospital(self, crash):
"""
Send people to hospital from the crash's location by a Taxi.
Args:
(Crash) crash: the crash event.
"""
# Find the sub-area that contains this crash.
area = self.Map.findArea(crash)
if area == None:
# The system cannot find a sub-area that contains this crash.
print "This crash has a wrong location!!"
return
# Get the row and column number of the found area.
row = area.row
col = area.col
print "A crash happened in area[%d, %d]" % (row, col)
# The maximal number of row and column of this MapMatrix.
maxRow = len(self.Map.areas)
maxCol = len(self.Map.areas[0])
# Check ranges. If the row and column number are larger than the
# maximal row and column number of this mapMatrix, then stop this
# process.
if row > maxRow or col > maxCol:
print "This crash has a wrong location!!"
return
# Start to check from current location and its 8 neighbor sub-areas,
# a 3x3 grids centered at the current location, and expand the range
# until a taxi or hospital is found.
# When four flags (reachTop, reachBottom, reachRight, reachLeft)
# are all True, stop the checking process because all the grids have
# already been checked.
reachTop = False
reachBottom = False
reachRight = False
reachLeft = False
# The data set for tracking the nearest taxi.
foundTaxi = False
shortestTime = float("inf")
nearestTaxi = None
nearestDirection = None
# The GPS string (lat,lng) of this crash. Used in getDirection().
destination = str(crash.lat) + "," + str(crash.lng)
# Start to find the nearest taxi.
i = 0
while not (reachTop and reachBottom and reachRight and reachLeft):
for j in xrange(row - i, row + i + 1):
for k in xrange(col - i, col + i + 1):
if abs(row - j) < i and abs(col - k) < i:
# Those grids have already been checked, so skip to the next loop.
continue
if self.Map.hasTaxi(j,k):
# Get the taxis linked list of this sub-area.
taxi = self.Map.areas[j][k].taxis
while taxi != None:
if taxi.available:
# This taxi is available, so mark foundTaxi to True.
foundTaxi = True
# Get a direction from the taxi's location to
# the crash's location using Google Direciton API.
source = str(taxi.lat) + "," + str(taxi.lng)
direction = getDirection(source, destination)
duration = float("inf")
if direction != None:
duration = direction.getTotalDuration()
if duration < shortestTime:
# Update the nearest taxi's data.
shortestTime = duration
nearestTaxi = taxi
nearestDirection = direction
taxi = taxi.next
# If foundTaxi == True and i > 0, then we will assign the found taxi
# to this crash.
# i must be larger than 0 because we want to check at least 9 grids
# centered at the crash's location.
if foundTaxi and i > 0:
            # Mark this taxi as occupied (no longer available).
nearestTaxi.available = False
# Update the taxi's GPS location to the crash's location.
nearestTaxi.lat = crash.lat
nearestTaxi.lng = crash.lng
# Market this patient as saved.
crash.isSaved = True
# Print the traffic time for the taxi to arrive the crash's
# location on screen.
minute, second = nearestDirection.getDurationMS()
print "The time for a nearest taxi to arrive this crash's location is "\
"%dmins %dsec" % (minute, second)
# Make the taxi to find the nearest hospital by get and compare
# the traffic time of each direction to different hospital.
HospitalDirection = nearestTaxi.toNearestHospital(self.hospitals)
minute, second = HospitalDirection.getDurationMS()
print "Sending this patient to the nearest hospital needs------------ "\
"%dmins %dsec" % (minute, second)
# Concatenate the two directions so that it become a direction from the
# taxi's original location to the crash's then to the hospital's.
tail = nearestDirection.getTail()
tail.next = HospitalDirection
# Get the total traffic time.
minute, second = nearestDirection.getDurationMS()
print "Total time---------------------------------------------------- "\
"%dmins %dsec" % (minute, second)
# Append this direction to the sendHistory list for calculating the
# average traffic time.
self.sendHistory.append(nearestDirection)
break
i += 1
# Check whether any side of this region has been reached.
if reachBottom == False and row + i >= maxRow:
reachBottom = True
if reachTop == False and row - i < 0:
reachTop = True
if reachRight == False and col + i >= maxCol:
reachRight = True
if reachLeft == False and col - i < 0:
reachLeft = True
def showMap(self):
"""
Show the experiment result on Google map, including hospitals,
taxis, crashes, and taxis' route.
"""
# Set the center point of this map.
midLat = (self.Map.top + self.Map.bottom) / 2.0
midLng = (self.Map.left + self.Map.right) / 2.0
mymap = pygmaps.maps(midLat, midLng, 10)
# Add region line (border).
mymap.addpath(self.region.toList(), "#FF0000") #red
# Add rectangle lines.
rectangle = [(self.Map.top, self.Map.left),
(self.Map.top, self.Map.right),
(self.Map.bottom, self.Map.right),
(self.Map.bottom, self.Map.left),
(self.Map.top, self.Map.left)]
mymap.addpath(rectangle, "#000000") #black
# Get the length of the side of each sub-area.
latDiff = self.Map.latDiff
lngDiff = self.Map.lngDiff
# Add vertical lines.
lng = self.Map.left
while lng <= self.Map.right: # From left to right.
line = [(self.Map.top, lng),
(self.Map.bottom, lng)]
mymap.addpath(line, "#000000") #black
lng += lngDiff
# Add last vertical line, using the residual length.
if lng - lngDiff < self.Map.right:
line = [(self.Map.top, self.Map.right),
(self.Map.bottom, self.Map.right)]
mymap.addpath(line, "#000000") #black
# Add horizontal lines.
lat = self.Map.top
while lat >= self.Map.bottom: # From top to bottom.
line = [(lat, self.Map.left),
(lat, self.Map.right)]
mymap.addpath(line, "#000000") #black
lat -= latDiff
# Add last horizontal line, using the residual length.
if lat + latDiff > self.Map.bottom:
line = [(self.Map.bottom, self.Map.left),
(self.Map.bottom, self.Map.right)]
mymap.addpath(line, "#000000") #black
# Add taxis' locations.
pointer = self.taxis
while pointer != None:
mymap.addpoint(pointer.lat, pointer.lng, "#0000FF") #blue
pointer = pointer.next
# Add crashes' locations.
pointer = self.crashes
while pointer != None:
mymap.addpoint(pointer.lat, pointer.lng, "#00FF00") #green
pointer = pointer.next
# Add hospitals' locations.
pointer = self.hospitals
while pointer != None:
mymap.addpoint(pointer.lat, pointer.lng, "#FF0000") #green
pointer = pointer.next
# Add taxi routes.
totalDuration = 0
if len(self.sendHistory) > 0:
for direction in self.sendHistory:
mymap.addpath(direction.toList(), "#0000FF") #blue
totalDuration += direction.getTotalDuration()
# Calculate average traffic time.
i = len(self.sendHistory)
if i > 0:
avgDuration = totalDuration / i
sec = avgDuration % 60
mins = (avgDuration - sec) / 60
print "Average traffic time: %dmins %dsec" % (mins, sec)
# The output directory.
output_directory = OUTPUT_DIRECTORY + "Taxi_based_EMS/"
# Check whether the output directory exists. If not, create the directory.
createDirectory(output_directory)
# The file name of the result map.
mapFilename = output_directory + "map.html"
# Draw the map.
mymap.draw('./' + mapFilename)
# Open the map file on a web browser.
url = "file://" + os.getcwd() + "/" + mapFilename
webbrowser.open_new(url)
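# Minimal usage sketch (file names are hypothetical; assumes the kmz files
# exist and the Google Direction/Road helpers above have API access):
#     experiment = TaxiExperiment("region.kmz")
#     experiment.addHospital("hospitals.kmz")
#     experiment.addRandomTaxi(20)
#     experiment.addRandomCrash(5)
#     experiment.sendPatients()
#     experiment.showMap()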
| [
"[email protected]"
]
| |
4a86fdc37d06384976bba51986fb40e2f225bded | a62390c9497363a9afbaac79336b3e1cb04e096e | /Day 23/turtle_crossing_game.py | dee8b9aae7eb48119d8fb87fe7a405c05db3634d | []
| no_license | PravinSelva5/100-Days-of-Code | 2cf3ae9766bdfd053532823214276e291024d5a2 | 3a299af3de5f4f0cab05fc73563df29e3c292560 | refs/heads/master | 2023-07-13T06:29:15.360016 | 2021-08-22T16:50:49 | 2021-08-22T16:50:49 | 380,812,653 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 828 | py | import time
from turtle import Screen
from player import Player
from car_manager import CarManager
from scoreboard import Scoreboard
screen = Screen()
screen.setup(width=600, height=600)
screen.tracer(0)
player = Player()
car_manager = CarManager()
scoreboard = Scoreboard()
screen.listen()
screen.onkey(player.go_up, "Up")
game_is_on = True
while game_is_on:
time.sleep(0.1)
screen.update()
car_manager.create_car()
car_manager.move_cars()
# Detect collision with car
for car in car_manager.all_cars:
if car.distance(player) < 20:
game_is_on = False
scoreboard.game_over()
# Detect a successful crossing
if player.is_at_finish_line():
player.go_to_start()
car_manager.next_level()
scoreboard.increase_level()
screen.exitonclick()
| [
"[email protected]"
]
| |
0d89b4421f4197551bd71933e7e38fc0a07c5a69 | 012837eafe45c8f7ee5fc77d4c4d7725d5314c5c | /workshops/9-section/5-clazz.py | 387fdf5912dcfa0b0f19553d2a5d7c3b235a1fe0 | [
"MIT"
]
| permissive | ai-erorr404/opencv-practice | e9408cf006779a678cf3a30fc60e9dbeb3c8e493 | 60ef5e4aec61ee5f7e675fb919e8f612e59f664a | refs/heads/master | 2021-02-08T11:17:04.763522 | 2020-02-22T09:43:04 | 2020-02-22T09:43:04 | 244,146,060 | 1 | 1 | MIT | 2020-03-01T12:35:02 | 2020-03-01T12:35:01 | null | UTF-8 | Python | false | false | 2,927 | py | #!/usr/bin/env python3
# -*- coding=utf-8 -*-
import cv2 as cv
from goto import with_goto
import math
import numpy as np
"""
KLT optical flow tracking, part two:
    Remove stationary points and draw tracking trajectories. The processing flow is:
    read the first frame -> detect feature points -> keep the feature points ->
    read the second frame (start tracking) -> track the feature points -> drop
    lost feature points -> keep the tracked feature points -> replace the first
    frame with the second -> replace the second frame with each subsequent input
    frame -> pick new feature points to replace the lost ones -> save the feature
    point data and loop back to reading the second frame.
"""
MAX_CORNERS = 100
features_params = dict(maxCorners=MAX_CORNERS, qualityLevel=0.01, minDistance=10, blockSize=3, mask=None)
lk_params = dict(nextPts=None, winSize=(31, 31), maxLevel=3,
criteria=(cv.TERM_CRITERIA_EPS | cv.TERM_CRITERIA_COUNT, 30, 0.01))
color_set = np.random.randint(0, 255, (MAX_CORNERS, 3))
# points = []
@with_goto
def main():
capture = cv.VideoCapture("../../../raspberry-auto/pic/vtest.avi")
ret, frame = capture.read()
if True is not ret:
print("can't read any video")
goto .end
prv_frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
prv_frame = cv.medianBlur(prv_frame, 3)
prv_corners = cv.goodFeaturesToTrack(prv_frame, **features_params)
# points += prv_corners
while True:
ret, frame = capture.read()
if True is not ret:
print("can't read next frame.")
break
next_frame = cv.cvtColor(frame, cv.COLOR_BGR2GRAY)
next_frame = cv.medianBlur(next_frame, 3)
next_corners, status, err = cv.calcOpticalFlowPyrLK(prv_frame, next_frame, prv_corners, **lk_params)
old_pts = prv_corners[1 == status]
new_pts = next_corners[1 == status]
for i, (older, newer) in enumerate(zip(old_pts, new_pts)):
a, b = older.ravel()
c, d = newer.ravel()
width = math.pow(abs(a - c), 2)
height = math.pow(abs(b - d), 2)
hypotenuse = math.sqrt(width + height)
if 2 < hypotenuse:
cv.circle(frame, (c, d), 5, color_set[i].tolist(), -1)
cv.line(frame, (c, d), (a, b), color_set[i].tolist(), 2, cv.LINE_8)
# else:
# new_pts.remove(older)
# if 40 > len(new_pts):
# next_corners, status, err = cv.calcOpticalFlowPyrLK(prv_frame, next_frame, prv_corners, **lk_params)
# new_pts = next_corners[1 == status]
cv.imshow("frame", frame)
key = cv.waitKey(30) & 0xff
if 27 == key:
break
        # Update the previous frame and corners for the next iteration.
prv_frame = next_frame.copy()
prv_corners = new_pts.reshape(-1, 1, 2)
label .end
capture.release()
cv.destroyAllWindows()
if "__main__" == __name__:
main()
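# Run note (assumptions: opencv-python is installed, and the `goto` import at
# the top is provided by the third-party goto-statement package; the input
# video path is hardcoded inside main()).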
| [
"[email protected]"
]
| |
a35891111803780d3dcda76b80534ba76ce26d0e | b87ea98bc166cade5c78d246aeb0e23c59183d56 | /samples/openapi3/client/petstore/python/petstore_api/paths/store_order/post.py | d0b0ddb2402df4eb56c081be1fa45e79a4cb2ea7 | [
"Apache-2.0"
]
| permissive | holisticon/openapi-generator | 88f8e6a3d7bc059c8f56563c87f6d473694d94e5 | 6a67551ea54a1aa9a49eb48ee26b4e9bb7fb1272 | refs/heads/master | 2023-05-12T02:55:19.037397 | 2023-04-14T08:31:59 | 2023-04-14T08:31:59 | 450,034,139 | 1 | 0 | Apache-2.0 | 2022-01-20T09:34:14 | 2022-01-20T09:34:13 | null | UTF-8 | Python | false | false | 11,837 | py | # coding: utf-8
"""
Generated by: https://openapi-generator.tech
"""
from dataclasses import dataclass
import typing_extensions
import urllib3
from urllib3._collections import HTTPHeaderDict
from petstore_api import api_client, exceptions
from datetime import date, datetime # noqa: F401
import decimal # noqa: F401
import functools # noqa: F401
import io # noqa: F401
import re # noqa: F401
import typing # noqa: F401
import typing_extensions # noqa: F401
import uuid # noqa: F401
import frozendict # noqa: F401
from petstore_api import schemas # noqa: F401
from petstore_api.model.order import Order
from . import path
# body param
SchemaForRequestBodyApplicationJson = Order
request_body_order = api_client.RequestBody(
content={
'application/json': api_client.MediaType(
schema=SchemaForRequestBodyApplicationJson),
},
required=True,
)
SchemaFor200ResponseBodyApplicationXml = Order
SchemaFor200ResponseBodyApplicationJson = Order
@dataclass
class ApiResponseFor200(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: typing.Union[
SchemaFor200ResponseBodyApplicationXml,
SchemaFor200ResponseBodyApplicationJson,
]
headers: schemas.Unset = schemas.unset
_response_for_200 = api_client.OpenApiResponse(
response_cls=ApiResponseFor200,
content={
'application/xml': api_client.MediaType(
schema=SchemaFor200ResponseBodyApplicationXml),
'application/json': api_client.MediaType(
schema=SchemaFor200ResponseBodyApplicationJson),
},
)
@dataclass
class ApiResponseFor400(api_client.ApiResponse):
response: urllib3.HTTPResponse
body: schemas.Unset = schemas.unset
headers: schemas.Unset = schemas.unset
_response_for_400 = api_client.OpenApiResponse(
response_cls=ApiResponseFor400,
)
_status_code_to_response = {
'200': _response_for_200,
'400': _response_for_400,
}
_all_accept_content_types = (
'application/xml',
'application/json',
)
class BaseApi(api_client.Api):
@typing.overload
def _place_order_oapg(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: typing_extensions.Literal["application/json"] = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def _place_order_oapg(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def _place_order_oapg(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
skip_deserialization: typing_extensions.Literal[True],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def _place_order_oapg(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization,
]: ...
def _place_order_oapg(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = 'application/json',
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
"""
Place an order for a pet
:param skip_deserialization: If true then api_response.response will be set but
api_response.body and api_response.headers will not be deserialized into schema
class instances
"""
used_path = path.value
_headers = HTTPHeaderDict()
# TODO add cookie handling
if accept_content_types:
for accept_content_type in accept_content_types:
_headers.add('Accept', accept_content_type)
if body is schemas.unset:
raise exceptions.ApiValueError(
'The required body parameter has an invalid value of: unset. Set a valid value instead')
_fields = None
_body = None
serialized_data = request_body_order.serialize(body, content_type)
_headers.add('Content-Type', content_type)
if 'fields' in serialized_data:
_fields = serialized_data['fields']
elif 'body' in serialized_data:
_body = serialized_data['body']
response = self.api_client.call_api(
resource_path=used_path,
method='post'.upper(),
headers=_headers,
fields=_fields,
body=_body,
stream=stream,
timeout=timeout,
)
if skip_deserialization:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
else:
response_for_status = _status_code_to_response.get(str(response.status))
if response_for_status:
api_response = response_for_status.deserialize(response, self.api_client.configuration)
else:
api_response = api_client.ApiResponseWithoutDeserialization(response=response)
if not 200 <= response.status <= 299:
raise exceptions.ApiException(
status=response.status,
reason=response.reason,
api_response=api_response
)
return api_response
class PlaceOrder(BaseApi):
# this class is used by api classes that refer to endpoints with operationId fn names
@typing.overload
def place_order(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: typing_extensions.Literal["application/json"] = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def place_order(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def place_order(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
skip_deserialization: typing_extensions.Literal[True],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def place_order(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization,
]: ...
def place_order(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = 'application/json',
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
return self._place_order_oapg(
body=body,
content_type=content_type,
accept_content_types=accept_content_types,
stream=stream,
timeout=timeout,
skip_deserialization=skip_deserialization
)
class ApiForpost(BaseApi):
# this class is used by api classes that refer to endpoints by path and http method names
@typing.overload
def post(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: typing_extensions.Literal["application/json"] = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def post(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: typing_extensions.Literal[False] = ...,
) -> typing.Union[
ApiResponseFor200,
]: ...
@typing.overload
def post(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
skip_deserialization: typing_extensions.Literal[True],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
) -> api_client.ApiResponseWithoutDeserialization: ...
@typing.overload
def post(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = ...,
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = ...,
) -> typing.Union[
ApiResponseFor200,
api_client.ApiResponseWithoutDeserialization,
]: ...
def post(
self,
body: typing.Union[SchemaForRequestBodyApplicationJson,],
content_type: str = 'application/json',
accept_content_types: typing.Tuple[str] = _all_accept_content_types,
stream: bool = False,
timeout: typing.Optional[typing.Union[int, typing.Tuple]] = None,
skip_deserialization: bool = False,
):
return self._place_order_oapg(
body=body,
content_type=content_type,
accept_content_types=accept_content_types,
stream=stream,
timeout=timeout,
skip_deserialization=skip_deserialization
)
| [
"[email protected]"
]
| |
b7bb5a4bd176aab25895eb1ca53fd4d48febbf54 | 5054c805be8bf94d660b806dc7ee5a9f426d49bc | /ee/clickhouse/views/test/test_clickhouse_paths.py | 90ffd672f7c6c5fc992aae663c7cdba169a39947 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-warranty-disclaimer"
]
| permissive | BElluu/posthog | 31a121ee1e690ee5d88cb268a3cd922031dcbee4 | 1e207230ba285c64ea8e7f58fd1f78ef7d7c160a | refs/heads/master | 2022-11-29T09:37:13.072632 | 2022-07-06T16:38:37 | 2022-07-06T16:38:37 | 254,587,554 | 0 | 0 | null | 2020-04-10T08:48:41 | 2020-04-10T08:48:41 | null | UTF-8 | Python | false | false | 9,271 | py | import json
from rest_framework import status
from posthog.constants import FUNNEL_PATH_AFTER_STEP, INSIGHT_FUNNELS, INSIGHT_PATHS
from posthog.test.base import APIBaseTest, ClickhouseTestMixin, _create_event, _create_person
class TestClickhousePaths(ClickhouseTestMixin, APIBaseTest):
def _create_sample_data(self, num, delete=False):
for i in range(num):
person = _create_person(distinct_ids=[f"user_{i}"], team=self.team)
_create_event(
event="step one",
distinct_id=f"user_{i}",
team=self.team,
timestamp="2021-05-01 00:00:00",
properties={"$browser": "Chrome"},
)
if i % 2 == 0:
_create_event(
event="step two",
distinct_id=f"user_{i}",
team=self.team,
timestamp="2021-05-01 00:10:00",
properties={"$browser": "Chrome"},
)
_create_event(
event="step three",
distinct_id=f"user_{i}",
team=self.team,
timestamp="2021-05-01 00:20:00",
properties={"$browser": "Chrome"},
)
if delete:
person.delete()
def test_insight_paths_basic(self):
_create_person(team=self.team, distinct_ids=["person_1"])
_create_event(
properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about"}, distinct_id="person_1", event="$pageview", team=self.team,
)
response = self.client.get(f"/api/projects/{self.team.id}/insights/path",).json()
self.assertEqual(len(response["result"]), 1)
def test_insight_paths_basic_exclusions(self):
_create_person(team=self.team, distinct_ids=["person_1"])
_create_event(
distinct_id="person_1", event="first event", team=self.team,
)
_create_event(
distinct_id="person_1", event="second event", team=self.team,
)
_create_event(
distinct_id="person_1", event="third event", team=self.team,
)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path", data={"exclude_events": '["second event"]'}
).json()
self.assertEqual(len(response["result"]), 1)
def test_backwards_compatible_path_types(self):
_create_person(team=self.team, distinct_ids=["person_1"])
_create_event(
properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/something else"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$screen_name": "/screen1"}, distinct_id="person_1", event="$screen", team=self.team,
)
_create_event(
distinct_id="person_1", event="custom1", team=self.team,
)
_create_event(
distinct_id="person_1", event="custom2", team=self.team,
)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path", data={"path_type": "$pageview", "insight": "PATHS",}
).json()
self.assertEqual(len(response["result"]), 2)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path", data={"path_type": "custom_event", "insight": "PATHS"}
).json()
self.assertEqual(len(response["result"]), 1)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path", data={"path_type": "$screen", "insight": "PATHS"}
).json()
self.assertEqual(len(response["result"]), 0)
def test_backwards_compatible_start_point(self):
_create_person(team=self.team, distinct_ids=["person_1"])
_create_event(
properties={"$current_url": "/"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/something else"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$screen_name": "/screen1"}, distinct_id="person_1", event="$screen", team=self.team,
)
_create_event(
properties={"$screen_name": "/screen2"}, distinct_id="person_1", event="$screen", team=self.team,
)
_create_event(
distinct_id="person_1", event="custom1", team=self.team,
)
_create_event(
distinct_id="person_1", event="custom2", team=self.team,
)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path",
data={"path_type": "$pageview", "insight": "PATHS", "start_point": "/about",},
).json()
self.assertEqual(len(response["result"]), 1)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path",
data={"path_type": "custom_event", "insight": "PATHS", "start_point": "custom2",},
).json()
self.assertEqual(len(response["result"]), 0)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path",
data={"path_type": "$screen", "insight": "PATHS", "start_point": "/screen1",},
).json()
self.assertEqual(len(response["result"]), 1)
def test_path_groupings(self):
_create_person(team=self.team, distinct_ids=["person_1"])
_create_event(
properties={"$current_url": "/about_1"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about_2"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/something else"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about3"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about4"}, distinct_id="person_1", event="$pageview", team=self.team,
)
_create_person(team=self.team, distinct_ids=["person_2"])
_create_event(
properties={"$current_url": "/about_1"}, distinct_id="person_2", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about_2"}, distinct_id="person_2", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/something else"}, distinct_id="person_2", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about3"}, distinct_id="person_2", event="$pageview", team=self.team,
)
_create_event(
properties={"$current_url": "/about4"}, distinct_id="person_2", event="$pageview", team=self.team,
)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path",
data={"insight": "PATHS", "path_groupings": json.dumps(["/about*"])},
).json()
self.assertEqual(len(response["result"]), 2)
response = self.client.get(
f"/api/projects/{self.team.id}/insights/path",
data={"insight": "PATHS", "path_groupings": json.dumps(["/about_*"])},
).json()
self.assertEqual(len(response["result"]), 3)
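
    # Sketch of the grouping exercised above (assumption: the "*" in a
    # path_groupings entry is glob-style and matches any suffix, so "/about*"
    # also swallows "/about3" and "/about4", while "/about_*" keeps them apart):
    #
    #   import fnmatch
    #   fnmatch.fnmatch('/about3', '/about*')    # True  -> grouped
    #   fnmatch.fnmatch('/about3', '/about_*')   # False -> separate node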
def test_funnel_path_post(self):
self._create_sample_data(7)
request_data = {
"insight": INSIGHT_PATHS,
"funnel_paths": FUNNEL_PATH_AFTER_STEP,
"filter_test_accounts": "false",
"date_from": "2021-05-01",
"date_to": "2021-05-07",
}
funnel_filter = {
"insight": INSIGHT_FUNNELS,
"interval": "day",
"date_from": "2021-05-01 00:00:00",
"date_to": "2021-05-07 00:00:00",
"funnel_window_interval": 7,
"funnel_window_interval_unit": "day",
"funnel_step": 2,
"events": [
{"id": "step one", "order": 0},
{"id": "step two", "order": 1},
{"id": "step three", "order": 2},
],
}
post_response = self.client.post(
f"/api/projects/{self.team.id}/insights/path/", data={**request_data, "funnel_filter": funnel_filter}
)
self.assertEqual(post_response.status_code, status.HTTP_200_OK)
post_j = post_response.json()
self.assertEqual(
post_j["result"],
[{"source": "1_step two", "target": "2_step three", "value": 4, "average_conversion_time": 600000.0}],
)
| [
"[email protected]"
]
| |
a149db09fb1a3483f75201ee41bba85f77be8210 | 8999b8522b18a52d09e1c76d28ee77f0d022e8fd | /pyrarcrack/pyrarcrack.py | 1e8a4da42a11f4804de1c921b09bf62577372f1e | [
"Apache-2.0"
]
| permissive | abhushansahu/py-rarcrack | 7bbba825e7e9794b538818e3f1958758f58ca767 | 4326da72d9a351f1e64c8657cacada08bdbada0b | refs/heads/master | 2021-01-14T16:04:03.282548 | 2019-10-27T18:09:40 | 2019-10-27T18:09:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,151 | py | #!/usr/bin/python
#
##################################################
######## Please Don't Remove Author Name #########
############### Thanks ###########################
##################################################
#
#
__author__='''
Suraj Singh
[email protected]
http://bitforestinfo.blogspot.in/
'''
# =================Other Configuration================
# Usages :
usage = "usage: %prog [options] "
# Version
Version="%prog 0.0.1"
# ====================================================
print __author__
# Import Modules
import rarfile,optparse,sys,fileinput,time
class main:
def __init__(self):
self.extract_input_data()
self.check_input_conditions()
self.start_cracking_engine()
def time_management(self):
print "[*] Starting Time ",self.starttime
print "[*] Closing Time ",self.closetime
print "[*] Password Try ",self.pwdtries
print "[*] Average Speed ",self.pwdtries/(self.closetime-self.starttime)
return
def start_cracking_engine(self):
print "[+] Loading rarfile... ",
fileload=rarfile.RarFile(self.filename)
print "OK"
if self.dictionery:
print "[+] Using Dictonery Option.... OK"
print "[+] Loading Dictonery File... OK"
print "[+] Brute Force Started ..."
for i in fileinput.input(self.dictionery):
pwd=i.strip('\n')
self.extracting_engine(fileload,pwd)
if self.crunch:
print "[+] Connection Stablished as Pipe... OK"
print "[+] Brute Force Started ..."
for i in sys.stdin:
pwd=i.strip('\n')
self.extracting_engine(fileload,pwd)
self.show_info_message()
return
def check_input_conditions(self):
if not self.filename:
print "[ Error ] Please Provide Rar File Path "
sys.exit(0)
print "[+] Checking Rar File Condition ...",
if not rarfile.is_rarfile(self.filename):
print "[ Error ] Bad Rar file"
sys.exit(0)
print " Ok"
if not self.dictionery and not self.crunch:
print "[ Error ] Please Provide Dictonery Or Crunch Or Password Option"
sys.exit(0)
if self.dictionery and self.crunch:
print "[ Error ] Please Choose Any One Option From Dict or Crunch"
sys.exit(0)
return
def extracting_engine(self,file,pwd):
self.pwdresult=None
try:
file.extractall(self.output,pwd=str(pwd))
self.show_info_message(pwd=pwd)
self.pwdresult=True
except Exception as e:
if str(e).find('Permission')!=-1:
self.show_info_message(pwd=pwd)
self.pwdresult=True
else:
self.pwdresult=None
self.pwdtries=self.pwdtries+1
return
def show_info_message(self,pwd=None):
if pwd:
data="\n\t !-Congratulation-! \n\t\tPassword Found = "+pwd+'\n'
else:
data="\n\t Sorry! Password Not Found \n\n"
print data
if self.result:
print "[+] Saving Output in ",self.result
f=open(self.result,'a')
f.write(data)
f.close()
self.closetime=time.time()
self.time_management()
if pwd:
print "[+] Exiting..."
sys.exit(0)
return
def extract_input_data(self):
self.starttime=time.time()
self.pwdtries=0
# Extracting Function
parser = optparse.OptionParser(usage, version=Version)
parser.add_option("-f", "--file", action="store", type="string", dest="filename",help="Please Specify Path of Rar File", default=None)
parser.add_option("-d", "--dict", action="store", type="string", dest="dictionery", help="Please Specify Path of Dictionery.", default=None)
parser.add_option("-o", "--output", action="store", type="string", dest="output", help="Please Specify Path for Extracting", default='.')
parser.add_option("-r", "--result", action="store", type="string", dest="result", help="Please Specify Path if You Want to Save Result", default=None)
parser.add_option("-c", "--crunch", action="store", type="string", dest="crunch", help="For Using Passwords Directly from crunch use this arguments: -c True or --crunch True", default=None)
(option, args)=parser.parse_args()
# Record Inputs Data
print "[+] Extracting Input Data..."
self.filename=option.filename
self.dictionery=option.dictionery
self.output=option.output
self.result=option.result
self.crunch=option.crunch
return
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
d5a296c83ab4f727f06b118c20ec16b6cc4fe178 | 34652a47355a8dbe9200db229a1bbc62619de364 | /BASE SCRIPTS/dictionary_basic_ops.py | fc5105e8a1d904c625467fc820f6c0c6b8dba090 | []
| no_license | btrif/Python_dev_repo | df34ab7066eab662a5c11467d390e067ab5bf0f8 | b4c81010a1476721cabc2621b17d92fead9314b4 | refs/heads/master | 2020-04-02T13:34:11.655162 | 2019-11-10T11:08:23 | 2019-11-10T11:08:23 | 154,487,015 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,876 | py | __author__ = 'trifb'
# List the values from a Dictionary
d = {'one': 1, 'two':2, 'three':3, 'four':4, 'five':5}
for k in d:
print(k, d[k], end = ' ; ')
print('\nPrint (Reference) only a value from the dictionary : ',d['three'])
# DICTIONARY (MUTABLE) , KEYS are Immutable because are based on hashes and to be able to keep track of the values
# Define an empty dictionary
girls = {}
#### If Clause Check if an element in the dictionary has values :
print('\n------------- If Clause Check if an element in the dictionary has values : ------------------ ')
D={ 3:[], 5:[5] }
if D[5] : print('yes')
else : print('no')
if D[3] : print('yes')
else : print('no')
# Make a CLONE to a dictionary :
print('\n--------------------Make a CLONE to a dictionary :-----------------------------')
d_clone = d.copy()
for k, v in d_clone.items() : print(k,v, end=' ; ')
print('\n----------------------Listing & Printing--------------------------')
#Add a couple of names to the dictionary
girls['Gianina'] = 23
girls['Rebeca'] = 19
girls['Pamela'] = 78
girls['Denise'] = 45
# Get the type of the the girls object
print('Print the type of the object: ', type(girls))
print('Print only Keys: ', girls.keys())
print('Print only Values: ', girls.values())
print('Print the entire Dictionary: \n', girls.items())
print('---'*15)
# Modify the value stored under a key:
print('\nModify the value stored under a key:')
girls['Denise'] = 10
print('The new dictionary is now: \n' ,girls)
# Delete, Remove a key from Dictionary without generating Error:
my_dict = {'13': [5], '76': [4], '77': [9]}
if '77' in my_dict: del my_dict['77']
# This will return my_dict[key] if key exists in the dictionary, and None otherwise.
# If the second parameter is not specified (ie. my_dict.pop('key')) and key does not exist, a KeyError is raised.
my_dict.pop('key', None) # The MOST EFFICIENT WAY
# You should consider, though, that this way of deleting an object from a dict is not atomic—
# it is possible that 'key' may be in myDict during the if statement, but may be deleted before del is executed,
# in which case del will fail with a KeyError.
# Given this, it would be safest to either use dict.pop or something along the lines of
try: del my_dict['key']
except KeyError: pass
# List Only the keys
print('\n------------# List Only the keys -------------------')
print(list(girls.keys()))
print('\n------------# List Only the keys -------------------')
for k in list(girls.keys()): print(k,type(k) ,end=' ')
print('\n----------------- Interrogate about an item or a value---------------------')
# Interrogate about an item: This is the "safe" way to access a value if we are not sure the key is in the dictionary.
print('\nInterrogate about an item:')
print(girls.get('Bogdan', 0), ', the item is not on the list')
print(girls.get('Denise', 0))
print(girls.get('Pamela') == None)
# To avoid this, we can use the call girls.get('Bogdan', 0). This is the "safe" way to access a value if we are not sure the key is in the dictionary.
# Interrogate about a value :
print('\n Interrogate about a value :' , list(girls.keys())[list(girls.values()).index(78)] )
# Reverse the Dictionary , keys become values : reverse
print('\n------------- Reverse ( reverse) the Dictionary , keys become values : ----------------------')
new_girls = dict(zip(girls.values(),girls.keys()))
print(new_girls)
print(new_girls[19])
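# Note: duplicate values collapse when a dict is reversed, because they become keys:
dup = {'a': 1, 'b': 1}
print(dict(zip(dup.values(), dup.keys())))   # {1: 'b'} - only the last pair survives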
# Length, Count the number of elements
print('\nLength (size, count elements) of the dictionary: ', len(girls))
print('Print the list with ordered (sorted ) names: ', sorted(girls))
print('\nGet elements using a for loop:')
for k, v in girls.items():
print(k, v)
print('---'*10,' Define a second dictionary','---'*10)
boys = {'Andrei' : 33, 'Marius' : 12, 'Bogdan' : 2, 'Viorel' : 13, 'Ionut' : 4, 'Dan' : 18}
print('the boys dictionary is: ', boys)
print('---'*15)
#Update (extend, Extend) the first dictionary with the second one, this joins the girls dictionary with the boys dictionary:
girls.update(boys)
print('The new dictionary containing GIRLS & BOYS is : ',girls)
print('\n -------------------- Join, join two dictionaries ---------------------')
x = {'a':1, 'b': 2}
y = {'b':10, 'c': 11}
print('Initial dictionaries : ', x, y )
print('\n### Mehod I - In Python 3.5 or greater')
z = {**x, **y}
print(z)
print('\n### Mehod II - In Python 2, (or 3.4 or lower) write a function:')
def merge_two_dicts(x, y):
z = x.copy() # start with x's keys and values
z.update(y) # modifies z with y's keys and values & returns None
return z
z = merge_two_dicts(x, y)
print(z)
print('\n### Mehod III ')
x = {'a':1, 'b': 2 }
y = {'b':10, 'c': 11 }
print( list(x.items()) , list(y.items()) )
print( list(x.items()) + list(y.items()) )
### Transform a list of tuples of two elements directly into a dictionary !!!!
z = dict(list(x.items()) + list(y.items()))
print(z)
print('\n### Mehod IV ')
z = x.copy()
z.update(y)
print(z)
print('\n### Mehod V ')
z = dict(x, **y)
print(z)
print('\n','---'*15,' Dictionary Comprehension ','---'*15)
x = { 2**k : k*(k+1)//2 for k in range(1, 5) } # Dictionary Comprehension
print(x)
#Python 2.7+ introduces Dictionary Comprehension. Building the dictionary from the list will get you the count as well as get rid of duplicates.
a = [1,1,1,1,2,2,2,2,3,3,4,5,5]
d = {x : a.count(x) for x in a}
print(d)
a, b = d.keys(), d.values()
print(a , b)
print('\n------------------Return the Key with the Largest / Smallest Value--------------------------')
stats = {'a':1000, 'b':3000, 'c': 100}
# There are two ways
print(max(stats, key=stats.get))
print(min(d, key=lambda k: d[k]))
def biggest(aDict):
'''
aDict: A dictionary, where all the values are lists.
returns: The key with the largest number of values associated with it
'''
    big, key = 0, None
    for k, v in aDict.items():
        if big < len(v):
            big, key = len(v), k
    return (key, big)
animals = {'a': ['aardvark'], 'b': ['baboon'], 'c': ['coati'], 'd': ['donkey', 'dog', 'dingo']}
print(biggest(animals))
print('\n------------------Return the largest (max) Key --------------------------')
Tracker = { 2 : 4, 3 : 2 , 5: 2 }
print('Maximum value of the key in a dict :', max(Tracker, key=int) )
print('\n--------------Pb060 Euler, Nice function to store sorted nrs as key with lists as values ------------------------------')
def cubic_permutations():
'''Uses a dictionary to store sorted numbers as keys and lists as values '''
from itertools import count
d = {}
for i in count():
cube = i**3
signature = ''.join(sorted(str(cube)))
if signature in d:
d[signature].append(cube)
#print(d[signature])
if len(d[signature]) == 3:
print(d[signature][0])
break
else :
d[signature] = [cube] # Here we add a list value to each key of the dictionary, Nice idea !
cubic_permutations()
print('\n--------------PB155 - Counting Capacitors circuits - Dictionary Flatten ------------------------------------')
print('-----This procedure it shows how to flatten a dictionary containing lists : ----------------')
D = {1: [60], 2: [30.0, 120], 3: [20.0, 40.0, 90.0, 180], 4: [15.0, 24.0, 36.0, 45.0, 60.0, 80.0, 100.0, 150.0, 240]}
# Method I
print(sorted({x for v in D.values() for x in v}))
# Method II
from itertools import chain
print(sorted(set(chain.from_iterable(D.values()))))
# Method III
from functools import reduce
print( list(reduce(lambda a, b: a.union(set(b)), D.values(), set())) )
print('\n--------Convert , map, zip two lists into a dictionary -------------')
keys = ['a', 'b', 'c']
values = [1, 2, 3]
# Method II - Using dict
dictionary = dict(zip(keys, values))
print (dictionary)
# Method II - Using List Comprehension
print({k: v for k, v in zip(keys, values)})
# Method III - Using itertools
import itertools
print(dict(itertools.zip_longest(keys,values)))
print('\n--------dictionary setdefault() Method, similar to the get Method -------------')
# Returns the value for the key if it is in the dictionary; otherwise it INSERTS the key
# with the provided default and returns that default.
person = {'Name': 'Zara', 'Age': 7}   # avoid naming a variable `dict` - it shadows the builtin
print("Value : %s" % person.setdefault('Age', None))
print("Value : %s" % person.setdefault('Sex', None))
# get is similar, but does NOT insert the missing key:
print('Value :', person.get('Sex'))
print('\n--------------Order, Sort, Arrange a Dictionary after the keys / values ---------------')
print('-------------- Order, Sort, after values ---------------')
# METHOD I
import operator
x = {1: 1, 2: 2, 3: 3, 4: 2, 5: 5, 6: 6, 7: 7, 8: 2, 9: 3, 10: 10}
sorted_x = sorted( x.items(), key=operator.itemgetter(1) )
print('Method I - Sort after the VALUES :' ,sorted_x)
# METHOD II - not what I want at this moment
print('Method II - Sort after the VALUES :' ,sorted(x, key=x.get))
# METHOD III -- Using lambda function
print('Method III - Sort after the VALUES :' , sorted(x.items(), key=lambda i: i[1]) )
print('-------------- Sort, after keys ---------------')
# METHOD I -
x = {1: 1, 42: 2, 13: 3, 14: 2, 117: 5, 16: 6, 27: 7, 17: 2, 39: 3, 11: 10}
sorted_y = sorted(x.items(), key=operator.itemgetter(0))
print('Method I - Sort after the KEYS :' ,sorted_y)
print('\n-------------- Check if a key is in dictionary ---------------')
dd = {}
for i in range(100):
key = i % 10
if key in dd:
dd[key] += 1
else:
dd[key] = 1
print(dd)
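# The same tally is written more idiomatically with the standard library:
from collections import Counter, defaultdict
print(Counter(i % 10 for i in range(100)))   # Counter({0: 10, 1: 10, ...})
dd2 = defaultdict(int)                       # missing keys default to 0
for i in range(100):
    dd2[i % 10] += 1
print(dict(dd2))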
print('\n ----------------- Dictionary comprehensions -------------------')
m = {x: x ** 2 for x in range(5)}
print( m )
print('-------- VERY USEFUL to use to GENERATE (variables , vars ) VARIABLES DYNAMICALLY (dynamically) ------------- ')
m = {x: 'A' + str(x) for x in range(10)}
print( m )
print('\n ----------------- Inverting a dictionary using a dictionary comprehension -------------------')
m = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
print(m)
print( {v: k for k, v in m.items()} )
print('\n---------- Build Dictionary within dictionary -------------------')
D = { x : { y: 0 for y in range(10)} for x in range(10)}
print('\n---------- Print only a range of values from a Dictionary -------------- ')
SQD = {x*x: x for x in range(1000) }
print('This prints only the k,v fro the dictionary if the values are < 100 :')
print( 'Length : ',len(SQD) , '\n', { k:v for k,v in SQD.items() if v<100 } )
print('\n----- Get a random pair of key, value from a Dictionary ------------')
# https://stackoverflow.com/questions/4859292/how-to-get-a-random-value-in-python-dictionary
import random
d = {'VENEZUELA':'CARACAS', 'CANADA':'OTTAWA'}
country, capital = random.choice(list(d.items()))
print('country, capital = ', country, capital) | [
"[email protected]"
]
| |
250bd77824bcedf7034eb92f335b65c75371bac9 | c6abddbc632b2362db0817aeab89387ea6a92902 | /qiskit/extensions/standard/ry.py | 5ac582759d10f05f7b9c33e624e57ba90dae61a4 | [
"Apache-2.0"
]
| permissive | azulehner/qiskit-sdk-py | 10c3c8d5e198e06e668d356bb78a98b279c8b3b8 | 138484e41eb8bd504f3b6977e267efdd0d9f208b | refs/heads/master | 2021-05-12T19:59:23.051113 | 2018-01-11T09:12:18 | 2018-01-11T09:12:18 | 117,109,018 | 2 | 0 | null | 2018-01-11T14:11:08 | 2018-01-11T14:11:07 | null | UTF-8 | Python | false | false | 2,131 | py | # -*- coding: utf-8 -*-
# pylint: disable=invalid-name
# Copyright 2017 IBM RESEARCH. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""
Rotation around the y-axis.
"""
from qiskit import CompositeGate
from qiskit import Gate
from qiskit import InstructionSet
from qiskit import QuantumCircuit
from qiskit import QuantumRegister
from qiskit.extensions.standard import header # pylint: disable=unused-import
class RYGate(Gate):
"""rotation around the y-axis."""
def __init__(self, theta, qubit, circ=None):
"""Create new ry single qubit gate."""
super(RYGate, self).__init__("ry", [theta], [qubit], circ)
def qasm(self):
"""Return OPENQASM string."""
qubit = self.arg[0]
theta = self.param[0]
return self._qasmif("ry(%s) %s[%d];" % (theta, qubit[0].name,
qubit[1]))
def inverse(self):
"""Invert this gate.
ry(theta)^dagger = ry(-theta)
"""
self.param[0] = -self.param[0]
return self
def reapply(self, circ):
"""Reapply this gate to corresponding qubits in circ."""
self._modifiers(circ.ry(self.param[0], self.arg[0]))
def ry(self, theta, q):
"""Apply ry to q."""
if isinstance(q, QuantumRegister):
gs = InstructionSet()
        for j in range(q.size):
gs.add(self.ry(theta, (q, j)))
return gs
self._check_qubit(q)
return self._attach(RYGate(theta, q, self))
QuantumCircuit.ry = ry
CompositeGate.ry = ry
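
# Usage sketch (assumes the 2018-era qiskit-sdk-py QuantumProgram API that this
# extension targets; names and values are illustrative):
#
#   from qiskit import QuantumProgram
#   qp = QuantumProgram()
#   q = qp.create_quantum_register('q', 1)
#   c = qp.create_classical_register('c', 1)
#   circ = qp.create_circuit('demo', [q], [c])
#   circ.ry(0.5, (q, 0))   # attaches an RYGate via the method patched in above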
| [
"[email protected]"
]
| |
0185585a72a6d43205d0b55e5098876e40118a49 | 1726f4c11106d09313324d12d274705540baa9f4 | /server/apps/rooms/urls.py | 56e2c3207ecacd33289f58af7f389a47a8768e7b | []
| no_license | AlAstroMoody/chat | 3c1f33a343c72836867587200abbe2adedf0bbc4 | 7a528d62ccf5e4ed1d478a6479e41d37d08b87f8 | refs/heads/main | 2023-03-25T04:29:55.547258 | 2021-03-15T02:30:19 | 2021-03-15T02:30:19 | 347,811,950 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | from django.urls import path
from .views import RoomView, ReportView
rooms_urlpatterns = [
path('room/', RoomView.as_view(), name='room'),
path('room/<uuid:uuid>-<str:token>/report/', ReportView.as_view(), name='report'),
]
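
# Usage sketch (URL names resolved with Django's reverse(); the uuid and token
# values are illustrative):
#
#   from django.urls import reverse
#   reverse('room')
#   reverse('report', kwargs={'uuid': room.uuid, 'token': user_token})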
| [
"[email protected]"
]
| |
232ae1fa95eccd3f919594407f942bf53ac1636b | 6c3dbc51b19ddd21c389de79c29fa3706fc44733 | /models/cnn_bilstm_attention.py | 823f648be7ed06d81bfbd1e5e92a28a9c426eb31 | [
"MIT"
]
| permissive | Eurus-Holmes/Tumor2Graph | 7db12920a21b1b8609087fd9d7ceb245420cb536 | 6e52748d8cd2e8fe33092e2c67e92e6454a964b3 | refs/heads/main | 2023-07-07T22:15:17.869378 | 2021-09-02T03:31:46 | 2021-09-02T03:31:46 | 390,660,583 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,951 | py | from torch import nn
import torch
import torch.nn.functional as F
class TextCNN(nn.Module):
def __init__(self, feaSize, contextSizeList, filterNum, name='textCNN'):
super(TextCNN, self).__init__()
self.name = name
moduleList = []
for i in range(len(contextSizeList)):
moduleList.append(
nn.Sequential(
nn.Conv1d(in_channels=feaSize, out_channels=filterNum, kernel_size=contextSizeList[i], padding=contextSizeList[i]//2)
)
)
self.conv1dList = nn.ModuleList(moduleList)
self.process=nn.Sequential(nn.BatchNorm1d(filterNum), nn.ReLU())
def forward(self, x):
# x: batchSize × seqLen × feaSize
x = x.transpose(1,2) # => batchSize × feaSize × seqLen
x = [conv(x) for conv in self.conv1dList] # => scaleNum * (batchSize × filterNum × seqLen)
x_new=[self.process(new) for new in x]
pooler=[torch.max(new,dim=2)[0] for new in x]
pooling=[torch.relu(pool) for pool in pooler]
return torch.cat(x_new, dim=1).transpose(1,2),torch.cat(pooling,dim=1) # => batchSize × seqLen × scaleNum*filterNum
class Simple_Protein_Predict(nn.Module):
def __init__(self,lstm_hidden_size,lstm_hidden_size_1,contextSizeList,filter_number):
super(Simple_Protein_Predict, self).__init__()
self.lstm_hidden_size = lstm_hidden_size
self.lstm_hidden_size_1 =lstm_hidden_size_1
self.embed_matrix=nn.Embedding(22,64)
self.lstm = nn.LSTM(len(contextSizeList)*filter_number,self.lstm_hidden_size,num_layers=2,batch_first=True,bidirectional=True)
self.embed_matrix1 = nn.Embedding(22, 128)
self.lstm1 = nn.LSTM(len(contextSizeList)*filter_number, self.lstm_hidden_size_1, num_layers=2,batch_first=True,bidirectional=True)
self.dropout=nn.Dropout(.2)
self.predict = nn.Linear(4 * self.lstm_hidden_size_1, 4 * self.lstm_hidden_size)
self.linear=nn.Linear(self.lstm_hidden_size+self.lstm_hidden_size_1+2*filter_number*len(contextSizeList),2 * self.lstm_hidden_size)
self.final_linear=nn.Linear(2 * self.lstm_hidden_size,1)
self.activation = nn.Sigmoid()
self.attention_layer1 = nn.Sequential(
nn.Linear(self.lstm_hidden_size_1, self.lstm_hidden_size_1),
nn.ReLU(inplace=True))
self.attention_layer = nn.Sequential(
nn.Linear(self.lstm_hidden_size, self.lstm_hidden_size),
nn.ReLU(inplace=True))
self.textCNN = TextCNN(64 , contextSizeList, filter_number)
self.textCNN_second = TextCNN(128 , contextSizeList, filter_number)
def exponent_neg_manhattan_distance(self, x1, x2):
''' Helper function for the similarity estimate of the LSTMs outputs '''
return torch.exp(-torch.sum(torch.abs(x1 - x2), dim=1))
def attention_net_with_w(self, lstm_out, lstm_hidden):
'''
:param lstm_out: [batch_size, len_seq, n_hidden * 2]
:param lstm_hidden: [batch_size, num_layers * num_directions, n_hidden]
:return: [batch_size, n_hidden]
'''
lstm_tmp_out = torch.chunk(lstm_out, 2, -1)
# h [batch_size, time_step, hidden_dims]
h = lstm_tmp_out[0] + lstm_tmp_out[1]
# [batch_size, num_layers * num_directions, n_hidden]
lstm_hidden = torch.sum(lstm_hidden, dim=1)
# [batch_size, 1, n_hidden]
lstm_hidden = lstm_hidden.unsqueeze(1)
# atten_w [batch_size, 1, hidden_dims]
atten_w = self.attention_layer(lstm_hidden)
# [batch_size, time_step, hidden_dims]
m = nn.Tanh()(h)
# atten_context [batch_size, 1, time_step]
atten_context = torch.bmm(atten_w, m.transpose(1, 2))
# softmax_w [batch_size, 1, time_step]
softmax_w = F.softmax(atten_context, dim=-1)
# context [batch_size, 1, hidden_dims]
context = torch.bmm(softmax_w, h)
result = context.squeeze(1)
return result
def attention_net_with_w_virus(self, lstm_out, lstm_hidden):
'''
:param lstm_out: [batch_size, len_seq, n_hidden * 2]
:param lstm_hidden: [batch_size, num_layers * num_directions, n_hidden]
:return: [batch_size, n_hidden]
'''
lstm_tmp_out = torch.chunk(lstm_out, 2, -1)
# h [batch_size, time_step, hidden_dims]
h = lstm_tmp_out[0] + lstm_tmp_out[1]
# [batch_size, num_layers * num_directions, n_hidden]
lstm_hidden = torch.sum(lstm_hidden, dim=1)
# [batch_size, 1, n_hidden]
lstm_hidden = lstm_hidden.unsqueeze(1)
# atten_w [batch_size, 1, hidden_dims]
atten_w = self.attention_layer1(lstm_hidden)
# [batch_size, time_step, hidden_dims]
m = nn.Tanh()(h)
# atten_context [batch_size, 1, time_step]
atten_context = torch.bmm(atten_w, m.transpose(1, 2))
# softmax_w [batch_size, 1, time_step]
softmax_w = F.softmax(atten_context, dim=-1)
# context [batch_size, 1, hidden_dims]
context = torch.bmm(softmax_w, h)
result = context.squeeze(1)
return result
def forward(self, seq_1, seq_2, label):
output1 = self.embed_matrix(seq_1)
conv1, pooling_abs = self.textCNN(output1)
orgin_output, output1 = self.lstm(conv1)
# final_hidden_state : [batch_size, num_layers * num_directions, n_hidden]
final_hidden_state = output1[0].permute(1, 0, 2)
atten_out = self.attention_net_with_w(orgin_output, final_hidden_state)
output2 = self.embed_matrix1(seq_2)
conv2, pooling_virus = self.textCNN_second(output2)
orgin_output1, output2= self.lstm1(conv2)
# final_hidden_state : [batch_size, num_layers * num_directions, n_hidden]
final_hidden_state1 = output2[0].permute(1, 0, 2)
atten_out1 = self.attention_net_with_w_virus(orgin_output1, final_hidden_state1)
predictions = self.linear(torch.cat((pooling_abs,atten_out,pooling_virus,atten_out1), 1))
#predictions=self.exponent_neg_manhattan_distance(output1,output2).squeeze()
predictions=self.final_linear(predictions)
predictions=self.activation(predictions)
predictions=predictions.squeeze()
#predictions = self.activation(output).squeeze()
        # Take the vector corresponding to the #CLS# tag, i.e. index 0 along the sequence (seq_len) dimension
        # Below maps [batch_size, hidden_dim] to [batch_size, 1]
        # We are solving a binary classification problem here
        # predictions = self.dense(first_token_tensor)
        # Activate with sigmoid to return values between 0 and 1
#predictions = self.activation(outputs)
compute_loss = nn.BCELoss()
if label is not None:
            # compute the loss
loss = compute_loss(predictions, label)
return loss, predictions
else:
return predictions
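

# Minimal shape smoke test (a sketch: the hidden sizes, context sizes and batch
# shapes below are arbitrary assumptions, not values from the original project):
if __name__ == '__main__':
    model = Simple_Protein_Predict(lstm_hidden_size=64, lstm_hidden_size_1=64,
                                   contextSizeList=[1, 3, 5], filter_number=32)
    seq_1 = torch.randint(0, 22, (8, 50))     # token ids must stay below Embedding(22, ...)
    seq_2 = torch.randint(0, 22, (8, 120))
    label = torch.randint(0, 2, (8,)).float()
    loss, preds = model(seq_1, seq_2, label)
    print(loss.item(), preds.shape)           # preds has shape (8,)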
| [
"[email protected]"
]
| |
18f94647b65564f8455ffaaba1d6773b058d9354 | 6a2c101774903441bc43bcafaef788a7465d38bb | /music_controller/spotify/migrations/0001_initial.py | 6d380eb7954ae335440d0885ddd1edc8b8424db7 | []
| no_license | naistangz/house-party | 9faa66e12a528881cd7e613fede9da2a1ccf8c19 | 2bad502dececbdf7d273c14b9ea96dd9dc9a0c45 | refs/heads/main | 2023-02-17T04:35:31.012691 | 2021-01-17T12:07:36 | 2021-01-17T12:07:36 | 330,140,403 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 835 | py | # Generated by Django 3.1.4 on 2021-01-03 14:17
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='SpotifyToken',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('user', models.CharField(max_length=50, unique=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('refresh_token', models.CharField(max_length=150)),
('access_token', models.CharField(max_length=150)),
('expires_in', models.DateTimeField()),
('token_type', models.CharField(max_length=50)),
],
),
]
| [
"[email protected]"
]
| |
d5d79711a8ecbee5719606b201e7c9849b0a16e5 | e4607b5c37a4eba4d8c93e910375bbaf711e208b | /task/utils/ansible_api_v2.py | 242b4d9df49c41971be46265ff9757ee0e97af62 | []
| no_license | scholarg/Ops | 4d703d3fd3cb83ea0fb06b22918e40037e9171e5 | a409b07b6a256b2df2182b0441a33dc1ba29b307 | refs/heads/master | 2020-04-28T21:09:58.789788 | 2019-03-13T10:19:58 | 2019-03-13T10:19:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,630 | py | # -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: ansible_api
Description:
Author: Administrator
date: 2018/6/11
-------------------------------------------------
Change Activity:
2018/6/11:
-------------------------------------------------
"""
import json
import re
from ansible import constants as C
from collections import namedtuple
from ansible.parsing.dataloader import DataLoader
from ansible.playbook.play import Play
from ansible.executor.task_queue_manager import TaskQueueManager
from ansible.executor.playbook_executor import PlaybookExecutor
from ansible.plugins.callback import CallbackBase
from ansible.inventory.manager import InventoryManager
from ansible.vars.manager import VariableManager
from projs.utils.deploy_websocket import DeployResultsCollector
from conf.logger import ansible_logger
from Ops import settings
class ModuleResultsCollector(CallbackBase):
"""
    Callback used when running ad-hoc module commands
"""
def __init__(self, sock, *args, **kwargs):
super(ModuleResultsCollector, self).__init__(*args, **kwargs)
self.module_results = []
self.sock = sock
def v2_runner_on_unreachable(self, result):
if 'msg' in result._result:
data = '<code style="color: #FF0000">\n{host} | unreachable | rc={rc} >> \n{stdout}\n</code>'.format(
host=result._host.name, rc=result._result.get('rc'),
stdout=result._result.get('msg'))
else:
data = '<code style="color: #FF0000">\n{host} | unreachable >> \n{stdout}\n</code>'.format(
host=result._host.name,
stdout=json.dumps(
result._result,
indent=4))
self.sock.send(data)
self.module_results.append(data)
def v2_runner_on_ok(self, result, *args, **kwargs):
if 'rc' in result._result and 'stdout' in result._result:
data = '<code style="color: #008000">\n{host} | success | rc={rc} >> \n{stdout}\n</code>'.format(
host=result._host.name, rc=result._result.get('rc'),
stdout=result._result.get('stdout'))
elif 'results' in result._result and 'rc' in result._result:
data = '<code style="color: #008000">\n{host} | success | rc={rc} >> \n{stdout}\n</code>'.format(
host=result._host.name, rc=result._result.get('rc'),
stdout=result._result.get('results')[0])
elif 'module_stdout' in result._result and 'rc' in result._result:
data = '<code style="color: #008000">\n{host} | success | rc={rc} >> \n{stdout}\n</code>'.format(
host=result._host.name, rc=result._result.get('rc'),
stdout=result._result.get(
'module_stdout').encode().decode(
'utf-8'))
else:
data = '<code style="color: #008000">\n{host} | success >> \n{stdout}\n</code>'.format(
host=result._host.name,
stdout=json.dumps(
result._result,
indent=4))
self.sock.send(data)
self.module_results.append(data)
def v2_runner_on_failed(self, result, *args, **kwargs):
if 'stderr' in result._result:
data = '<code style="color: #FF0000">\n{host} | failed | rc={rc} >> \n{stdout}\n</code>'.format(
host=result._host.name,
rc=result._result.get(
'rc'),
stdout=result._result.get(
'stderr').encode().decode(
'utf-8'))
else:
data = '<code style="color: #FF0000">\n{host} | failed >> \n{stdout}\n</code>'.format(
host=result._host.name,
stdout=json.dumps(
result._result,
indent=4))
self.sock.send(data)
self.module_results.append(data)
class PlayBookResultsCollector(CallbackBase):
"""
    Callback used when running playbooks
"""
def __init__(self, sock, *args, **kwargs):
super(PlayBookResultsCollector, self).__init__(*args, **kwargs)
self.playbook_results = []
self.sock = sock
def v2_playbook_on_play_start(self, play):
name = play.get_name().strip()
if not name:
msg = '<code style="color: #FFFFFF">\nPLAY {}\n</code>'.format('*' * 100)
else:
msg = '<code style="color: #FFFFFF">\nPLAY [{}] {}\n</code>'.format(name, '*' * 100)
self.send_save(msg)
def v2_playbook_on_task_start(self, task, is_conditional):
msg = '<code style="color: #FFFFFF">\nTASK [{}] {}\n</code>'.format(task.get_name(), '*' * 100)
self.send_save(msg)
def v2_runner_on_ok(self, result, *args, **kwargs):
if result.is_changed():
data = '<code style="color: #FFFF00">[{}]=> changed\n</code>'.format(result._host.name)
else:
data = '<code style="color: #008000">[{}]=> ok\n</code>'.format(result._host.name)
self.send_save(data)
def v2_runner_on_failed(self, result, *args, **kwargs):
if 'changed' in result._result:
del result._result['changed']
data = '<code style="color: #FF0000">[{}]=> {}: {}\n</code>'.format(result._host.name, 'failed',
self._dump_results(result._result))
self.send_save(data)
def v2_runner_on_unreachable(self, result):
if 'changed' in result._result:
del result._result['changed']
data = '<code style="color: #FF0000">[{}]=> {}: {}\n</code>'.format(result._host.name, 'unreachable',
self._dump_results(result._result))
self.send_save(data)
def v2_runner_on_skipped(self, result):
if 'changed' in result._result:
del result._result['changed']
data = '<code style="color: #FFFF00">[{}]=> {}: {}\n</code>'.format(result._host.name, 'skipped',
self._dump_results(result._result))
self.send_save(data)
def v2_playbook_on_stats(self, stats):
hosts = sorted(stats.processed.keys())
data = '<code style="color: #FFFFFF">\nPLAY RECAP {}\n'.format('*' * 100)
self.send_save(data)
for h in hosts:
s = stats.summarize(h)
msg = '<code style="color: #FFFFFF">{} : ok={} changed={} unreachable={} failed={} skipped={}\n</code>'.format(
h, s['ok'], s['changed'], s['unreachable'], s['failures'], s['skipped'])
self.send_save(msg)
def send_save(self, data):
self.sock.send(data)
self.playbook_results.append(data)
class MyInventory(InventoryManager):
"""
    Builds an ansible Inventory dynamically.
"""
def __init__(self, loader, resource=None, sources=None):
"""
        resource is a dict keyed by group name, e.g.
        {
            "group1": {
                "hosts": [{"ip": "10.0.0.0", "port": "22", "username": "test", "password": "pass"}, ...],
                "group_vars": {"var1": value1, "var2": value2, ...}
            }
        }
        If you pass a plain list instead, every host in it is placed in the "default" group, e.g.
        [{"ip": "10.0.0.0", "port": "22", "username": "test", "password": "pass"}, ...]
        sources is the native InventoryManager parameter: one inventory file path, or a list of paths.
"""
super(MyInventory, self).__init__(loader=loader, sources=sources)
self.resource = resource
self.dynamic_inventory()
def add_dynamic_group(self, hosts, group_name, group_vars=None):
"""
        Parse the resource info (groups, hosts, etc. read from the database) into entries ansible can consume
        :param hosts: list of dicts, each holding all the info for one host
:type hosts: list
:param group_name:
:param group_vars:
:type group_vars: dict
:return:
"""
        # add the host group
self.add_group(group_name)
        # add host-group variables
if group_vars:
for key, value in group_vars.items():
self.groups[group_name].set_variable(key, value)
for host in hosts:
ip = host.get('ip')
port = host.get('port')
            # add the host to the group
self.add_host(ip, group_name, port)
username = host.get('username')
password = host.get('password')
            # generate per-host ansible connection variables
self.get_host(ip).set_variable('ansible_ssh_host', ip)
self.get_host(ip).set_variable('ansible_ssh_port', port)
self.get_host(ip).set_variable('ansible_ssh_user', username)
self.get_host(ip).set_variable('ansible_ssh_pass', password)
self.get_host(ip).set_variable('ansible_sudo_pass', password)
            # If one key manages every machine, just uncomment the line below with ssh_key pointing at the key file; if hosts use different keys, set host or group variables individually instead
# self.get_host(ip).set_variable('ansible_ssh_private_key_file', ssh_key)
# set other variables
for key, value in host.items():
if key not in ["ip", "port", "username", "password"]:
self.get_host(ip).set_variable(key, value)
def dynamic_inventory(self):
if isinstance(self.resource, list):
self.add_dynamic_group(self.resource, 'default')
elif isinstance(self.resource, dict):
for groupname, hosts_and_vars in self.resource.items():
self.add_dynamic_group(hosts_and_vars.get("hosts"), groupname, hosts_and_vars.get("group_vars"))
class ANSRunner(object):
"""
    Runs ansible modules or playbooks; by default authenticates with username + password + sudo.
"""
def __init__(self, resource=None, sources=None, sock=None, **kwargs):
Options = namedtuple('Options', ['connection', 'module_path', 'forks', 'timeout', 'remote_user',
'ask_pass', 'private_key_file', 'ssh_common_args', 'ssh_extra_args',
'sftp_extra_args', 'strategy',
'scp_extra_args', 'become', 'become_method', 'become_user', 'ask_value_pass',
'verbosity', 'retry_files_enabled',
'check', 'listhosts', 'listtasks', 'listtags', 'syntax', 'diff',
'gathering', 'roles_path'])
self.options = Options(connection='smart',
module_path=None,
forks=50, timeout=10,
remote_user=kwargs.get('remote_user', None), ask_pass=False, private_key_file=None,
ssh_common_args=None,
ssh_extra_args=None,
sftp_extra_args=None, strategy='free', scp_extra_args=None,
become=kwargs.get('become', None),
become_method=kwargs.get('become_method', None),
become_user=kwargs.get('become_user', None), ask_value_pass=False, verbosity=None,
retry_files_enabled=False, check=False, listhosts=False,
listtasks=False, listtags=False, syntax=False, diff=True, gathering='smart',
roles_path=settings.ANSIBLE_ROLE_PATH)
self.loader = DataLoader()
self.inventory = MyInventory(resource=resource, loader=self.loader, sources=sources)
self.variable_manager = VariableManager(loader=self.loader, inventory=self.inventory)
self.passwords = dict(sshpass=None, becomepass=None)
self.callback = None
self.sock = sock
def run_module(self, host_list, module_name, module_args, deploy=False, send_msg=True):
"""
run module from ansible ad-hoc.
"""
self.callback = DeployResultsCollector(self.sock, send_msg=send_msg) if deploy else ModuleResultsCollector(
self.sock)
play_source = dict(
name="Ansible Play",
hosts=host_list,
gather_facts='no',
tasks=[dict(action=dict(module=module_name, args=module_args))]
)
play = Play().load(play_source, variable_manager=self.variable_manager, loader=self.loader)
# actually run it
tqm = None
try:
tqm = TaskQueueManager(
inventory=self.inventory,
variable_manager=self.variable_manager,
loader=self.loader,
options=self.options,
passwords=self.passwords,
stdout_callback=self.callback,
)
            C.HOST_KEY_CHECKING = False  # disable host key checking so the first ansible connection does not prompt
tqm.run(play)
except Exception as e:
            ansible_logger.error('Failed to run {}: {}'.format(module_name, e))
finally:
if tqm is not None:
tqm.cleanup()
def run_playbook(self, playbook_path, extra_vars=None):
"""
run ansible playbook
"""
try:
self.callback = PlayBookResultsCollector(sock=self.sock)
if extra_vars:
self.variable_manager.extra_vars = extra_vars
executor = PlaybookExecutor(
playbooks=[playbook_path], inventory=self.inventory, variable_manager=self.variable_manager,
loader=self.loader,
options=self.options, passwords=self.passwords,
)
executor._tqm._stdout_callback = self.callback
            C.HOST_KEY_CHECKING = False  # disable host key checking so the first ansible connection does not prompt
executor.run()
except Exception as e:
            ansible_logger.error('Failed to run {}: {}'.format(playbook_path, e))
@property
def get_module_results(self):
return self.callback.module_results
@property
def get_playbook_results(self):
return self.callback.playbook_results
@staticmethod
def handle_setup_data(data):
"""处理setup模块数据,用于收集服务器信息功能"""
server_facts = {}
result = json.loads(data[data.index('{'): data.rindex('}') + 1])
facts = result['ansible_facts']
server_facts['hostname'] = facts['ansible_hostname']
server_facts['cpu_model'] = facts['ansible_processor'][-1]
server_facts['cpu_number'] = int(facts['ansible_processor_count'])
server_facts['vcpu_number'] = int(facts['ansible_processor_vcpus'])
server_facts['disk_total'], disk_size = 0, 0
for k, v in facts['ansible_devices'].items():
if k[0:2] in ['sd', 'hd', 'ss', 'vd']:
if 'G' in v['size']:
disk_size = float(v['size'][0: v['size'].rindex('G') - 1])
elif 'T' in v['size']:
disk_size = float(v['size'][0: v['size'].rindex('T') - 1]) * 1024
server_facts['disk_total'] += round(disk_size, 2)
server_facts['ram_total'] = round(int(facts['ansible_memtotal_mb']) / 1024)
server_facts['kernel'] = facts['ansible_kernel']
server_facts['system'] = '{} {} {}'.format(facts['ansible_distribution'],
facts['ansible_distribution_version'],
facts['ansible_userspace_bits'])
server_model = facts['ansible_product_name']
        # collect network interface info
nks = []
for nk in facts.keys():
networkcard_facts = {}
if re.match(r"^ansible_(eth|bind|eno|ens|em)\d+?", nk):
networkcard_facts['network_card_name'] = facts.get(nk).get('device')
networkcard_facts['network_card_mac'] = facts.get(nk).get('macaddress')
networkcard_facts['network_card_ip'] = facts.get(nk).get('ipv4').get('address') if 'ipv4' in facts.get(
nk) else 'unknown'
networkcard_facts['network_card_model'] = facts.get(nk).get('type')
networkcard_facts['network_card_mtu'] = facts.get(nk).get('mtu')
networkcard_facts['network_card_status'] = 1 if facts.get(nk).get('active') else 0
nks.append(networkcard_facts)
return server_facts, server_model, nks
@staticmethod
def handle_mem_data(data):
"""
        Parse the memory info that was gathered
        :param data: memory info fetched via ansible
:return:
"""
result = json.loads(data[data.index('{'): data.rindex('}') + 1])
facts = result['ansible_facts']
return facts['mem_info']
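
# Usage sketch (illustrative only -- the credentials below are fake and `sock`
# stands for the websocket-like object this project passes in from its consumers):
#
#   resource = [{'ip': '10.0.0.10', 'port': 22,
#                'username': 'deploy', 'password': 'secret'}]
#   runner = ANSRunner(resource=resource, sock=sock)
#   runner.run_module(host_list='default', module_name='ping', module_args='')
#   print(runner.get_module_results)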
| [
"[email protected]"
]
| |
2f58447eca540956aaaab1ccc21b07ff3717c1a8 | 4d2443d54c8a1104cad8ecc60e417e8a5af69450 | /entertainment_center.py | e0bef0107dacc1b001e5bdd21b53091f6f0b3a90 | []
| no_license | xueweiyema/movie_website | 4fa73dfbaf0a395e72eb6edcf91995dd5a2136e5 | 9770f994a291b51d4fd1a7032fc21a5ac3c537aa | refs/heads/master | 2021-01-19T07:09:44.646186 | 2017-04-24T06:41:19 | 2017-04-24T06:41:19 | 87,527,117 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,334 | py | import media
import fresh_tomatoes
titles = [
"Pirate Radio", "The Social Network", "Hidden Figures", "The Martian",
"Good Will Hunting", "The Lives of Others"
]
storylines = [
'''A band of rogue DJs that captivated Britain, playing the music that defined a generation and
standing up to a government that wanted classical music, and nothing else, on the airwaves.''',
'''Harvard student Mark Zuckerberg creates the social networking site that would become known as
Facebook, but is later sued by two brothers who claimed he stole their idea, and the co-founder
who was later squeezed out of the business.''',
'''The story of a team of African-American women mathematicians who served a vital role in NASA
during the early years of the US space program.''',
'''An astronaut becomes stranded on Mars after his team assume him dead, and must rely on his
ingenuity to find a way to signal to Earth that he is alive.''',
'''Will Hunting, a janitor at M.I.T., has a gift for mathematics, but needs help from a psychologist
to find direction in his life.''',
'''In 1984 East Berlin, an agent of the secret police, conducting surveillance on a writer and his
lover, finds himself becoming increasingly absorbed by their lives.''',
]
wiki_head = "https://upload.wikimedia.org/wikipedia/en/"
youtube_head = "https://youtu.be/"
wiki_suffixes = [
"e/e3/The_boat_that_rocked_poster.jpg",
"7/7a/Social_network_film_poster.jpg",
'4/4f/The_official_poster_for_the_film_Hidden_Figures%2C_2016.jpg',
'thumb/c/cd/The_Martian_film_poster.jpg/220px-The_Martian_film_poster.jpg',
'thumb/b/b8/Good_Will_Hunting_theatrical_poster.jpg/220px-Good_Will_Hunting_theatrical_poster.jpg',
'9/9f/Leben_der_anderen.jpg'
]
youtube_suffixes = [
"qX1SSiFWF-s", "lB95KLmpLR4", 'RK8xHq6dfAo', 'ej3ioOneTy8', 'PaZVjZEFkRs',
'FppW5ml4vdw'
]
imdbs = ["7.4", "7.7", "7.9", "8", "8.3", "8.5"]
posters = [wiki_head + wiki_suffix for wiki_suffix in wiki_suffixes]
trailers = [
youtube_head + youtube_suffix for youtube_suffix in youtube_suffixes
]
movies = []
for n in range(len(titles)):
movies.append(
media.Movie(titles[n], storylines[n], posters[n], trailers[n], imdbs[
n]))
print media.Movie.__doc__
fresh_tomatoes.open_movies_page(movies)
| [
"123456"
]
| 123456 |
34e2afbf4f41a4aff51f96b7411be3d80992143d | 95495baeb47fd40b9a7ecb372b79d3847aa7a139 | /swagger_client/models/i_ospfv3_log_adjacency_changes.py | 7ded1a2c2292e58b760cb914f7d1efd4e458559b | []
| no_license | pt1988/fmc-api | b1d8ff110e12c13aa94d737f3fae9174578b019c | 075f229585fcf9bd9486600200ff9efea5371912 | refs/heads/main | 2023-01-07T09:22:07.685524 | 2020-10-30T03:21:24 | 2020-10-30T03:21:24 | 308,226,669 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,614 | py | # coding: utf-8
"""
Cisco Firepower Management Center Open API Specification
**Specifies the REST URLs and methods supported in the Cisco Firepower Management Center API. Refer to the version specific [REST API Quick Start Guide](https://www.cisco.com/c/en/us/support/security/defense-center/products-programming-reference-guides-list.html) for additional information.** # noqa: E501
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class IOspfv3LogAdjacencyChanges(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'include_details': 'bool'
}
attribute_map = {
'include_details': 'includeDetails'
}
def __init__(self, include_details=None): # noqa: E501
"""IOspfv3LogAdjacencyChanges - a model defined in Swagger""" # noqa: E501
self._include_details = None
self.discriminator = None
if include_details is not None:
self.include_details = include_details
@property
def include_details(self):
"""Gets the include_details of this IOspfv3LogAdjacencyChanges. # noqa: E501
:return: The include_details of this IOspfv3LogAdjacencyChanges. # noqa: E501
:rtype: bool
"""
return self._include_details
@include_details.setter
def include_details(self, include_details):
"""Sets the include_details of this IOspfv3LogAdjacencyChanges.
:param include_details: The include_details of this IOspfv3LogAdjacencyChanges. # noqa: E501
:type: bool
"""
self._include_details = include_details
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(IOspfv3LogAdjacencyChanges, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, IOspfv3LogAdjacencyChanges):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
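
# Usage sketch (standalone use of this generated model):
#
#   changes = IOspfv3LogAdjacencyChanges(include_details=True)
#   changes.to_dict()   # -> {'include_details': True}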
| [
"[email protected]"
]
| |
0e2f843c6f77029a11b47217263877d7bcd0f437 | aeea7889a986e23ababbfc470e5fa97a4982bca0 | /devel/lib/python2.7/dist-packages/pal_interaction_msgs/msg/_WebGuiEvent.py | 068b236c1fdeb1d78c4037e05350bf894fd1ee86 | []
| no_license | robstolarz/sturdy-broccoli | 834798751985a0e77c8791859d9d5a8398da0416 | 2e4ae8f1966f01cab4938b8c5b42e3cfd1d9370a | refs/heads/master | 2021-01-20T07:09:07.867184 | 2017-05-15T14:10:56 | 2017-05-15T14:10:56 | 89,967,465 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,214 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from pal_interaction_msgs/WebGuiEvent.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class WebGuiEvent(genpy.Message):
_md5sum = "bd4a90be174b9e14b06cf397c1359fb1"
_type = "pal_interaction_msgs/WebGuiEvent"
_has_header = False #flag to mark the presence of a Header object
_full_text = """# message used by rb_flango
string name
# Expected contents:
# goTo
# setLanguage
string arg
"""
__slots__ = ['name','arg']
_slot_types = ['string','string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
name,arg
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(WebGuiEvent, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.name is None:
self.name = ''
if self.arg is None:
self.arg = ''
else:
self.name = ''
self.arg = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.arg
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.name = str[start:end].decode('utf-8')
else:
self.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.arg = str[start:end].decode('utf-8')
else:
self.arg = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.name
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
_x = self.arg
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
if python3:
buff.write(struct.pack('<I%sB'%length, length, *_x))
else:
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(_x))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(_x))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.name = str[start:end].decode('utf-8')
else:
self.name = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.arg = str[start:end].decode('utf-8')
else:
self.arg = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
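# Round-trip sketch (added; relies on the genpy runtime and the python3 /
# struct / _struct_I names used by this generated module):
#
#   from io import BytesIO
#   msg = WebGuiEvent(name='goTo', arg='kitchen')
#   buff = BytesIO()
#   msg.serialize(buff)
#   copy = WebGuiEvent().deserialize(buff.getvalue())
#   assert copy.name == 'goTo' and copy.arg == 'kitchen'
#
# Each string field is packed as a little-endian uint32 length ('<I')
# followed by its UTF-8 bytes, which is the standard ROS wire format.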
| [
"[email protected]"
]
| |
9b2983584ad068a54f43777bcf968b094e4c2bb3 | ae5b2f3b1a37a32fcf6388cc7ce14ea69df3eade | /Password Hacker/Problems/Input/main.py | a05ae94c21ef72424d4ff263bdaae9368e4fa2df | []
| no_license | Lyasinkovska/PycharmProjects | 9e05e9161c2f5019f68d5ec456c3871ec96835ca | 02ab034f6618c013cc42197f97ba56355f9b6163 | refs/heads/master | 2021-07-14T03:59:44.146017 | 2021-03-10T18:59:53 | 2021-03-10T18:59:53 | 242,104,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 96 | py | # write the code here
line = input()
with open('input.txt', 'w') as out_file:
    out_file.write(line)
"[email protected]"
]
| |
974d23b2db78a98dff8a60dd5fd88c8a96574f4a | 3636cc88862e72016932459bf5a14afa5249a628 | /manage.py | d6ba9f7bc8246711107ec0cdc403ac2efad8fd77 | []
| no_license | crowdbotics-apps/punch-17156 | a19f2d55db2b3b15cd9c582e1576e06033191169 | 332fb19031bef7b26101e0e77f6e3dca70b45780 | refs/heads/master | 2023-05-30T20:26:47.145506 | 2020-05-19T19:21:38 | 2020-05-19T19:21:38 | 265,340,874 | 0 | 0 | null | 2021-06-12T08:02:20 | 2020-05-19T19:13:40 | Python | UTF-8 | Python | false | false | 631 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "punch_17156.settings")
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
d86ce35082ffe54da97e3fc64900cb94273d31e3 | 01031a3d3a33f7591185049e0e44526d9b852821 | /SLAM/FastSLAM/fast_slam.py | 5fb58f0f6c79cffce9e14f0fb876674a3d575e77 | [
"MIT"
]
| permissive | matthewgan/PythonRobotics | 8555e2afe95d09c12c5e18ab4658b8e9e3f6817c | ba926c6307e353dbef0d6ee67f5156ec923dc974 | refs/heads/master | 2021-04-06T03:39:43.702250 | 2018-03-10T18:38:06 | 2018-03-10T18:38:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,420 | py | """
Fast SLAM example
author: Atsushi Sakai (@Atsushi_twi)
"""
import numpy as np
import math
import matplotlib.pyplot as plt
# EKF state covariance
Cx = np.diag([0.5, 0.5, math.radians(30.0)])**2
# Simulation parameter
Qsim = np.diag([0.2, math.radians(1.0)])**2
Rsim = np.diag([1.0, math.radians(10.0)])**2
DT = 0.1 # time tick [s]
SIM_TIME = 50.0 # simulation time [s]
MAX_RANGE = 20.0 # maximum observation range
M_DIST_TH = 2.0 # Threshold of Mahalanobis distance for data association.
STATE_SIZE = 3 # State size [x,y,yaw]
LM_SIZE = 2 # LM srate size [x,y]
N_PARTICLE = 100 # number of particle
show_animation = True
class Particle:
def __init__(self, N_LM):
self.w = 1.0 / N_PARTICLE
self.x = 0.0
self.y = 0.0
self.yaw = 0.0
self.lm = np.zeros((N_LM, 2))
def normalize_weight(particles):
sumw = sum([particles[ip].w for ip in range(N_PARTICLE)])
for i in range(N_PARTICLE):
particles[i].w = particles[i].w / sumw
return particles
def calc_final_state(particles):
particles = normalize_weight(particles)
xEst = np.zeros((STATE_SIZE, 1))
for i in range(N_PARTICLE):
xEst[0, 0] += particles[i].w * particles[i].x
xEst[1, 0] += particles[i].w * particles[i].y
xEst[2, 0] += particles[i].w * particles[i].yaw
xEst[2, 0] = pi_2_pi(xEst[2, 0])
return xEst
def predict_particles(particles, u):
for i in range(N_PARTICLE):
px = np.zeros((STATE_SIZE, 1))
px[0, 0] = particles[i].x
px[1, 0] = particles[i].y
px[2, 0] = particles[i].yaw
ud = u + np.matrix(np.random.randn(1, 2)) * Rsim # add noise
px = motion_model(px, ud)
particles[i].x = px[0, 0]
particles[i].y = px[1, 0]
particles[i].yaw = px[2, 0]
return particles
def add_new_lm(particle, z):
r = z[0, 0]
b = z[0, 1]
lm_id = int(z[0, 2])
s = math.sin(particle.yaw + b)
c = math.cos(particle.yaw + b)
particle.lm[lm_id, 0] = particle.x + r * c
particle.lm[lm_id, 1] = particle.y + r * s
return particle
def compute_weight(particle, z):
    # NOTE: this simplified example scores the stored landmark (x, y)
    # directly against the raw (range, bearing) measurement with an
    # identity covariance, so w is only a rough likelihood proxy.
    lm_id = int(z[0, 2])
    lmxy = np.matrix(particle.lm[lm_id, :])
    zxy = z[0, 0:2]
    dx = (lmxy - zxy).T
    S = np.eye(2)
    num = math.exp(-0.5 * dx.T * np.linalg.inv(S) * dx)
    den = 2.0 * math.pi * math.sqrt(np.linalg.det(S))
    w = num / den
    return w
def update_with_observation(particles, z):
for iz in range(len(z[:, 0])):
lmid = int(z[iz, 2])
for ip in range(N_PARTICLE):
# new landmark
if abs(particles[ip].lm[lmid, 0]) <= 0.1:
particles[ip] = add_new_lm(particles[ip], z[iz, :])
# known landmark
else:
w = compute_weight(particles[ip], z[iz, :]) # w = p(z_k | x_k)
particles[ip].w = particles[ip].w * w
# particles(i)= feature_update(particles(i), zf, idf, R)
return particles
def fast_slam(particles, PEst, u, z):
# Predict
particles = predict_particles(particles, u)
# Observation
particles = update_with_observation(particles, z)
xEst = calc_final_state(particles)
return xEst, PEst
def calc_input():
v = 1.0 # [m/s]
yawrate = 0.1 # [rad/s]
u = np.matrix([v, yawrate]).T
return u
def observation(xTrue, xd, u, RFID):
xTrue = motion_model(xTrue, u)
# add noise to gps x-y
z = np.matrix(np.zeros((0, 3)))
for i in range(len(RFID[:, 0])):
dx = RFID[i, 0] - xTrue[0, 0]
dy = RFID[i, 1] - xTrue[1, 0]
d = math.sqrt(dx**2 + dy**2)
angle = pi_2_pi(math.atan2(dy, dx))
if d <= MAX_RANGE:
dn = d + np.random.randn() * Qsim[0, 0] # add noise
anglen = angle + np.random.randn() * Qsim[1, 1] # add noise
zi = np.matrix([dn, anglen, i])
z = np.vstack((z, zi))
# add noise to input
ud1 = u[0, 0] + np.random.randn() * Rsim[0, 0]
ud2 = u[1, 0] + np.random.randn() * Rsim[1, 1]
ud = np.matrix([ud1, ud2]).T
xd = motion_model(xd, ud)
return xTrue, z, xd, ud
def motion_model(x, u):
F = np.matrix([[1.0, 0, 0],
[0, 1.0, 0],
[0, 0, 1.0]])
B = np.matrix([[DT * math.cos(x[2, 0]), 0],
[DT * math.sin(x[2, 0]), 0],
[0.0, DT]])
x = F * x + B * u
return x
def calc_n_LM(x):
n = int((len(x) - STATE_SIZE) / LM_SIZE)
return n
def calc_LM_Pos(x, z):
zp = np.zeros((2, 1))
zp[0, 0] = x[0, 0] + z[0, 0] * math.cos(x[2, 0] + z[0, 1])
zp[1, 0] = x[1, 0] + z[0, 0] * math.sin(x[2, 0] + z[0, 1])
return zp
def get_LM_Pos_from_state(x, ind):
lm = x[STATE_SIZE + LM_SIZE * ind: STATE_SIZE + LM_SIZE * (ind + 1), :]
return lm
def search_correspond_LM_ID(xAug, PAug, zi):
"""
    Landmark association with Nearest Neighbor.
    The innovation-based distance below is commented out, so this always
    returns the index of a new landmark; it is not called in this example.
"""
nLM = calc_n_LM(xAug)
mdist = []
for i in range(nLM):
# lm = get_LM_Pos_from_state(xAug, i)
# # y, S, H = calc_innovation(lm, xAug, PAug, zi, i)
# mdist.append(y.T * np.linalg.inv(S) * y)
pass
mdist.append(M_DIST_TH) # new landmark
minid = mdist.index(min(mdist))
return minid
def pi_2_pi(angle):
while(angle > math.pi):
angle = angle - 2.0 * math.pi
while(angle < -math.pi):
angle = angle + 2.0 * math.pi
return angle
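# Added sketch: the original example never resamples, so particle weights
# can degenerate over time. A minimal low-variance resampler over the
# Particle list defined above could look like this (not called anywhere
# in this file; copy is imported only for this sketch):
import copy


def low_variance_resampling(particles):
    particles = normalize_weight(particles)
    cum_w = np.cumsum([p.w for p in particles])
    positions = (np.arange(N_PARTICLE) + np.random.uniform()) / N_PARTICLE
    new_particles = []
    i = 0
    for pos in positions:
        # advance until the cumulative weight covers this sampling position
        while i < N_PARTICLE - 1 and cum_w[i] < pos:
            i += 1
        new_particles.append(copy.deepcopy(particles[i]))
    for p in new_particles:
        p.w = 1.0 / N_PARTICLE
    return new_particles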
def main():
print(__file__ + " start!!")
time = 0.0
# RFID positions [x, y]
RFID = np.array([[10.0, -2.0],
[15.0, 10.0],
[3.0, 15.0],
[-5.0, 20.0]])
N_LM = RFID.shape[0]
# State Vector [x y yaw v]'
xEst = np.matrix(np.zeros((STATE_SIZE, 1)))
xTrue = np.matrix(np.zeros((STATE_SIZE, 1)))
PEst = np.eye(STATE_SIZE)
xDR = np.matrix(np.zeros((STATE_SIZE, 1))) # Dead reckoning
# history
hxEst = xEst
hxTrue = xTrue
hxDR = xTrue
particles = [Particle(N_LM) for i in range(N_PARTICLE)]
while SIM_TIME >= time:
time += DT
u = calc_input()
xTrue, z, xDR, ud = observation(xTrue, xDR, u, RFID)
xEst, PEst = fast_slam(particles, PEst, ud, z)
x_state = xEst[0:STATE_SIZE]
# store data history
hxEst = np.hstack((hxEst, x_state))
hxDR = np.hstack((hxDR, xDR))
hxTrue = np.hstack((hxTrue, xTrue))
if show_animation:
plt.cla()
plt.plot(RFID[:, 0], RFID[:, 1], "*k")
plt.plot(xEst[0], xEst[1], "xr")
for i in range(N_PARTICLE):
plt.plot(particles[i].x, particles[i].y, ".r")
# plot landmark
for i in range(calc_n_LM(xEst)):
plt.plot(xEst[STATE_SIZE + i * 2],
xEst[STATE_SIZE + i * 2 + 1], "xg")
plt.plot(np.array(hxTrue[0, :]).flatten(),
np.array(hxTrue[1, :]).flatten(), "-b")
plt.plot(np.array(hxDR[0, :]).flatten(),
np.array(hxDR[1, :]).flatten(), "-k")
plt.plot(np.array(hxEst[0, :]).flatten(),
np.array(hxEst[1, :]).flatten(), "-r")
plt.axis("equal")
plt.grid(True)
plt.pause(0.001)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
1cc5122e5c66d396e0dc0b524d9525bc39c29fb8 | 9f7c106d50681b394d822fbdc5e3ad25f04d927c | /week6_nissi_miika/week6_ass10_nissi_miika.py | ffc57d3b785e28d30b956c0c90436868710caa64 | []
| no_license | miikanissi/python_course_summer_2020 | edf032b1d9815dfa6e0b5f7c902f7b469117c04f | 3969288b969b3db8f9d7f2fdb67905f13d4969fa | refs/heads/master | 2022-12-02T09:33:42.625374 | 2020-08-24T17:38:59 | 2020-08-24T17:38:59 | 273,909,320 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 219 | py | def search(array, n):
for i in array:
if i == n:
return True
return False
arr = [23,4,89,19,0,700,30]
print("Number 19 found: ", search(arr, 19))
print("Number 20 found: ", search(arr, 20))
| [
"[email protected]"
]
| |
52fe8f06f0857f20301770af0233c347deb1dcc6 | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/reservation/azext_reservation/aaz/latest/reservations/reservation/_archive.py | 734b5fe8a45e886531655e01256e5a4e690f51e7 | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 3,904 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
# pylint: skip-file
# flake8: noqa
from azure.cli.core.aaz import *
@register_command(
"reservations reservation archive",
)
class Archive(AAZCommand):
"""Archiving a `Reservation` which is in cancelled/expired state and move it to `Archived` state.
:example: Archiving a reservation
az reservations reservation archive --reservation-order-id 40000000-aaaa-bbbb-cccc-20000000000 --reservation-id 50000000-aaaa-bbbb-cccc-200000000000
"""
_aaz_info = {
"version": "2022-11-01",
"resources": [
["mgmt-plane", "/providers/microsoft.capacity/reservationorders/{}/reservations/{}/archive", "2022-11-01"],
]
}
def _handler(self, command_args):
super()._handler(command_args)
self._execute_operations()
return None
_args_schema = None
@classmethod
def _build_arguments_schema(cls, *args, **kwargs):
if cls._args_schema is not None:
return cls._args_schema
cls._args_schema = super()._build_arguments_schema(*args, **kwargs)
# define Arg Group ""
_args_schema = cls._args_schema
_args_schema.reservation_id = AAZStrArg(
options=["--reservation-id"],
help="Id of the Reservation Item",
required=True,
)
_args_schema.reservation_order_id = AAZStrArg(
options=["--reservation-order-id"],
help="Order Id of the reservation",
required=True,
)
return cls._args_schema
def _execute_operations(self):
self.pre_operations()
self.ReservationArchive(ctx=self.ctx)()
self.post_operations()
@register_callback
def pre_operations(self):
pass
@register_callback
def post_operations(self):
pass
class ReservationArchive(AAZHttpOperation):
CLIENT_TYPE = "MgmtClient"
def __call__(self, *args, **kwargs):
request = self.make_request()
session = self.client.send_request(request=request, stream=False, **kwargs)
if session.http_response.status_code in [200]:
return self.on_200(session)
return self.on_error(session.http_response)
@property
def url(self):
return self.client.format_url(
"/providers/Microsoft.Capacity/reservationOrders/{reservationOrderId}/reservations/{reservationId}/archive",
**self.url_parameters
)
@property
def method(self):
return "POST"
@property
def error_format(self):
return "ODataV4Format"
@property
def url_parameters(self):
parameters = {
**self.serialize_url_param(
"reservationId", self.ctx.args.reservation_id,
required=True,
),
**self.serialize_url_param(
"reservationOrderId", self.ctx.args.reservation_order_id,
required=True,
),
}
return parameters
@property
def query_parameters(self):
parameters = {
**self.serialize_query_param(
"api-version", "2022-11-01",
required=True,
),
}
return parameters
def on_200(self, session):
pass
class _ArchiveHelper:
"""Helper class for Archive"""
__all__ = ["Archive"]
| [
"[email protected]"
]
| |
e70678a04a2923ec5b42793b99a701886f815120 | 023763d9f86116381f5765c51fb8b403e8eef527 | /Other/M-SOLUTIONS プロコンオープン 2020/m_solutions2020_c.py | a2a8e2a8ef4bdc030af9099c1af7be71984e6691 | []
| no_license | Hilary02/atcoder | d45589682159c0f838561fc7d0bd25f0828e578b | 879c74f3acc7befce75abd10abf1ab43967fc3c7 | refs/heads/master | 2021-07-18T11:34:22.702502 | 2021-07-11T09:04:12 | 2021-07-11T09:04:12 | 144,648,001 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | n, k = [int(w) for w in input().split()]
la = [int(w) for w in input().split()]
for i in range(k, n):
cond = la[i] > la[i-k]
print("Yes" if cond else "No")
| [
"[email protected]"
]
| |
f8392b31b550f3e9534ab9a3e39f8baed6780ccb | bd5c1f86971a068e9a4ea57459653d649ea4c50e | /tests/unit/test_util.py | 18973c6f16a056c938ac6e25dcb06fc61da6117c | [
"MIT"
]
| permissive | monoflo/bloop | 553372c7155de386afd098e3c91435186064a5d4 | c476298e5a40decf9fdf2ed50df74be8f91fdffd | refs/heads/master | 2020-04-06T12:54:19.184591 | 2018-11-14T22:06:01 | 2018-11-14T22:06:01 | 157,475,484 | 0 | 0 | MIT | 2018-11-14T02:04:44 | 2018-11-14T02:04:44 | null | UTF-8 | Python | false | false | 3,510 | py | import collections
import gc
import pytest
from bloop.models import BaseModel, Column
from bloop.types import Integer
from bloop.util import (
Sentinel,
WeakDefaultDictionary,
index,
ordered,
walk_subclasses,
)
def test_index():
"""Index by each object's value for an attribute"""
class Person:
def __init__(self, name):
self.name = name
p1, p2, p3 = Person("foo"), Person("bar"), Person("baz")
assert index([p1, p2, p3], "name") == {
"foo": p1,
"bar": p2,
"baz": p3
}
@pytest.mark.parametrize("obj", [None, object(), 2, False, "abc"])
def test_ordered_basic_objects(obj):
"""Things that don't need to be unpacked or flattened for comparison"""
assert ordered(obj) is obj
@pytest.mark.parametrize("it", [
iter(list("bac")),
["b", "c", "a"],
("c", "a", "b"),
(x for x in "cba"),
{"a", "c", "b"}
])
def test_ordered_iterable(it):
"""Any non-mapping iterable is sorted, even if it's consumable"""
expected = ["a", "b", "c"]
assert ordered(it) == expected
@pytest.mark.parametrize("mapping", [
{"b": True, "a": "zebra", "c": None},
collections.OrderedDict([("c", None), ("b", True), ("a", "zebra")])
])
def test_ordered_mapping(mapping):
"""Mappings are flattened into (key, value) tuples and then those tuples are sorted"""
expected = [
("a", "zebra"),
("b", True),
("c", None)
]
assert ordered(mapping) == expected
@pytest.mark.parametrize("obj, expected", [
# mapping int -> set(str)
({3: {"a", "b"}, 2: {"c", "b"}, 1: {"a", "c"}}, [(1, ["a", "c"]), (2, ["b", "c"]), (3, ["a", "b"])]),
# mapping str -> list(int)
({"b": [1, 2], "a": [3, 2], "c": [1, 3]}, [("a", [2, 3]), ("b", [1, 2]), ("c", [1, 3])]),
# list(set(bool))
([{False}, {True}], [[False], [True]]),
])
def test_ordered_recursion(obj, expected):
"""Mappings and iterables inside each other are sorted and flattened"""
assert ordered(obj) == expected
def test_walk_subclasses():
class A:
pass
class B: # Not included
pass
class C(A):
pass
class D(A):
pass
class E(C, A): # would be visited twice without dedupe
pass
class F(D, A): # would be visited twice without dedupe
pass
# list instead of set ensures we don't false succeed on duplicates
subclasses = sorted(walk_subclasses(A), key=lambda c: c.__name__)
assert subclasses == [C, D, E, F]
def test_sentinel_uniqueness():
sentinel = Sentinel("name")
same_sentinel = Sentinel("NAME")
assert sentinel is same_sentinel
def test_sentinel_repr():
foo = Sentinel("foo")
assert repr(foo) == "<Sentinel[foo]>"
def test_weakref_default_dict():
"""Provides defaultdict behavior for a WeakKeyDictionary"""
class MyModel(BaseModel):
id = Column(Integer, hash_key=True)
data = Column(Integer)
def new(i):
obj = MyModel(id=i, data=2 * i)
return obj
weak_dict = WeakDefaultDictionary(lambda: {"foo": "bar"})
n_objs = 10
objs = [new(i) for i in range(n_objs)]
for obj in objs:
# default_factory is called
assert weak_dict[obj] == {"foo": "bar"}
# don't keep a reference to the last obj, throws off the count below
del obj
calls = 0
while weak_dict:
del objs[0]
gc.collect()
calls += 1
assert len(weak_dict) == len(objs)
assert calls == n_objs
| [
"[email protected]"
]
| |
321af11c680482b013a293d81093854eec9201fc | 4680b7f858232806ea15bf2464ec4b6401d93cf0 | /src/joins/models.py | e62ea42028a7e3943d20ac673c2baa3bcb0d8b56 | []
| no_license | Tushant/socialSharingCampaign | 69017e602648ea8ef6e02092668039d61844b61f | 96dc8176be1cf64e9ef4ec6a305c61666612be20 | refs/heads/master | 2020-04-02T06:45:17.103905 | 2016-07-18T02:09:51 | 2016-07-18T02:09:51 | 63,562,915 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,079 | py | from django.db import models
# Create your models here.
class Join(models.Model):
email = models.EmailField()
friend = models.ForeignKey("self", related_name='referral',\
null=True, blank=True)
ref_id = models.CharField(max_length=120, default='ABC', unique=True)
count_added = models.ForeignKey("self",null=True,related_name='count',blank=True)
ip_address = models.CharField(max_length=120, default='ABC')
timestamp = models.DateTimeField(auto_now_add = True, auto_now=False)
updated = models.DateTimeField(auto_now_add = False, auto_now=True)
def __unicode__(self):
return "%s" %(self.email)
class Meta:
unique_together = ("email", "ref_id",)
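# Usage sketch (added; emails and ref_ids are illustrative):
#
#   parent = Join.objects.create(email='[email protected]', ref_id='REF1')
#   child = Join.objects.create(email='[email protected]', ref_id='REF2',
#                               friend=parent)
#   parent.referral.all()  # reverse accessor from the 'friend' FK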
# class JoinFriends(models.Model):
# email = models.OneToOneField(Join, related_name="Sharer")
# friends = models.ManyToManyField(Join, related_name="Friend", \
# null=True, blank=True)
# emailall = models.ForeignKey(Join, related_name='emailall')
# def __unicode__(self):
# print "friends are ", self.friends.all()
# print self.emailall
# print self.email
# return self.email.email | [
"[email protected]"
]
| |
98cfe74c39eaab9a20964b5ba9dd22e3e4ede5a4 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /workmail_write_2/organization_delete.py | 6780aa72ae6f9c251cf32b3f0cb708c6b7c074ff | []
| no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,263 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_two_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/workmail/delete-organization.html
if __name__ == '__main__':
"""
create-organization : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/workmail/create-organization.html
describe-organization : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/workmail/describe-organization.html
list-organizations : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/workmail/list-organizations.html
"""
parameter_display_string = """
# organization-id : The organization ID.
# delete-directory | --no-delete-directory : If true, deletes the AWS Directory Service directory associated with the organization.
"""
add_option_dict = {}
add_option_dict["parameter_display_string"] = parameter_display_string
# ex: add_option_dict["no_value_parameter_list"] = "--single-parameter"
write_two_parameter("workmail", "delete-organization", "organization-id", "delete-directory | --no-delete-directory", add_option_dict)
| [
"[email protected]"
]
| |
59591e6819e784285a66c1e75672d25166bbaab7 | 3f6c16ea158a8fb4318b8f069156f1c8d5cff576 | /.PyCharm2019.1/system/python_stubs/-1317042838/fcntl.py | bec2cdc25d0f7a7a50514e4edeaf1497362d20c3 | []
| no_license | sarthak-patidar/dotfiles | 08494170d2c0fedc0bbe719cc7c60263ce6fd095 | b62cd46f3491fd3f50c704f0255730af682d1f80 | refs/heads/master | 2020-06-28T23:42:17.236273 | 2019-10-01T13:56:27 | 2019-10-01T13:56:27 | 200,369,900 | 0 | 0 | null | 2019-08-03T12:56:33 | 2019-08-03T11:53:29 | Shell | UTF-8 | Python | false | false | 8,460 | py | # encoding: utf-8
# module fcntl
# from (built-in)
# by generator 1.147
"""
This module performs file control and I/O control on file
descriptors. It is an interface to the fcntl() and ioctl() Unix
routines. File descriptors can be obtained with the fileno() method of
a file or socket object.
"""
# no imports
# Variables with simple values
DN_ACCESS = 1
DN_ATTRIB = 32
DN_CREATE = 4
DN_DELETE = 8
DN_MODIFY = 2
DN_MULTISHOT = 2147483648
DN_RENAME = 16
FASYNC = 8192
FD_CLOEXEC = 1
F_DUPFD = 0
F_DUPFD_CLOEXEC = 1030
F_EXLCK = 4
F_GETFD = 1
F_GETFL = 3
F_GETLEASE = 1025
F_GETLK = 5
F_GETLK64 = 5
F_GETOWN = 9
F_GETSIG = 11
F_NOTIFY = 1026
F_RDLCK = 0
F_SETFD = 2
F_SETFL = 4
F_SETLEASE = 1024
F_SETLK = 6
F_SETLK64 = 6
F_SETLKW = 7
F_SETLKW64 = 7
F_SETOWN = 8
F_SETSIG = 10
F_SHLCK = 8
F_UNLCK = 2
F_WRLCK = 1
I_ATMARK = 21279
I_CANPUT = 21282
I_CKBAND = 21277
I_FDINSERT = 21264
I_FIND = 21259
I_FLUSH = 21253
I_FLUSHBAND = 21276
I_GETBAND = 21278
I_GETCLTIME = 21281
I_GETSIG = 21258
I_GRDOPT = 21255
I_GWROPT = 21268
I_LINK = 21260
I_LIST = 21269
I_LOOK = 21252
I_NREAD = 21249
I_PEEK = 21263
I_PLINK = 21270
I_POP = 21251
I_PUNLINK = 21271
I_PUSH = 21250
I_RECVFD = 21262
I_SENDFD = 21265
I_SETCLTIME = 21280
I_SETSIG = 21257
I_SRDOPT = 21254
I_STR = 21256
I_SWROPT = 21267
I_UNLINK = 21261
LOCK_EX = 2
LOCK_MAND = 32
LOCK_NB = 4
LOCK_READ = 64
LOCK_RW = 192
LOCK_SH = 1
LOCK_UN = 8
LOCK_WRITE = 128
# functions
def fcntl(*args, **kwargs): # real signature unknown
"""
Perform the operation `cmd` on file descriptor fd.
The values used for `cmd` are operating system dependent, and are available
as constants in the fcntl module, using the same names as used in
the relevant C header files. The argument arg is optional, and
defaults to 0; it may be an int or a string. If arg is given as a string,
the return value of fcntl is a string of that length, containing the
resulting value put in the arg buffer by the operating system. The length
of the arg string is not allowed to exceed 1024 bytes. If the arg given
is an integer or if none is specified, the result value is an integer
corresponding to the return value of the fcntl call in the C code.
"""
pass
def flock(*args, **kwargs): # real signature unknown; NOTE: unreliably restored from __doc__
"""
Perform the lock operation `operation` on file descriptor `fd`.
See the Unix manual page for flock(2) for details (On some systems, this
function is emulated using fcntl()).
"""
pass
def ioctl(*args, **kwargs): # real signature unknown
"""
Perform the operation `request` on file descriptor `fd`.
The values used for `request` are operating system dependent, and are available
as constants in the fcntl or termios library modules, using the same names as
used in the relevant C header files.
The argument `arg` is optional, and defaults to 0; it may be an int or a
buffer containing character data (most likely a string or an array).
If the argument is a mutable buffer (such as an array) and if the
mutate_flag argument (which is only allowed in this case) is true then the
buffer is (in effect) passed to the operating system and changes made by
the OS will be reflected in the contents of the buffer after the call has
returned. The return value is the integer returned by the ioctl system
call.
    If the argument is a mutable buffer and the mutate_flag argument is false,
the behavior is as if a string had been passed.
If the argument is an immutable buffer (most likely a string) then a copy
of the buffer is passed to the operating system and the return value is a
string of the same length containing whatever the operating system put in
the buffer. The length of the arg buffer in this case is not allowed to
exceed 1024 bytes.
If the arg given is an integer or if none is specified, the result value is
an integer corresponding to the return value of the ioctl call in the C
code.
"""
pass
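# Usage sketch (added; mirrors the example in the Python docs, assumes a
# terminal on fd 0 and the termios module):
#
#   import array, fcntl, termios
#   buf = array.array('h', [0, 0, 0, 0])
#   fcntl.ioctl(0, termios.TIOCGWINSZ, buf, True)  # rows/cols land in buf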
def lockf(*args, **kwargs): # real signature unknown
"""
A wrapper around the fcntl() locking calls.
`fd` is the file descriptor of the file to lock or unlock, and operation is one
of the following values:
LOCK_UN - unlock
LOCK_SH - acquire a shared lock
LOCK_EX - acquire an exclusive lock
When operation is LOCK_SH or LOCK_EX, it can also be bitwise ORed with
LOCK_NB to avoid blocking on lock acquisition. If LOCK_NB is used and the
lock cannot be acquired, an OSError will be raised and the exception will
have an errno attribute set to EACCES or EAGAIN (depending on the operating
system -- for portability, check for either value).
`len` is the number of bytes to lock, with the default meaning to lock to
EOF. `start` is the byte offset, relative to `whence`, to that the lock
starts. `whence` is as with fileobj.seek(), specifically:
0 - relative to the start of the file (SEEK_SET)
1 - relative to the current buffer position (SEEK_CUR)
2 - relative to the end of the file (SEEK_END)
"""
pass
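# Usage sketch for advisory file locking (added; /tmp/example.lock is an
# illustrative path):
#
#   import fcntl
#   with open('/tmp/example.lock', 'w') as f:
#       fcntl.flock(f.fileno(), fcntl.LOCK_EX | fcntl.LOCK_NB)
#       ...  # critical section; LOCK_NB raises OSError if already locked
#       fcntl.flock(f.fileno(), fcntl.LOCK_UN)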
# classes
class __loader__(object):
"""
Meta path import for built-in modules.
All methods are either class or static methods to avoid the need to
instantiate the class.
"""
@classmethod
def create_module(cls, *args, **kwargs): # real signature unknown
""" Create a built-in module """
pass
@classmethod
def exec_module(cls, *args, **kwargs): # real signature unknown
""" Exec a built-in module """
pass
@classmethod
def find_module(cls, *args, **kwargs): # real signature unknown
"""
Find the built-in module.
If 'path' is ever specified then the search is considered a failure.
This method is deprecated. Use find_spec() instead.
"""
pass
@classmethod
def find_spec(cls, *args, **kwargs): # real signature unknown
pass
@classmethod
def get_code(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have code objects. """
pass
@classmethod
def get_source(cls, *args, **kwargs): # real signature unknown
""" Return None as built-in modules do not have source code. """
pass
@classmethod
def is_package(cls, *args, **kwargs): # real signature unknown
""" Return False as built-in modules are never packages. """
pass
@classmethod
def load_module(cls, *args, **kwargs): # real signature unknown
"""
Load the specified module into sys.modules and return it.
This method is deprecated. Use loader.exec_module instead.
"""
pass
def module_repr(module): # reliably restored by inspect
"""
Return repr for the module.
The method is deprecated. The import machinery does the job itself.
"""
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
__weakref__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""list of weak references to the object (if defined)"""
__dict__ = None # (!) real value is "mappingproxy({'__module__': '_frozen_importlib', '__doc__': 'Meta path import for built-in modules.\\n\\n All methods are either class or static methods to avoid the need to\\n instantiate the class.\\n\\n ', 'module_repr': <staticmethod object at 0x7f3ceeae5048>, 'find_spec': <classmethod object at 0x7f3ceeae5080>, 'find_module': <classmethod object at 0x7f3ceeae50b8>, 'create_module': <classmethod object at 0x7f3ceeae50f0>, 'exec_module': <classmethod object at 0x7f3ceeae5128>, 'get_code': <classmethod object at 0x7f3ceeae5198>, 'get_source': <classmethod object at 0x7f3ceeae5208>, 'is_package': <classmethod object at 0x7f3ceeae5278>, 'load_module': <classmethod object at 0x7f3ceeae52b0>, '__dict__': <attribute '__dict__' of 'BuiltinImporter' objects>, '__weakref__': <attribute '__weakref__' of 'BuiltinImporter' objects>})"
# variables with complex values
__spec__ = None # (!) real value is "ModuleSpec(name='fcntl', loader=<class '_frozen_importlib.BuiltinImporter'>, origin='built-in')"
| [
"[email protected]"
]
| |
ccd41b50340ff40a19e60d5e20e3b3ea9833977d | b2e93927aad0ddf373b1078dc1adfdaf0a3157a7 | /corr_test_terms.py | 70fdd49cd00b6ee488b4751ed908c9586320636d | []
| no_license | TaiSakuma/metrecoat | 30dc084458cf48f8c63f7c6c8ec47ec50cdd6b73 | f62b269a5cb5f066a7cc1a748f999ebe36d6bd41 | refs/heads/master | 2016-09-02T00:07:00.514760 | 2013-12-16T22:03:45 | 2013-12-16T22:03:45 | 12,684,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,141 | py | #!/usr/bin/env python
# Tai Sakuma <[email protected]>
import ROOT
import sys
import math
import json
import re
import unittest
from optparse import OptionParser
ROOT.gROOT.SetBatch(1)
##____________________________________________________________________________||
parser = OptionParser()
parser.add_option('-e', '--expectedPath', default = './corr_terms_expected.root', action = 'store', type = 'string')
parser.add_option('-a', '--actualPath', default = './corr_terms_actual.root', action = 'store', type = 'string')
(options, args) = parser.parse_args(sys.argv)
##____________________________________________________________________________||
class METProducerTest(unittest.TestCase):
def setUp(self):
self.exEvents = Events([options.expectedPath])
self.acEvents = Events([options.actualPath])
self.exHandleCorrMETData = Handle("CorrMETData")
self.acHandleCorrMETData = Handle("CorrMETData")
def test_n_events(self):
self.assertEqual(self.exEvents.size(), self.acEvents.size())
def test_corrCaloMetType2(self):
label = ("corrCaloMetType2", "", "CORR")
self.assert_CorrMETData(label)
def test_corrPfMetShiftXY(self):
label = ("corrPfMetShiftXY", "", "CORR")
self.assert_CorrMETData(label)
def test_corrPfMetType0RecoTrack(self):
label = ("corrPfMetType0RecoTrack", "", "CORR")
self.assert_CorrMETData(label)
def test_corrPfMetType0RecoTrackForType2(self):
label = ("corrPfMetType0RecoTrackForType2", "", "CORR")
self.assert_CorrMETData(label)
def test_corrPfMetType2(self):
label = ("corrPfMetType2", "", "CORR")
self.assert_CorrMETData(label)
def test_corrPfMetType1_offset(self):
label = ("corrPfMetType1", "offset", "CORR")
self.assert_CorrMETData(label)
def test_corrPfMetType1_type1(self):
label = ("corrPfMetType1", "type1", "CORR")
self.assert_CorrMETData(label)
def test_corrPfMetType1_type2(self):
label = ("corrPfMetType1", "type2", "CORR")
self.assert_CorrMETData(label)
def test_pfCandMETcorr(self):
label = ("pfCandMETcorr", "", "CORR")
self.assert_CorrMETData(label)
def test_pfchsMETcorr_type0(self):
label = ("pfchsMETcorr", "type0", "CORR")
self.assert_CorrMETData(label)
def test_corrPfMetType0PfCand(self):
label = ("corrPfMetType0PfCand", "", "CORR")
self.assert_CorrMETData(label)
def test_muonCaloMETcorr(self):
label = ("muonCaloMETcorr", "", "CORR")
self.assert_CorrMETData(label)
def test_corrCaloMetType1_offset(self):
label = ("corrCaloMetType1", "offset", "CORR")
self.assert_CorrMETData(label)
def test_corrCaloMetType1_type1(self):
label = ("corrCaloMetType1", "type1", "CORR")
self.assert_CorrMETData(label)
def test_corrCaloMetType1_type2(self):
label = ("corrCaloMetType1", "type2", "CORR")
self.assert_CorrMETData(label)
def assert_CorrMETData(self, label):
exHandle = self.exHandleCorrMETData
acHandle = self.acHandleCorrMETData
exEventIter = self.exEvents.__iter__()
acEventIter = self.acEvents.__iter__()
nevents = min(self.exEvents.size(), self.acEvents.size())
for i in range(nevents):
exEvent = exEventIter.next()
acEvent = acEventIter.next()
exEvent.getByLabel(label, exHandle)
exCorr = exHandle.product()
acEvent.getByLabel(label, acHandle)
acCorr = acHandle.product()
self.assertAlmostEqual(acCorr.mex, exCorr.mex, 12)
self.assertAlmostEqual(acCorr.mey, exCorr.mey, 12)
self.assertAlmostEqual(acCorr.sumet, exCorr.sumet, 12)
self.assertAlmostEqual(acCorr.significance, exCorr.significance, 12)
##____________________________________________________________________________||
class ROOT_STL_Test(unittest.TestCase):
def test_vector(self):
a = ROOT.vector("double")()
b = ROOT.vector("double")()
self.assertEqual(a, b)
a.push_back(2.2)
self.assertNotEqual(a, b)
a.push_back(3.5)
a.push_back(4.2)
b.push_back(2.2)
b.push_back(3.5)
b.push_back(4.2)
self.assertEqual(a, b)
a.push_back(2.7)
b.push_back(8.9)
self.assertNotEqual(a, b)
##____________________________________________________________________________||
def loadLibraries():
argv_org = list(sys.argv)
sys.argv = [e for e in sys.argv if e != '-h']
ROOT.gSystem.Load("libFWCoreFWLite")
ROOT.AutoLibraryLoader.enable()
ROOT.gSystem.Load("libDataFormatsFWLite")
ROOT.gSystem.Load("libDataFormatsPatCandidates")
sys.argv = argv_org
##____________________________________________________________________________||
loadLibraries()
from DataFormats.FWLite import Events, Handle
##____________________________________________________________________________||
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
84291218bcb69359e7565f114049da7a9bd357a7 | fafb89a3552e4dbb47d134966462ef5f3f37f576 | /KEMP/v0.1/fdtd3d/exchange_boundary/355-measure-mpi_persistent_nonblocking.py | c1fbb85ec48b24c37996bf959eafdc9e592df130 | []
| no_license | EMinsight/fdtd_accelerate | 78fa1546df5264550d12fba3cf964838b560711d | a566c60753932eeb646c4a3dea7ed25c7b059256 | refs/heads/master | 2021-12-14T03:26:52.070069 | 2012-07-25T08:25:21 | 2012-07-25T08:25:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,846 | py | #!/usr/bin/env python
import numpy as np
import sys
from datetime import datetime
from mpi4py import MPI
comm = MPI.COMM_WORLD
rank = comm.Get_rank()
size = comm.Get_size()
tmax = 100
start = 2 * np.nbytes['float32'] * (3 * 32)**2 # 96, 73728 B, 72.0 KiB
end = 2 * np.nbytes['float32'] * (15 * 32)**2 # 480, 1843200 B, 1.76 MiB
increment = (end - start) / 16
nbytes = np.arange(start, end+1, increment)
dts = np.zeros(nbytes.size)
# verify h5 file exist
if rank == 0:
import os
h5_path = './bandwidth_GbE_nonblocking.h5'
if os.path.exists(h5_path):
print('Error: File exist %s' % h5_path)
        sys.exit()  # note: only rank 0 exits; comm.Abort() would stop all ranks
for i, nbyte in enumerate(nbytes):
if rank == 0:
dts[i] = comm.recv(source=1, tag=10) # source, tag
print('nbyte = %d, dt = %f' % (nbyte, dts[i]))
elif rank == 1:
arr_send = np.random.rand(nbyte/np.nbytes['float32']).astype(np.float32)
arr_recv = np.zeros_like(arr_send)
req_send = comm.Send_init(arr_send, dest=2, tag=10)
req_recv = comm.Recv_init(arr_recv, source=2, tag=20)
reqs = [req_send, req_recv]
t0 = datetime.now()
for tstep in xrange(1, tmax+1):
for req in reqs: req.Start()
for req in reqs: req.Wait()
dt0 = datetime.now() - t0
dt = (dt0.seconds + dt0.microseconds * 1e-6) / tmax
#print('[%d] dt = %f' % (rank, dt))
comm.send(dt, dest=0, tag=10) # data, dest, tag
elif rank == 2:
arr_send = np.random.rand(nbyte/np.nbytes['float32']).astype(np.float32)
arr_recv = np.zeros_like(arr_send)
req_send = comm.Send_init(arr_send, dest=1, tag=20)
req_recv = comm.Recv_init(arr_recv, source=1, tag=10)
reqs = [req_send, req_recv]
for tstep in xrange(1, tmax+1):
for req in reqs: req.Start()
for req in reqs: req.Wait()
# Save as h5
if rank == 0:
import h5py as h5
f = h5.File(h5_path, 'w')
f.create_dataset('nbytes', data=nbytes)
f.create_dataset('dts', data=dts)
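    # Added sketch: effective bandwidth from the measured times; each
    # iteration moves nbyte bytes in both directions, hence the factor 2
    # (assumes all dts entries are nonzero).
    f.create_dataset('bandwidths_GBps', data=2 * nbytes / dts / 1e9)
    f.close()  # close explicitly once all datasets are written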
| [
"[email protected]"
]
| |
039324dbe1f538d8614a5cc1c3f43b35c154f65b | 65329299fca8dcf2e204132624d9b0f8f8f39af7 | /napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes_/__init__.py | a2e03cc506bcde6ac792116445848e658db09a92 | [
"Apache-2.0"
]
| permissive | darylturner/napalm-yang | bf30420e22d8926efdc0705165ed0441545cdacf | b14946b884ad2019b896ee151285900c89653f44 | refs/heads/master | 2021-05-14T12:17:37.424659 | 2017-11-17T07:32:49 | 2017-11-17T07:32:49 | 116,404,171 | 0 | 0 | null | 2018-01-05T16:21:37 | 2018-01-05T16:21:36 | null | UTF-8 | Python | false | false | 33,968 | py |
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import state
import default_metric
import delay_metric
import expense_metric
import error_metric
class prefixes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/ipv4-external-reachability/prefixes/prefixes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: IPv4 external prefixes and reachability attributes.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__state','__default_metric','__delay_metric','__expense_metric','__error_metric',)
_yang_name = 'prefixes'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__default_metric = YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__delay_metric = YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__error_metric = YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__expense_metric = YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'protocols', u'protocol', u'isis', u'levels', u'level', u'link-state-database', u'lsp', u'tlvs', u'tlv', u'ipv4-external-reachability', u'prefixes', u'prefixes']
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
YANG Description: State parameters of IPv4 standard prefix.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of IPv4 standard prefix.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_default_metric(self):
"""
Getter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
YANG Description: This container defines ISIS Default Metric.
"""
return self.__default_metric
def _set_default_metric(self, v, load=False):
"""
Setter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_metric() directly.
YANG Description: This container defines ISIS Default Metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """default_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__default_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_default_metric(self):
self.__default_metric = YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_delay_metric(self):
"""
Getter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
YANG Description: This container defines the ISIS delay metric.
"""
return self.__delay_metric
def _set_delay_metric(self, v, load=False):
"""
Setter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_delay_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_delay_metric() directly.
YANG Description: This container defines the ISIS delay metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """delay_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__delay_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_delay_metric(self):
self.__delay_metric = YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_expense_metric(self):
"""
Getter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
YANG Description: This container defines the ISIS expense metric.
"""
return self.__expense_metric
def _set_expense_metric(self, v, load=False):
"""
Setter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_expense_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_expense_metric() directly.
YANG Description: This container defines the ISIS expense metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """expense_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__expense_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_expense_metric(self):
self.__expense_metric = YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_error_metric(self):
"""
Getter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
YANG Description: This container defines the ISIS error metric.
"""
return self.__error_metric
def _set_error_metric(self, v, load=False):
"""
Setter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_error_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_error_metric() directly.
YANG Description: This container defines the ISIS error metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """error_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__error_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_error_metric(self):
self.__error_metric = YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
state = __builtin__.property(_get_state)
default_metric = __builtin__.property(_get_default_metric)
delay_metric = __builtin__.property(_get_delay_metric)
expense_metric = __builtin__.property(_get_expense_metric)
error_metric = __builtin__.property(_get_error_metric)
_pyangbind_elements = {'state': state, 'default_metric': default_metric, 'delay_metric': delay_metric, 'expense_metric': expense_metric, 'error_metric': error_metric, }
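# Usage sketch (added; illustrative only). Instantiating the binding
# auto-creates the child containers imported above:
#
#   p = prefixes()
#   p.default_metric   # auto-instantiated 'default-metric' container
#   p._path()          # YANG path of this container within the model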
import state
import default_metric
import delay_metric
import expense_metric
import error_metric
class prefixes(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/ipv4-external-reachability/prefixes/prefixes. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: IPv4 external prefixes and reachability attributes.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_extmethods', '__state','__default_metric','__delay_metric','__expense_metric','__error_metric',)
_yang_name = 'prefixes'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__default_metric = YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__delay_metric = YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__error_metric = YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
self.__expense_metric = YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'network-instances', u'network-instance', u'protocols', u'protocol', u'isis', u'levels', u'level', u'link-state-database', u'lsp', u'tlvs', u'tlv', u'ipv4-external-reachability', u'prefixes', u'prefixes']
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
YANG Description: State parameters of IPv4 standard prefix.
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State parameters of IPv4 standard prefix.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_default_metric(self):
"""
Getter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
YANG Description: This container defines ISIS Default Metric.
"""
return self.__default_metric
def _set_default_metric(self, v, load=False):
"""
Setter method for default_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/default_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_default_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_default_metric() directly.
YANG Description: This container defines ISIS Default Metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """default_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__default_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_default_metric(self):
self.__default_metric = YANGDynClass(base=default_metric.default_metric, is_container='container', yang_name="default-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_delay_metric(self):
"""
Getter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
YANG Description: This container defines the ISIS delay metric.
"""
return self.__delay_metric
def _set_delay_metric(self, v, load=False):
"""
Setter method for delay_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/delay_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_delay_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_delay_metric() directly.
YANG Description: This container defines the ISIS delay metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """delay_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__delay_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_delay_metric(self):
self.__delay_metric = YANGDynClass(base=delay_metric.delay_metric, is_container='container', yang_name="delay-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_expense_metric(self):
"""
Getter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
YANG Description: This container defines the ISIS expense metric.
"""
return self.__expense_metric
def _set_expense_metric(self, v, load=False):
"""
Setter method for expense_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/expense_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_expense_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_expense_metric() directly.
YANG Description: This container defines the ISIS expense metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """expense_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__expense_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_expense_metric(self):
self.__expense_metric = YANGDynClass(base=expense_metric.expense_metric, is_container='container', yang_name="expense-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
def _get_error_metric(self):
"""
Getter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
YANG Description: This container defines the ISIS error metric.
"""
return self.__error_metric
def _set_error_metric(self, v, load=False):
"""
Setter method for error_metric, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/ipv4_external_reachability/prefixes/prefixes/error_metric (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_error_metric is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_error_metric() directly.
YANG Description: This container defines the ISIS error metric.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
except (TypeError, ValueError):
raise ValueError({
'error-string': """error_metric must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
})
self.__error_metric = t
if hasattr(self, '_set'):
self._set()
def _unset_error_metric(self):
self.__error_metric = YANGDynClass(base=error_metric.error_metric, is_container='container', yang_name="error-metric", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)
state = __builtin__.property(_get_state)
default_metric = __builtin__.property(_get_default_metric)
delay_metric = __builtin__.property(_get_delay_metric)
expense_metric = __builtin__.property(_get_expense_metric)
error_metric = __builtin__.property(_get_error_metric)
_pyangbind_elements = {'state': state, 'default_metric': default_metric, 'delay_metric': delay_metric, 'expense_metric': expense_metric, 'error_metric': error_metric, }
| [
"[email protected]"
]
| |
13a79905720eff6384e566cd5ce78aabc98fad6e | 449f410b621049c4049a4f7d4b0858f53d56a7d7 | /wsgi.py | 4de7f7338ea0c3cfe5726fae5cb96379bb08522d | [
"MIT"
]
| permissive | mvwicky/holdmypics | c02f25fd05d9694ff61d5839bd039a3a1bea4b01 | 194b135f885ef76d55975727a4a5125a6f9d33ee | refs/heads/main | 2023-05-10T19:36:20.978697 | 2023-05-06T21:27:29 | 2023-05-06T21:27:29 | 196,925,416 | 0 | 0 | MIT | 2023-03-31T15:23:01 | 2019-07-15T04:45:27 | Python | UTF-8 | Python | false | false | 98 | py | from __future__ import annotations
from holdmypics import create_app
application = create_app()
| [
"[email protected]"
]
| |
91e2340c81606ad57b2da8e47bcf915ee87475ec | 36dfd21c845f37d1b01c093715d6688513aec704 | /contrib/diggext/drivers/devices/powerstrips/__init__.py | 6fa4d8b6c852defbf7a7be20a2b27cc78c22c843 | []
| permissive | clusto/clusto | f992040ef935cc43a9f967d1412888f56ec82f71 | 7ac64e94482cc71075227dacda48953439f46dab | refs/heads/master | 2021-07-15T21:51:23.247277 | 2021-03-13T13:23:54 | 2021-03-13T13:23:54 | 1,432,240 | 246 | 63 | BSD-3-Clause | 2021-03-13T13:12:45 | 2011-03-02T20:17:28 | Python | UTF-8 | Python | false | false | 25 | py | from servertech import *
| [
"[email protected]"
]
| |
d591713f33f151a2652cf6743bb465b5f8fd228c | 7848ded2f7b1cf5cc33380d739e0ceee5718ffec | /imrunicorn/api/migrations/0001_initial.py | b482e08ed992a3d2a8fb1b85887896b36c690222 | []
| no_license | benspelledabc/djangosite | cbed1a7da3eb6ba6eee05897ec928b350831fc6b | fa8004b20f790f56fc69e9d158128a867be700f3 | refs/heads/master | 2023-04-17T19:24:48.908640 | 2021-05-02T19:05:38 | 2021-05-02T19:05:38 | 294,891,690 | 1 | 1 | null | 2021-05-02T19:05:38 | 2020-09-12T07:16:11 | Python | UTF-8 | Python | false | false | 850 | py | # Generated by Django 3.0.7 on 2021-04-25 07:33
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='DockerHubWebhook',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('pusher', models.CharField(max_length=450)),
('repo_name', models.CharField(max_length=450)),
('tag', models.CharField(max_length=450)),
('date_created', models.DateTimeField(default=django.utils.timezone.now, editable=False)),
],
options={
'ordering': ('-date_created', '-tag'),
},
),
]
| [
"[email protected]"
]
| |
495298d24e69f2a064d50674a1cfb7b26a1e67ae | 60da3d5a9c3957ddbaaf481fea19691f87d682b0 | /frec/utils.py | 025ccc33cb295f36e33179c60bf53eded66b6c7d | []
| no_license | heynemann/frec | 59d46740ee86c7f3fc68fe0e0d14d98c043c8a31 | bb23732f1c367f4c8167adeeefad93cc153851a2 | refs/heads/master | 2020-05-16T23:17:42.826305 | 2012-08-13T13:35:25 | 2012-08-13T13:35:25 | 4,644,976 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 530 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# frec face recognition service
# https://github.com/heynemann/frec
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license
# Copyright (c) 2012 Bernardo Heynemann [email protected]
# code adapted from thumbor's util module (http://github.com/globocom/thumbor)
import logging
def real_import(name):
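    # __import__('a.b.c') returns the top-level package 'a', so walk the
    # remaining dotted path with getattr to reach the leaf module
    # (reduce is a builtin here -- this module targets Python 2).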
if '.' in name:
return reduce(getattr, name.split('.')[1:], __import__(name))
return __import__(name)
logger = logging.getLogger('frec')
| [
"[email protected]"
]
| |
33978a2e58d074881b12e622dcb6d6abc7198bd0 | d4418afe18acc3e46533aedf4ac66c237dab65db | /Processing/bin/htmlDressing.py | 720c796db1f1214595d89fb4559b7f01dffd3967 | []
| no_license | cpausmit/MitProd | c148772ee6c844d384f5e83ca21b77591d979403 | c552a39533487c4e90fb52d35ce083dae24304d8 | refs/heads/master | 2021-01-17T03:49:17.821338 | 2016-11-30T21:56:38 | 2016-11-30T21:56:38 | 14,059,357 | 0 | 4 | null | 2016-10-12T04:40:27 | 2013-11-02T02:56:11 | Python | UTF-8 | Python | false | false | 3,697 | py | #!/usr/bin/env python
#---------------------------------------------------------------------------------------------------
# Script to get a quick overview how far the production has come.
#
# Author: C.Paus (Feb 16, 2016)
#---------------------------------------------------------------------------------------------------
import os,sys,re,getopt
def getHeader():
header = '<!DOCTYPE html><html><head><title>Bambu Production</title></head><style>a:link{color:#000000; background-color:transparent; text-decoration:none}a:visited{color:#009000; background-color:transparent; text-decoration:none}a:hover{color:#900000;background-color:transparent; text-decoration:underline}a:active{color:#900000;background-color:transparent; text-decoration:underline}body.ex{margin-top: 0px; margin-bottom:25px; margin-right: 25px; margin-left: 25px;}</style><body class="ex" bgcolor="#eeeeee"><body style="font-family: arial;font-size: 20px;font-weight: bold;color:#900000;"><pre>\n'
return header
def getFooter():
footer = '</pre></body></html>\n'
return footer
#===================================================================================================
# Main starts here
#===================================================================================================
# Define string to explain usage of the script
usage = "\nUsage: htmlDressing.py [ --input=<id> --help ]\n"
# Define the valid options which can be specified and check out the command line
valid = ['input=','version=','help']
try:
opts, args = getopt.getopt(sys.argv[1:], "", valid)
except getopt.GetoptError, ex:
print usage
print str(ex)
sys.exit(1)
# --------------------------------------------------------------------------------------------------
# Get all parameters for this little task
# --------------------------------------------------------------------------------------------------
# Set defaults
input = ''
version = '000'
# Read new values from the command line
for opt, arg in opts:
if opt == "--help":
print usage
sys.exit(0)
if opt == "--input":
input = arg
if opt == "--version":
version = arg
# Deal with obvious problems
if input == "":
cmd = "--input parameter not provided. This is a required parameter."
raise RuntimeError, cmd
# --------------------------------------------------------------------------------------------------
# Here is where the real action starts -------------------------------------------------------------
# --------------------------------------------------------------------------------------------------
# find new file name
htmlFile = input + '.html'
#print ' ASCII: ' + input
#print ' HTML: ' + htmlFile
fileOutput = open(htmlFile,'w')
# insert header
fileOutput.write(getHeader())
# translate the body
with open(input,"r") as fileInput:
for line in fileInput:
# cleanup CR
line = line[:-1]
## cleanup duplicate blanks
#line = re.sub(' +',' ',line)
        # dataset lines (flagged with '+') get a link to the dataset directory appended
if '+' in line:
f = line.split(' ')
dataset = f.pop()
line = ' '.join(f) \
+ ' <a href="filefi/' + version + '/' + dataset + '">' + dataset + '</a>'
else:
f = line.split(' ')
if len(f) > 1:
v = f.pop()
test = f.pop()
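                # a line ending in "VERSION: <v>" records the version used when building dataset links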
if test == "VERSION:":
version = v
fileOutput.write(line+'\n')
# insert footer
fileOutput.write(getFooter())
fileOutput.close()
| [
"[email protected]"
]
| |
c6b2eb96daa285928570f762bd1a2c747ba8d49d | 33d490698f2958f2d53a6436043959bac5c9f63d | /radius_advanced/models/radius/__init__.py | 0784c3e972603c412b2f1ac75db238c121875525 | []
| no_license | ideosoft/odoo-modules | cf1a4bf0a1f0f25bfa44a83f8c10a2c73baed67e | 3183a533ec9b89a57fd2b4c09cca0111afc86730 | refs/heads/master | 2021-03-30T18:13:42.873503 | 2016-07-14T13:46:01 | 2016-07-14T13:46:01 | 49,328,128 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 68 | py | # -*- coding: utf-8 -*-
import group
import user
import usergroup | [
"[email protected]"
]
| |
a7a7c414f370b2b086402995386596237370520c | 35fe64e51683305123d85701093325858596bdeb | /sliders.py | 6ed673c095f555930634eae5178d60210ed7d022 | []
| no_license | BruceJohnJennerLawso/atmosphere | 9d12a226cd287703927e577b19c6c9fb713448df | 9690d3dd7332f08a2f5fea20d32b5a0cd4408f7f | refs/heads/master | 2022-02-02T09:57:16.452393 | 2018-06-15T19:53:14 | 2018-06-15T19:53:14 | 71,283,572 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,106 | py | ## sliders.py ##################################################################
## all manner of useful widgets to get yo sliding needs done ###################
## looks like this was just a testbed I had for learning how the tkinter #######
## sliders work ################################################################
################################################################################
from Tkinter import Tk
import Tkinter as tk
from sys import argv
def getScaleValue(scaleTitle, scaleUnits):
def setVal(val):
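		# Tkinter's Scale delivers the new position to this callback as a string.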
global outputVal
outputVal = val
def endControl():
control.destroy()
control=tk.Tk()
control.protocol("WM_DELETE_WINDOW",endControl)
	control.title(scaleTitle)  # was bare control.title(); the scaleTitle argument went unused
control.geometry("650x100+100+250")
cline1=tk.Label(control, text=scaleUnits).pack()
cline3=tk.Scale(control,orient=tk.HORIZONTAL,length=580,width=20,sliderlength=10,from_=0,to=100,tickinterval=5, command=setVal)
cline3.set(50)
cline3.pack()
control.mainloop()
print "Slider widget outputting ", outputVal
return outputVal
if(__name__ == "__main__"):
print getScaleValue("Test Scale", "Units")
| [
"[email protected]"
]
| |
b792a234218d5cd156cf4eba9d6f7772a2555e38 | 5a171226c273825345429b0bd6e4a2878ef4979f | /aces_1.0.0/python/aces_ocio/tests/tests_aces_config.py | f41349e6a788eea512555eb6125272b7dfe9da7a | []
| no_license | taka25/OpenColorIO-Configs | 5ea63f024804c1dbc98631358ef5c6f6a051fe8b | 547fceb44bbc1f7475fb17851c3b3cc31b616455 | refs/heads/master | 2021-01-15T09:23:42.345060 | 2015-05-05T18:54:32 | 2015-05-05T18:54:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,135 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Defines unit tests for *ACES* configuration.
"""
from __future__ import division
import hashlib
import os
import re
import shutil
import tempfile
import unittest
from aces_ocio.utilities import files_walker
from aces_ocio.create_aces_config import (
ACES_OCIO_CTL_DIRECTORY_ENVIRON,
create_ACES_config)
__author__ = 'ACES Developers'
__copyright__ = 'Copyright (C) 2014 - 2015 - ACES Developers'
__license__ = ''
__maintainer__ = 'ACES Developers'
__email__ = '[email protected]'
__status__ = 'Production'
__all__ = ['REFERENCE_CONFIG_ROOT_DIRECTORY',
'HASH_TEST_PATTERNS',
'UNHASHABLE_TEST_PATTERNS',
'TestACESConfig']
# TODO: Investigate how the current config has been generated to use it for
# tests.
# REFERENCE_CONFIG_ROOT_DIRECTORY = os.path.abspath(
# os.path.join(os.path.dirname(__file__), '..', '..', '..'))
REFERENCE_CONFIG_ROOT_DIRECTORY = '/colour-science/colour-ramblings/ocio/aces'
HASH_TEST_PATTERNS = ('\.3dl', '\.lut', '\.csp')
UNHASHABLE_TEST_PATTERNS = ('\.icc', '\.ocio')
class TestACESConfig(unittest.TestCase):
"""
Performs tests on the *ACES* configuration.
"""
def setUp(self):
"""
Initialises common tests attributes.
"""
self.__aces_ocio_ctl_directory = os.environ.get(
ACES_OCIO_CTL_DIRECTORY_ENVIRON, None)
assert self.__aces_ocio_ctl_directory is not None, (
'Undefined "{0}" environment variable!'.format(
ACES_OCIO_CTL_DIRECTORY_ENVIRON))
assert os.path.exists(self.__aces_ocio_ctl_directory) is True, (
'"{0}" directory does not exists!'.format(
self.__aces_ocio_ctl_directory))
self.maxDiff = None
self.__temporary_directory = tempfile.mkdtemp()
def tearDown(self):
"""
Post tests actions.
"""
shutil.rmtree(self.__temporary_directory)
@staticmethod
def directory_hashes(directory,
filters_in=None,
filters_out=None,
flags=0):
"""
Recursively computes the hashes from the file within given directory.
Parameters
----------
directory : str or unicode
Directory to compute the file hashes.
filters_in : array_like
Included patterns.
filters_out : array_like
Excluded patterns.
flags : int
Regex flags.
Returns
-------
dict
Directory file hashes.
"""
hashes = {}
for path in files_walker(directory,
filters_in=filters_in,
filters_out=filters_out,
flags=flags):
with open(path) as file:
digest = hashlib.md5(
re.sub('\s', '', file.read())).hexdigest()
hashes[path.replace(directory, '')] = digest
return hashes
def test_ACES_config(self):
"""
Performs tests on the *ACES* configuration by computing hashes on the
generated configuration and comparing them to the existing one.
"""
self.assertTrue(create_ACES_config(self.__aces_ocio_ctl_directory,
self.__temporary_directory))
reference_hashes = self.directory_hashes(
REFERENCE_CONFIG_ROOT_DIRECTORY,
HASH_TEST_PATTERNS)
test_hashes = self.directory_hashes(
self.__temporary_directory,
HASH_TEST_PATTERNS)
self.assertDictEqual(reference_hashes, test_hashes)
        # Checking that unhashable files ('.icc', '.ocio') are generated.
        unhashable = lambda x: (
            sorted([file.replace(x, '') for file in
                    files_walker(x, UNHASHABLE_TEST_PATTERNS)]))
        self.assertListEqual(unhashable(REFERENCE_CONFIG_ROOT_DIRECTORY),
                             unhashable(self.__temporary_directory))
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
a15603b3758d92526600d42a165894eca6cd2f51 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_toeing.py | 94ec751994aa0fdf9ff24eee291846ff392885e8 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 214 | py |
#calss header
class _TOEING():
def __init__(self,):
self.name = "TOEING"
self.definitions = toe
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['toe']
| [
"[email protected]"
]
| |
6a9099a6a18d621eb5bef925cbb1bc5ce827c848 | 325fde42058b2b82f8a4020048ff910cfdf737d7 | /src/azure-firewall/azext_firewall/vendored_sdks/v2020_07_01/v2020_07_01/operations/_vpn_site_links_operations.py | b7c35fdfc04bbbcbdc10df704b1a330725d28142 | [
"LicenseRef-scancode-generic-cla",
"MIT"
]
| permissive | ebencarek/azure-cli-extensions | 46b0d18fe536fe5884b00d7ffa30f54c7d6887d1 | 42491b284e38f8853712a5af01836f83b04a1aa8 | refs/heads/master | 2023-04-12T00:28:44.828652 | 2021-03-30T22:34:13 | 2021-03-30T22:34:13 | 261,621,934 | 2 | 5 | MIT | 2020-10-09T18:21:52 | 2020-05-06T01:25:58 | Python | UTF-8 | Python | false | false | 8,009 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from .. import models
class VpnSiteLinksOperations(object):
"""VpnSiteLinksOperations operations.
You should not instantiate directly this class, but create a Client instance that will create it for you and attach it as attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: Client API version. Constant value: "2020-07-01".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2020-07-01"
self.config = config
def get(
self, resource_group_name, vpn_site_name, vpn_site_link_name, custom_headers=None, raw=False, **operation_config):
"""Retrieves the details of a VPN site link.
:param resource_group_name: The resource group name of the VpnSite.
:type resource_group_name: str
:param vpn_site_name: The name of the VpnSite.
:type vpn_site_name: str
:param vpn_site_link_name: The name of the VpnSiteLink being
retrieved.
:type vpn_site_link_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: VpnSiteLink or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.network.v2020_07_01.models.VpnSiteLink or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vpnSiteName': self._serialize.url("vpn_site_name", vpn_site_name, 'str'),
'vpnSiteLinkName': self._serialize.url("vpn_site_link_name", vpn_site_link_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VpnSiteLink', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks/{vpnSiteLinkName}'}
def list_by_vpn_site(
self, resource_group_name, vpn_site_name, custom_headers=None, raw=False, **operation_config):
"""Lists all the vpnSiteLinks in a resource group for a vpn site.
:param resource_group_name: The resource group name of the VpnSite.
:type resource_group_name: str
:param vpn_site_name: The name of the VpnSite.
:type vpn_site_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of VpnSiteLink
:rtype:
~azure.mgmt.network.v2020_07_01.models.VpnSiteLinkPaged[~azure.mgmt.network.v2020_07_01.models.VpnSiteLink]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def prepare_request(next_link=None):
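            # Build the initial list request, or the raw follow-up request when a next-page link is supplied.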
if not next_link:
# Construct URL
url = self.list_by_vpn_site.metadata['url']
path_format_arguments = {
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'vpnSiteName': self._serialize.url("vpn_site_name", vpn_site_name, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/json'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
return request
def internal_paging(next_link=None):
request = prepare_request(next_link)
response = self._client.send(request, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
header_dict = None
if raw:
header_dict = {}
deserialized = models.VpnSiteLinkPaged(internal_paging, self._deserialize.dependencies, header_dict)
return deserialized
list_by_vpn_site.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/vpnSites/{vpnSiteName}/vpnSiteLinks'}
| [
"[email protected]"
]
| |
5ea023f8b22b079a1c6ef60caf7748d09c188e78 | a742bd051641865d2e5b5d299c6bc14ddad47f22 | /Mysite/Mysite/urls.py | 3b6e04e1c9549e94a6cee0321e84fbd1d387ce51 | []
| no_license | lxconfig/UbuntuCode_bak | fb8f9fae7c42cf6d984bf8231604ccec309fb604 | 3508e1ce089131b19603c3206aab4cf43023bb19 | refs/heads/master | 2023-02-03T19:10:32.001740 | 2020-12-19T07:27:57 | 2020-12-19T07:27:57 | 321,351,481 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,567 | py | """Mysite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from showInfo import views
urlpatterns = [
path('admin/', admin.site.urls),
path("show/", include("showInfo.urls")),
path("app01/", include("app01.urls")),
re_path(r"^edit/(?P<num>\d+).html$", views.index),
path('del/<int:nid>/', views.index2, name="del"),
path("crud", views.CRUD),
path("CBV", views.CBV.as_view()),
path("foreign", views.foreign),
path("paging", views.paging),
path("custom", views.custom),
path("many2many", views.many2many),
path("filters", views.filters),
path("session_login", views.session_login),
path("index", views.index),
path("login", views.male_female_login),
path("male_female_info", views.male_female_info),
path("logout", views.logout),
path("others", views.others),
# path("add", views.add_data),
]
| [
"[email protected]"
]
| |
183738a9cec443d7c91c9694e74ba4eb9486ce94 | 26bd175ffb3bd204db5bcb70eec2e3dfd55fbe9f | /exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/tests/unit/modules/net_tools/nios/test_nios_member.py | c9ad0fcfea3515184182cc14f31e2fdf2be4fc80 | [
"MIT",
"GPL-3.0-only",
"CC0-1.0",
"GPL-1.0-or-later"
]
| permissive | tr3ck3r/linklight | 37814ed19173d893cdff161355d70a1cf538239b | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | refs/heads/master | 2021-04-11T04:33:02.727318 | 2020-03-25T17:38:41 | 2020-03-25T17:38:41 | 248,992,437 | 0 | 0 | MIT | 2020-03-21T14:26:25 | 2020-03-21T14:26:25 | null | UTF-8 | Python | false | false | 6,928 | py | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible_collections.community.general.plugins.module_utils.net_tools.nios import api
from ansible_collections.community.general.plugins.modules.net_tools.nios import nios_member
from ansible_collections.community.general.tests.unit.compat.mock import patch, MagicMock, Mock
from ..test_nios_module import TestNiosModule, load_fixture
class TestNiosMemberModule(TestNiosModule):
module = nios_member
def setUp(self):
super(TestNiosMemberModule, self).setUp()
self.module = MagicMock(name='ansible_collections.community.general.plugins.modules.net_tools.nios.nios_member.WapiModule')
self.module.check_mode = False
self.module.params = {'provider': None}
self.mock_wapi = patch('ansible_collections.community.general.plugins.modules.net_tools.nios.nios_member.WapiModule')
self.exec_command = self.mock_wapi.start()
self.mock_wapi_run = patch('ansible_collections.community.general.plugins.modules.net_tools.nios.nios_member.WapiModule.run')
self.mock_wapi_run.start()
self.load_config = self.mock_wapi_run.start()
def tearDown(self):
super(TestNiosMemberModule, self).tearDown()
self.mock_wapi.stop()
self.mock_wapi_run.stop()
def load_fixtures(self, commands=None):
self.exec_command.return_value = (0, load_fixture('nios_result.txt').strip(), None)
self.load_config.return_value = dict(diff=None, session='session')
def _get_wapi(self, test_object):
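        # Build a WapiModule whose object CRUD calls are mocked out, so run() can be exercised without a live Infoblox endpoint.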
wapi = api.WapiModule(self.module)
wapi.get_object = Mock(name='get_object', return_value=test_object)
wapi.create_object = Mock(name='create_object')
wapi.update_object = Mock(name='update_object')
wapi.delete_object = Mock(name='delete_object')
return wapi
def test_nios_member_create(self):
self.module.params = {'provider': None, 'state': 'present', 'host_name': 'test_member',
'vip_setting': {'address': '192.168.1.110', 'subnet_mask': '255.255.255.0', 'gateway': '192.168.1.1'},
'config_addr_type': 'IPV4', 'platform': 'VNIOS', 'comment': None, 'extattrs': None}
test_object = None
test_spec = {
"host_name": {"ib_req": True},
"vip_setting": {},
"config_addr_type": {},
"platform": {},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.create_object.assert_called_once_with('testobject', {'host_name': 'test_member',
'vip_setting': {'address': '192.168.1.110', 'subnet_mask': '255.255.255.0',
'gateway': '192.168.1.1'},
'config_addr_type': 'IPV4', 'platform': 'VNIOS'})
def test_nios_member_update(self):
self.module.params = {'provider': None, 'state': 'present', 'host_name': 'test_member',
'vip_setting': {'address': '192.168.1.110', 'subnet_mask': '255.255.255.0', 'gateway': '192.168.1.1'},
'config_addr_type': 'IPV4', 'platform': 'VNIOS', 'comment': 'updated comment', 'extattrs': None}
test_object = [
{
"comment": "Created with Ansible",
"_ref": "member/b25lLnZpcnR1YWxfbm9kZSQ3:member01.ansible-dev.com",
"config_addr_type": "IPV4",
"host_name": "member01.ansible-dev.com",
"platform": "VNIOS",
"service_type_configuration": "ALL_V4",
"vip_setting":
{
"address": "192.168.1.100",
"dscp": 0,
"gateway": "192.168.1.1",
"primary": True,
"subnet_mask": "255.255.255.0",
"use_dscp": False
}
}
]
test_spec = {
"host_name": {"ib_req": True},
"vip_setting": {},
"config_addr_type": {},
"platform": {},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
def test_nios_member_remove(self):
self.module.params = {'provider': None, 'state': 'absent', 'host_name': 'test_member',
'vip_setting': {'address': '192.168.1.110', 'subnet_mask': '255.255.255.0', 'gateway': '192.168.1.1'},
'config_addr_type': 'IPV4', 'platform': 'VNIOS', 'comment': 'updated comment', 'extattrs': None}
ref = "member/b25lLnZpcnR1YWxfbm9kZSQ3:member01.ansible-dev.com"
test_object = [
{
"comment": "Created with Ansible",
"_ref": "member/b25lLnZpcnR1YWxfbm9kZSQ3:member01.ansible-dev.com",
"config_addr_type": "IPV4",
"host_name": "member01.ansible-dev.com",
"platform": "VNIOS",
"service_type_configuration": "ALL_V4",
"vip_setting":
{
"address": "192.168.1.100",
"dscp": 0,
"gateway": "192.168.1.1",
"primary": True,
"subnet_mask": "255.255.255.0",
"use_dscp": False
}
}
]
test_spec = {
"host_name": {"ib_req": True},
"vip_setting": {},
"config_addr_type": {},
"platform": {},
"comment": {},
"extattrs": {}
}
wapi = self._get_wapi(test_object)
res = wapi.run('testobject', test_spec)
self.assertTrue(res['changed'])
wapi.delete_object.assert_called_once_with(ref)
| [
"[email protected]"
]
| |
c2e00b209d01add39d31fcef937d285577c5e4f1 | 5c09a25f0b401d525a0d6023399e4910d8a8c487 | /students/alex_skrn/lesson04/mailroom.py | 5ae4740324a09c03e2f86c865016bc03ad5fb085 | []
| no_license | UWPCE-PythonCert-ClassRepos/Wi2018-Online | d6183f47e5b37b6ba56e2310d252334cb6024c9e | 8c32086bf30de8decdc8e5f43cfa1344d5747200 | refs/heads/master | 2021-09-11T13:26:31.817582 | 2018-04-07T20:11:20 | 2018-04-07T20:11:20 | 115,212,113 | 0 | 38 | null | 2018-04-07T20:11:20 | 2017-12-23T17:47:10 | Python | UTF-8 | Python | false | false | 7,714 | py | #!/usr/bin/env python3
"""Mailroom - Part 2 - Dicts, Files."""
import os
import datetime
import tkinter as tk
from tkinter import filedialog
donors = {'Aristarkh Lentulov': [4.5, 5.0],
'El Lissitzky': [34.2, 30.0, 35.5],
'Kazimir Malevich': [15.0, 20.25, 12.25],
'Marc Chagall': [148.75, 155.0],
'Wassily Kandinsky': [75.0, 50.5, 60.4],
}
# DONOR-RELATED FUNCTIONS
def add_donation(name, amount):
"""Add a donation for a named donor."""
if name in donors:
donors[name].append(float(amount))
else:
donors[name] = [float(amount)]
def get_last_donation(name):
"""Return a float -- the last donation of the given donor."""
return donors[name][-1]
def get_total_given(name):
"""Return total amount of donations for the given donor."""
return sum(donors[name])
def get_donations(name):
"""Return a list of the specified donor's donations."""
return donors[name]
def sort_donors_by_total():
"""Return a list of donor names sorted by total donations, max to min."""
donors_L = list(donors.items())
donors_sorted = sorted(donors_L, key=lambda x: sum(x[1]), reverse=True)
sorted_donor_names = []
for item in donors_sorted:
sorted_donor_names.append(item[0])
return sorted_donor_names
def print_donor_names():
"""Print existing donor names on screen."""
print()
donors_L = list(donors.keys())
for name in donors_L[:-1]:
print(name, end=', ')
print(donors_L[-1])
print()
def get_email(name, amount):
"""Return a str containing a thank-you email."""
d = dict(key1=name, key2=amount)
# Can't figure out how to combine {:,} and {key2} below.
# Used a dict here 'cos the assignment seems to ask for it.
email_text = ("\nDear {key1},\n"
"\nI would like to thank you for your donation of ${key2}.\n"
"\nWe appreciate your support.\n"
"\nSincerely,\n"
"The Organization\n"
)
return email_text.format(**d)
def print_email(name, amount):
"""Print a thank-you email on screen."""
print(get_email(name, amount))
def create_report():
"""Create a report."""
title_line_form = "{:<26}{:^3}{:>13}{:^3}{:>13}{:^3}{:>13}"
title_line_text = ('Donor Name', '|', 'Total Given', '|',
'Num Gifts', '|', 'Average Gift'
)
print()
print(title_line_form.format(*title_line_text))
print('- ' * 38)
form_line = "{:<26}{:>3}{:>13}{:>3}{:>13}{:>3}{:>13}"
for name in sort_donors_by_total():
total = get_total_given(name)
num_gifts = len(get_donations(name))
mean = round((total / num_gifts), 2)
print(form_line.format(str(name), '$', str(total), ' ',
str(num_gifts), '$', str(mean)
)
)
print()
# PRINT ON SREEN A THANK YOU LETTER TO SOMEONE WHO JUST MADE A DONATION
def existing_donor_interaction():
"""Ask for old donor name, donation amount, print a thank-you email."""
prompt_name = "Type full name of the old donor or 0 to abort > "
old_donor_name = input(prompt_name)
if old_donor_name == "0":
return
while old_donor_name not in donors:
old_donor_name = input(prompt_name)
if old_donor_name == "0":
return
prompt_amount = "Enter the donation amount or 0 to abort > "
donation_amount = input(prompt_amount)
if donation_amount == "0":
return
# Add the donation amount to the dict.
add_donation(old_donor_name, donation_amount)
print_email(old_donor_name, get_last_donation(old_donor_name))
def new_donor_interaction():
"""Ask for new donor name, donation amount, print a thank-you email."""
prompt_name = "Type full name of the new donor or 0 to abort > "
new_donor_name = input(prompt_name)
if new_donor_name == "0":
return
prompt_amount = "Enter the donation amount or 0 to abort > "
donation_amount = input(prompt_amount)
if donation_amount == "0":
return
# Add the donor and the donation amount to the dict.
add_donation(new_donor_name, donation_amount)
print_email(new_donor_name, get_last_donation(new_donor_name))
# WRITE ALL LETTERS TO FILES
def write_file(destination, name, text):
"""Write text to destination/name.txt."""
date = str(datetime.date.today())
# path = "{}/{}-{}.txt".format(destination, date, name)
filename = "{}-{}.txt".format(date, name)
path = os.path.join(destination, filename)
with open(path, "w") as toF:
toF.write(text)
def write_cwd():
"""Write all emails to the current working directory."""
cwd = os.getcwd()
for name in donors:
text = get_email(name, get_last_donation(name))
write_file(cwd, name, text)
print("\nAll letters saved in {}\n".format(cwd))
def write_select_dir():
"""Write all emails to a dir selected by the user."""
root = tk.Tk()
root.withdraw()
# Get the target directory from the user.
target_dir = filedialog.askdirectory()
for name in donors:
text = get_email(name, get_last_donation(name))
write_file(target_dir, name, text)
print("\nAll letters saved in {}\n".format(target_dir))
# MANAGING MENUS
def quit():
"""Provide an exit option for menus."""
return "exit menu"
def send_all_menu():
"""Initiate the send-all-letters sub-sub-menu."""
menu_selection(write_file_prompt, write_file_dispatch)
def send_thank_you_interaction():
"""Initiate the send-thank-you sub-menu."""
menu_selection(send_thanks_prompt, send_thanks_dispatch)
def menu_selection(prompt, dispatch_dict):
"""Provide a template for using dispatch dicts to switch through menus."""
while True:
response = input(prompt)
try:
if dispatch_dict[response]() == "exit menu":
break
except KeyError:
print("\nInvalid choice. Try again")
pass
if __name__ == "__main__":
# Write to files
write_file_prompt = ("\nSend to everyone sub-menu\n"
"\n1 - Write to current working directory\n"
"2 - Choose a directory to write\n"
"3 - Quit\n"
">> "
)
write_file_dispatch = {"1": write_cwd,
"2": write_select_dir,
"3": quit,
}
# Print on screen
send_thanks_dispatch = {"1": print_donor_names,
"2": new_donor_interaction,
"3": existing_donor_interaction,
"4": quit,
}
send_thanks_prompt = ("\nSend-Thank-You Sub-Menu\n"
"\n1 - See the list of donors\n"
"2 - Add a new donor and a donation amount\n"
"3 - Choose an existing donor\n"
"4 - Quit\n"
">> "
)
# Main menu
main_dispatch = {"1": send_thank_you_interaction,
"2": create_report,
"3": send_all_menu,
"4": quit,
}
main_prompt = ("\nMain Menu\n"
"\n1 - Send a Thank You\n"
"2 - Create a Report\n"
"3 - Send letters to everyone\n"
"4 - Quit\n"
">> "
)
menu_selection(main_prompt, main_dispatch)
| [
"[email protected]"
]
| |
644d2ebcf8dde622bf31286a808291a3ac4b316c | f4fd57b93fa7178a38357f4ab2391aa299a2f1de | /.c9/metadata/environment/employees/resources.py | 179c7eefe305777134f677baa8e6ac57a3df8ca8 | []
| no_license | matthewrr/Employee-Scheduling-App | 10660bba2afead94fe9c43f772e185355f9ac101 | aec55f3a622486d77521f47e83f50c4733694545 | refs/heads/master | 2020-03-22T00:56:47.918978 | 2018-09-02T20:25:15 | 2018-09-02T20:25:15 | 139,274,371 | 1 | 0 | null | 2018-08-03T20:57:43 | 2018-06-30T19:26:13 | Python | UTF-8 | Python | false | false | 4,311 | py | {"filter":false,"title":"resources.py","tooltip":"/employees/resources.py","undoManager":{"mark":6,"position":6,"stack":[[{"start":{"row":0,"column":0},"end":{"row":5,"column":21},"action":"insert","lines":["from import_export import resources","from .models import Event","","class EventResource(resources.ModelResource):"," class Meta:"," model = Event"],"id":1}],[{"start":{"row":1,"column":24},"end":{"row":1,"column":25},"action":"remove","lines":["t"],"id":2},{"start":{"row":1,"column":23},"end":{"row":1,"column":24},"action":"remove","lines":["n"]},{"start":{"row":1,"column":22},"end":{"row":1,"column":23},"action":"remove","lines":["e"]},{"start":{"row":1,"column":21},"end":{"row":1,"column":22},"action":"remove","lines":["v"]},{"start":{"row":1,"column":20},"end":{"row":1,"column":21},"action":"remove","lines":["E"]}],[{"start":{"row":1,"column":20},"end":{"row":1,"column":21},"action":"insert","lines":["E"],"id":3},{"start":{"row":1,"column":21},"end":{"row":1,"column":22},"action":"insert","lines":["m"]},{"start":{"row":1,"column":22},"end":{"row":1,"column":23},"action":"insert","lines":["p"]},{"start":{"row":1,"column":23},"end":{"row":1,"column":24},"action":"insert","lines":["l"]},{"start":{"row":1,"column":24},"end":{"row":1,"column":25},"action":"insert","lines":["o"]},{"start":{"row":1,"column":25},"end":{"row":1,"column":26},"action":"insert","lines":["y"]},{"start":{"row":1,"column":26},"end":{"row":1,"column":27},"action":"insert","lines":["e"]},{"start":{"row":1,"column":27},"end":{"row":1,"column":28},"action":"insert","lines":["e"]}],[{"start":{"row":3,"column":10},"end":{"row":3,"column":11},"action":"remove","lines":["t"],"id":4},{"start":{"row":3,"column":9},"end":{"row":3,"column":10},"action":"remove","lines":["n"]},{"start":{"row":3,"column":8},"end":{"row":3,"column":9},"action":"remove","lines":["e"]},{"start":{"row":3,"column":7},"end":{"row":3,"column":8},"action":"remove","lines":["v"]},{"start":{"row":3,"column":6},"end":{"row":3,"column":7},"action":"remove","lines":["E"]}],[{"start":{"row":3,"column":6},"end":{"row":3,"column":7},"action":"insert","lines":["E"],"id":5},{"start":{"row":3,"column":7},"end":{"row":3,"column":8},"action":"insert","lines":["m"]},{"start":{"row":3,"column":8},"end":{"row":3,"column":9},"action":"insert","lines":["p"]},{"start":{"row":3,"column":9},"end":{"row":3,"column":10},"action":"insert","lines":["l"]},{"start":{"row":3,"column":10},"end":{"row":3,"column":11},"action":"insert","lines":["o"]},{"start":{"row":3,"column":11},"end":{"row":3,"column":12},"action":"insert","lines":["y"]},{"start":{"row":3,"column":12},"end":{"row":3,"column":13},"action":"insert","lines":["e"]},{"start":{"row":3,"column":13},"end":{"row":3,"column":14},"action":"insert","lines":["e"]}],[{"start":{"row":5,"column":20},"end":{"row":5,"column":21},"action":"remove","lines":["t"],"id":6},{"start":{"row":5,"column":19},"end":{"row":5,"column":20},"action":"remove","lines":["n"]},{"start":{"row":5,"column":18},"end":{"row":5,"column":19},"action":"remove","lines":["e"]},{"start":{"row":5,"column":17},"end":{"row":5,"column":18},"action":"remove","lines":["v"]},{"start":{"row":5,"column":16},"end":{"ro
w":5,"column":17},"action":"remove","lines":["E"]}],[{"start":{"row":5,"column":16},"end":{"row":5,"column":17},"action":"insert","lines":["E"],"id":7},{"start":{"row":5,"column":17},"end":{"row":5,"column":18},"action":"insert","lines":["m"]},{"start":{"row":5,"column":18},"end":{"row":5,"column":19},"action":"insert","lines":["p"]},{"start":{"row":5,"column":19},"end":{"row":5,"column":20},"action":"insert","lines":["l"]},{"start":{"row":5,"column":20},"end":{"row":5,"column":21},"action":"insert","lines":["o"]},{"start":{"row":5,"column":21},"end":{"row":5,"column":22},"action":"insert","lines":["y"]},{"start":{"row":5,"column":22},"end":{"row":5,"column":23},"action":"insert","lines":["e"]},{"start":{"row":5,"column":23},"end":{"row":5,"column":24},"action":"insert","lines":["e"]}]]},"ace":{"folds":[],"scrolltop":0,"scrollleft":0,"selection":{"start":{"row":5,"column":24},"end":{"row":5,"column":24},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":0},"timestamp":1531692744297,"hash":"4a8c4abbefa579d5b00201a0d5e392c6b7e745f7"} | [
"[email protected]"
]
| |
c03ee96cb703f340a5d609bd3956659f5382d3ae | 741ee09b8b73187fab06ecc1f07f46a6ba77e85c | /AutonomousSourceCode/data/raw/sort/45f445a1-9136-48b4-8087-3e2ba78df746__ngnotifier_filters.py | da1355a8abf0fee1c219af55f72201920e09c7ba | []
| no_license | erickmiller/AutomatousSourceCode | fbe8c8fbf215430a87a8e80d0479eb9c8807accb | 44ee2fb9ac970acf7389e5da35b930d076f2c530 | refs/heads/master | 2021-05-24T01:12:53.154621 | 2020-11-20T23:50:11 | 2020-11-20T23:50:11 | 60,889,742 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 313 | py | import operator
from django import template
register = template.Library()
@register.filter(name='sort_hosts')
def list_sort(value):
return sorted(value, key=operator.attrgetter('host'))
@register.filter(name='sort_groups')
def list_sort(value):
return sorted(value, key=operator.attrgetter('name')) | [
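# note: both filters share the Python name "list_sort"; registration happens when the decorator runs, so each template filter still resolves by its registered name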
"[email protected]"
]
| |
de46502ffb2b3dece449752adde08694708987ae | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03379/s380015361.py | 436f043c6480227367064b8e867d7e9f66a47528 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | import copy
n = int(input())
x = list(map(int,input().split()))
x2 = copy.copy(x)
x2.sort()
num1,num2 = (len(x)//2)-1,(len(x)//2)
med1,med2 = x2[num1],x2[num2]
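# med1/med2 are the lower and upper medians of the sorted copy (the indexing assumes an even-length list); removing x[i] flips which one applies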
for i in range(n):
if x[i] <= med1:
print(med2)
else:
print(med1) | [
"[email protected]"
]
| |
b5f8c14ad7122cdf83315be435b923ae1edff549 | ff6248be9573caec94bea0fa2b1e4b6bf0aa682b | /StudentProblem/10.21.12.4/1/1569576594.py | 1b464c42214abc044ae82513f5d498da2e00f185 | []
| no_license | LennartElbe/codeEvo | 0e41b1a7705204e934ef71a5a28c047366c10f71 | e89b329bc9edd37d5d9986f07ca8a63d50686882 | refs/heads/master | 2020-12-21T17:28:25.150352 | 2020-03-26T10:22:35 | 2020-03-26T10:22:35 | 236,498,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,283 | py | import functools
import typing
import string
import random
import pytest
## Solution part 1.
def nwords(s: str)-> int:
    """
    Compute the number of words in the string argument s.
    args:
        s(str): text
    return:
        number of words in the text
    """
    if len(s) == 0:
        return 0
    result = 1
    for element in s:
        if element == (" "):
            result += 1
    return result
## Solution part 2.
def word_count_iter(m):
    """
    Return a tuple of the number of lines, the number of words and the
    number of characters read from the argument.
    args:
        m: iterable object
    return:
        tuple (lines, words, characters)
    """
    zeilen = 1
    worte = nwords(m)
    zeichen = len(m)
    for element in m:
        if element == ("\n"):  # count newlines, not spaces, for the line total
            zeilen += 1
    return (zeilen, worte, zeichen)
######################################################################
## Solution part 3. (tests)
assert word_count_iter("Hallo") == (1,1,5)
## revert
try:
word_count_iter = word_count_iter.__wrapped__
except:
pass
## Solution part 4.
def word_count():
pass
######################################################################
| [
"[email protected]"
]
| |
c9169e0f5182f273adf69081428b61877d165c48 | 2f98aa7e5bfc2fc5ef25e4d5cfa1d7802e3a7fae | /python/python_3091.py | 4cc06869f0330e8014d867c4d1cdc15f089c51c8 | []
| no_license | AK-1121/code_extraction | cc812b6832b112e3ffcc2bb7eb4237fd85c88c01 | 5297a4a3aab3bb37efa24a89636935da04a1f8b6 | refs/heads/master | 2020-05-23T08:04:11.789141 | 2015-10-22T19:19:40 | 2015-10-22T19:19:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 59 | py | # How to call an element in an numpy array?
print arr[1,1]
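# e.g., assuming arr = numpy.array([[1, 2], [3, 4]]), this prints 4; arr[1][1] is the chained-indexing equivalent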
| [
"[email protected]"
]
| |
5b34f79f6412bb654bf991a1c15c12d7d6e8f3b7 | 730d9a4b1a8a031d3dd158f695634ef9a3da9506 | /non_hugin_based_stitching/parse_output_for_hugin_2.py | 5eca6ddecf0912b2faf181d3a81fd2a3d4a03885 | []
| no_license | tingleshao/kirito | dcf6ddb49059624ef5f4aff966b1d56ef516d740 | 6309852cbd24b21e724060510a71e6ff84789c96 | refs/heads/master | 2020-05-20T21:12:37.159318 | 2017-07-21T20:22:42 | 2017-07-21T20:22:42 | 84,524,894 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,113 | py | import matched
import cv2
# parse the plain text output into hugin style
# the opencv feature extraction is on images with scale 581 x xxx
# hugin uses the original image resolution
# 2160 x xxxx
adjacent_map = [[1, 8, 9, 10, 11, 12, 13, 14],[0, 2, 7, 8, 9, 13, 14, 15],[1, 3, 6, 7, 8, 14, 15, 16],
[2, 4, 5, 6, 7, 15, 16, 17],[3, 5, 6, 16, 17],[3, 4, 6],[2, 3, 4, 5, 7],[1, 2, 3, 6, 8],
[0, 1, 2, 7, 9],[0, 1, 8, 10, 11], [0, 9, 11],[0, 9, 10, 12, 13],[0, 11, 13],[0, 1, 11, 12, 14],
[0, 1, 2, 13, 15],[1, 2, 3, 14, 16],[2, 3, 4, 15, 17],[3, 4, 16]]
scaled_image_h = 581.0
original_image_h = 2160.0
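# Hugin expects control points in original-resolution coordinates, so the
# feature coordinates below are rescaled by original_image_h / scaled_image_h.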
def main():
ratio = original_image_h / scaled_image_h
with open("parsed_output_2.txt") as inpust_file:
text = input_file.read()
lines = text.split('\n')
curr_idx = 0
output_str = "# control points\n"
while curr_idx < len(lines) and len(lines[curr_idx]) > 0:
lines = lines[curr_idx]
curr_key = (int(float(line.split(' ')[1])), int(float(line.split(' ')[2]))
curr_i = curr_idx + 1
values_lst = []
dist_lst = []
while curr_i < len(lines) and len(lines[curr_i]) > 0 and lines[curr_i][0] != '#':
line = lines[curr_i]
tokens = line.split(' ')
values_lst.append([float(tokens[x]) for x in range(4)])
            dist_lst.append(float(tokens[4]))
curr_i = curr_i + 1
curr_idx = curr_i
if curr_key[1] in adjacent_map[curr_key[0]]:
important_features = [values_lst[w] for w in sorted(range(len(values_lst)), key=lambda k: dist_lst[k])]
for i in range(min(len(important_features), 25)):
output_str = output_str + "c n{0} N{1} x{2:.12f} y{3:.12f} X{4:.12f} Y{5:.12f} t0\n".format(curr_key[0], curr_key[1], important_features[i][0] * ratio, important_features[i][1] * ratio, important_features[i][2] * ratio, important_features[i][3] * ratio)
with open("parsed_output_for_hugin.txt", 'w') as output_file:
output_file.write(output_str)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
3eddaa9d2ce87b1a7007b1bf576aa4a6cd259e13 | 5d9bd0ee9ef2875b5d0f050bb859bbc1a9e8cf2d | /邮件发送/邮件1.py | 836a6f16decc5830592be48b8aa3d1301d817933 | []
| no_license | StarLord777/Spider-learn | 75a825fd2f858682c7388bd5565a9b126e515fba | 1b206df0d6298f3ac401d85ded86f6fb328e49a4 | refs/heads/master | 2020-03-27T10:01:36.215743 | 2018-10-12T13:23:14 | 2018-10-12T13:23:14 | 146,390,047 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,226 | py | import smtplib
from email.mime.text import MIMEText
from email.header import Header
import threading,time
import redis
redis = redis.Redis(db=15)
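# Redis connection (db 15); unused below, presumably intended for loading recipient addresses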
def sendMailToOne():
    print('Thread {} is running---------------'.format(threading.current_thread().name))
sender = '[email protected]'
receiver = ['[email protected]']
#receiver = []
    # fetch the recipient list
    msg = MIMEText('Carefully collected study materials, including but not limited to English (exams, spoken English), '
                   'computing (programming, network security, hardware/maker), drawing, fitness, VIP movies/TV/music, etc.',
                   'plain', 'utf-8')
msg['From'] = Header('001')
    msg['To'] = Header('Dear learner', 'utf-8')
    msg['Subject'] = Header('Contact me if you need study materials of any kind', 'utf-8')
try:
smtpobj = smtplib.SMTP('smtp.vipjiexi.club')
smtpobj.login('[email protected]', 'qjp12345')
smtpobj.sendmail(sender, receiver, msg.as_string())
        print('Sent successfully')
smtpobj.quit()
smtpobj.close()
except Exception as e:
        print('{}-----error occurred'.format(e))
if __name__ == '__main__':
for i in range(20):
threading.Thread(target=sendMailToOne,).start()
time.sleep(1) | [
"[email protected]"
]
| |
8b87c7af62f2b1e6b49f820f026a418edaecf433 | 339f9b9425c76c1c09c6244fa9918482477d4a5a | /catkin_ws/devel_isolated/apriltag_ros/lib/python3/dist-packages/apriltag_ros/msg/_AprilTagDetectionArray.py | 2b64a27431ae88c6e057d9e483ea1eaca93e9547 | []
| no_license | kloya03/AuE893_SP21_KartikLoya | b33b4830e678b0ba2f6531f5d311cc95d05ba89d | 84c72144878d5f41daa199aa6636d2e4e2cd849d | refs/heads/main | 2023-04-13T14:41:43.970494 | 2021-04-26T01:41:31 | 2021-04-26T01:41:31 | 356,286,456 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,916 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from apriltag_ros/AprilTagDetectionArray.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import apriltag_ros.msg
import geometry_msgs.msg
import std_msgs.msg
class AprilTagDetectionArray(genpy.Message):
_md5sum = "2b6c03434883a5c9897c13b5594dbd91"
_type = "apriltag_ros/AprilTagDetectionArray"
_has_header = True # flag to mark the presence of a Header object
_full_text = """std_msgs/Header header
AprilTagDetection[] detections
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
string frame_id
================================================================================
MSG: apriltag_ros/AprilTagDetection
# Tag ID(s). If a standalone tag, this is a vector of size 1. If a tag bundle,
# this is a vector containing the IDs of each tag in the bundle.
int32[] id
# Tag size(s). If a standalone tag, this is a vector of size 1. If a tag bundle,
# this is a vector containing the sizes of each tag in the bundle, in the same
# order as the IDs above.
float64[] size
# Pose in the camera frame, obtained from homography transform. If a standalone
# tag, the homography is from the four tag corners. If a tag bundle, the
# homography is from at least the four corners of one member tag and at most the
# four corners of all member tags.
geometry_msgs/PoseWithCovarianceStamped pose
================================================================================
MSG: geometry_msgs/PoseWithCovarianceStamped
# This expresses an estimated pose with a reference coordinate frame and timestamp
Header header
PoseWithCovariance pose
================================================================================
MSG: geometry_msgs/PoseWithCovariance
# This represents a pose in free space with uncertainty.
Pose pose
# Row-major representation of the 6x6 covariance matrix
# The orientation parameters use a fixed-axis representation.
# In order, the parameters are:
# (x, y, z, rotation about X axis, rotation about Y axis, rotation about Z axis)
float64[36] covariance
================================================================================
MSG: geometry_msgs/Pose
# A representation of pose in free space, composed of position and orientation.
Point position
Quaternion orientation
================================================================================
MSG: geometry_msgs/Point
# This contains the position of a point in free space
float64 x
float64 y
float64 z
================================================================================
MSG: geometry_msgs/Quaternion
# This represents an orientation in free space in quaternion form.
float64 x
float64 y
float64 z
float64 w
"""
__slots__ = ['header','detections']
_slot_types = ['std_msgs/Header','apriltag_ros/AprilTagDetection[]']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
header,detections
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(AprilTagDetectionArray, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.header is None:
self.header = std_msgs.msg.Header()
if self.detections is None:
self.detections = []
else:
self.header = std_msgs.msg.Header()
self.detections = []
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
length = len(self.detections)
buff.write(_struct_I.pack(length))
for val1 in self.detections:
length = len(val1.id)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(struct.Struct(pattern).pack(*val1.id))
length = len(val1.size)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(struct.Struct(pattern).pack(*val1.size))
_v1 = val1.pose
_v2 = _v1.header
_x = _v2.seq
buff.write(_get_struct_I().pack(_x))
_v3 = _v2.stamp
_x = _v3
buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
_x = _v2.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_v4 = _v1.pose
_v5 = _v4.pose
_v6 = _v5.position
_x = _v6
buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
_v7 = _v5.orientation
_x = _v7
buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
buff.write(_get_struct_36d().pack(*_v4.covariance))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.detections is None:
self.detections = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.detections = []
for i in range(0, length):
val1 = apriltag_ros.msg.AprilTagDetection()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
s = struct.Struct(pattern)
end += s.size
val1.id = s.unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val1.size = s.unpack(str[start:end])
_v8 = val1.pose
_v9 = _v8.header
start = end
end += 4
(_v9.seq,) = _get_struct_I().unpack(str[start:end])
_v10 = _v9.stamp
_x = _v10
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v9.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v9.frame_id = str[start:end]
_v11 = _v8.pose
_v12 = _v11.pose
_v13 = _v12.position
_x = _v13
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v14 = _v12.orientation
_x = _v14
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
start = end
end += 288
_v11.covariance = _get_struct_36d().unpack(str[start:end])
self.detections.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_3I().pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
_x = self.header.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
length = len(self.detections)
buff.write(_struct_I.pack(length))
for val1 in self.detections:
length = len(val1.id)
buff.write(_struct_I.pack(length))
pattern = '<%si'%length
buff.write(val1.id.tostring())
length = len(val1.size)
buff.write(_struct_I.pack(length))
pattern = '<%sd'%length
buff.write(val1.size.tostring())
_v15 = val1.pose
_v16 = _v15.header
_x = _v16.seq
buff.write(_get_struct_I().pack(_x))
_v17 = _v16.stamp
_x = _v17
buff.write(_get_struct_2I().pack(_x.secs, _x.nsecs))
_x = _v16.frame_id
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_v18 = _v15.pose
_v19 = _v18.pose
_v20 = _v19.position
_x = _v20
buff.write(_get_struct_3d().pack(_x.x, _x.y, _x.z))
_v21 = _v19.orientation
_x = _v21
buff.write(_get_struct_4d().pack(_x.x, _x.y, _x.z, _x.w))
buff.write(_v18.covariance.tostring())
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.header is None:
self.header = std_msgs.msg.Header()
if self.detections is None:
self.detections = None
end = 0
_x = self
start = end
end += 12
(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _get_struct_3I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.header.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
self.header.frame_id = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
self.detections = []
for i in range(0, length):
val1 = apriltag_ros.msg.AprilTagDetection()
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%si'%length
start = end
s = struct.Struct(pattern)
end += s.size
val1.id = numpy.frombuffer(str[start:end], dtype=numpy.int32, count=length)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
pattern = '<%sd'%length
start = end
s = struct.Struct(pattern)
end += s.size
val1.size = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=length)
_v22 = val1.pose
_v23 = _v22.header
start = end
end += 4
(_v23.seq,) = _get_struct_I().unpack(str[start:end])
_v24 = _v23.stamp
_x = _v24
start = end
end += 8
(_x.secs, _x.nsecs,) = _get_struct_2I().unpack(str[start:end])
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
_v23.frame_id = str[start:end].decode('utf-8', 'rosmsg')
else:
_v23.frame_id = str[start:end]
_v25 = _v22.pose
_v26 = _v25.pose
_v27 = _v26.position
_x = _v27
start = end
end += 24
(_x.x, _x.y, _x.z,) = _get_struct_3d().unpack(str[start:end])
_v28 = _v26.orientation
_x = _v28
start = end
end += 32
(_x.x, _x.y, _x.z, _x.w,) = _get_struct_4d().unpack(str[start:end])
start = end
end += 288
_v25.covariance = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=36)
self.detections.append(val1)
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_2I = None
def _get_struct_2I():
global _struct_2I
if _struct_2I is None:
_struct_2I = struct.Struct("<2I")
return _struct_2I
_struct_36d = None
def _get_struct_36d():
global _struct_36d
if _struct_36d is None:
_struct_36d = struct.Struct("<36d")
return _struct_36d
_struct_3I = None
def _get_struct_3I():
global _struct_3I
if _struct_3I is None:
_struct_3I = struct.Struct("<3I")
return _struct_3I
_struct_3d = None
def _get_struct_3d():
global _struct_3d
if _struct_3d is None:
_struct_3d = struct.Struct("<3d")
return _struct_3d
_struct_4d = None
def _get_struct_4d():
global _struct_4d
if _struct_4d is None:
_struct_4d = struct.Struct("<4d")
return _struct_4d
| [
"[email protected]"
]
| |
255178d0466894ec672c8e0a84ade575a81e6df0 | 62050251587de5a816cb55deb20ad57be5362fa1 | /ws2812b_rmt_demo-2.py | e095922651b373cc1cc602ae80659e3748d439d7 | []
| no_license | rsp-esl/micropython_esp32_examples | 60832e7bc4a2e7864d60f96d9d523f754d7ea085 | 073222d467ae914838a0a475d07cb679f7a4ba01 | refs/heads/master | 2022-09-17T10:42:44.331333 | 2020-05-27T15:22:31 | 2020-05-27T15:22:31 | 257,871,864 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 955 | py | # File: rmt_ws2812b_demo-2.py
# Date: 2022-05-27
from machine import Pin
import utime as time
from ws2812b import WS2812B
WS2812B_PIN = Pin(27)
NUM_LEDS = 8
# create WS2812B object for n-pixel RGB LED strip
strip = WS2812B(pin=WS2812B_PIN,n=NUM_LEDS)
# define test colors
COLORS = [
0x3f << 16, 0x3f << 8, 0x3f,
0x3f3f00, 0x3f003f, 0x003f3f,
0x7f7f7f,(127,20,20) ]
try:
for i in range(NUM_LEDS):
strip[i] = COLORS[i]
strip.update()
time.sleep_ms(500)
# press Ctrl+C to terminate
while True: # main loop
for i in range(16):
if i < 8:
# rotate shift left
strip.shift_left()
else:
# rotate shift right
strip.shift_right()
strip.update()
time.sleep_ms(1000)
except KeyboardInterrupt:
print('Terminated...')
finally:
strip.clear()
strip.deinit() # release the RMT channel
| [
"[email protected]"
]
| |
c2aea491b89982df7749eb3e86a0983090c42777 | a1ee744a2b31bd8177589924c06dcb065fe16646 | /2022/22_Monkey_Map/main.py | acb8a9528fb940c80491625ac337d2f9a7374394 | []
| no_license | claytonjwong/advent-of-code | 7cdd381698b62276ca58a6ef684c950aeb315c53 | 234298a5b6d40ef17f48065b37cbfcca70e02513 | refs/heads/master | 2023-01-27T18:33:17.558144 | 2023-01-16T17:36:51 | 2023-01-16T17:37:07 | 161,686,749 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,437 | py | #
# https://adventofcode.com/2022/day/22
#
A, dirs = [], []
with open('input.txt') as input:
for line in input:
line = list(line[:-1]) # discard trailing newline
if not len(line):
continue
elif line[0] in [' ', '.', '#']:
A.append(line)
else:
            steps = 0
            for c in line:
                if c.isdigit():
                    steps = 10 * steps + int(c)
                else:
                    if steps:
                        dirs.append(steps); steps = 0
                    dirs.append(c)
            if steps:  # flush a trailing run of digits at the end of the path
                dirs.append(steps)
M = len(A)
N = max(len(A[i]) for i in range(M))
for row in A:
pad = [' '] * (N - len(row))
row.extend(pad)
R, D, L, U = 0, 1, 2, 3 # right, down, left, up
class Walker:
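    # part-1 walker on the flattened map: position (i, j) and facing d; steps
    # wrap around to the opposite edge of the same row/column, skipping padding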
def __init__(self):
self.i = 0
self.j = A[0].index('.')
self.d = R
def turn(self, d):
if self.d == R:
if d == 'L': self.d = U
if d == 'R': self.d = D
elif self.d == D:
if d == 'L': self.d = R
if d == 'R': self.d = L
elif self.d == L:
if d == 'L': self.d = D
if d == 'R': self.d = U
elif self.d == U:
if d == 'L': self.d = L
if d == 'R': self.d = R
def walk(self, steps):
di, dj = (0, 1) if self.d == R else (1, 0) if self.d == D else (0, -1) if self.d == L else (-1, 0) # right, down, left, up
while steps:
steps -= 1
u, v = (self.i + di, self.j + dj)
if self.d == R and not self.step_R(u, v): break
if self.d == D and not self.step_D(u, v): break
if self.d == L and not self.step_L(u, v): break
if self.d == U and not self.step_U(u, v): break
def step_R(self, u, v):
if 0 <= v < N and A[u][v] == '.': # step right
self.j = v
return True
if v == N or A[u][v] == ' ': # wrap-around left
v = 0
while A[u][v] == ' ':
v += 1
if A[u][v] == '.':
self.j = v
return True
return False
def step_D(self, u, v):
if 0 <= u < M and A[u][v] == '.': # step down
self.i = u
return True
if u == M or A[u][v] == ' ': # wrap-around up
u = 0
while A[u][v] == ' ':
u += 1
if A[u][v] == '.':
self.i = u
return True
return False
def step_L(self, u, v):
if 0 <= v < N and A[u][v] == '.': # step left
self.j = v
return True
if v < 0 or A[u][v] == ' ': # wrap-around right
v = N - 1
while A[u][v] == ' ':
v -= 1
if A[u][v] == '.':
self.j = v
return True
return False
def step_U(self, u, v):
if 0 <= u < M and A[u][v] == '.': # step up
self.i = u
return True
if u < 0 or A[u][v] == ' ': # wrap-around down
u = M - 1
while A[u][v] == ' ':
u -= 1
if A[u][v] == '.':
self.i = u
return True
return False
walker = Walker()
for x in dirs:
if type(x) == int:
walker.walk(x)
else:
walker.turn(x)
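# final password per the puzzle: 1000 * (row + 1) + 4 * (col + 1) + facing (R=0, D=1, L=2, U=3)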
part1 = (walker.i + 1) * 1000 + (walker.j + 1) * 4 + walker.d
print(f'part 1: {part1}')
# part 1: 165094 | [
"[email protected]"
]
| |
34da2c9efaf65c6127540f5c1078643dff788197 | bc8b15fa1e22983995063d5ddf46d58dc0a13787 | /apps/login_registration/models.py | 24fb3de4849d164b02a7086bf3e0482e53857d1e | []
| no_license | mtjhartley/new_semi_restful_routes | d8aa4e95f900d1841c46461b05940f6e3fdb883b | 44e5b7053643302489b27a529f4593420aebe516 | refs/heads/master | 2022-12-10T07:30:19.023696 | 2017-06-26T22:35:12 | 2017-06-26T22:35:12 | 95,484,930 | 0 | 0 | null | 2022-12-07T23:58:08 | 2017-06-26T20:06:52 | JavaScript | UTF-8 | Python | false | false | 4,167 | py | from __future__ import unicode_literals
from django.db import models
import datetime
import re
import bcrypt
EMAIL_REGEX = re.compile(r'^[a-zA-Z0-9.+_-]+@[a-zA-Z0-9._-]+\.[a-zA-Z]+$')
# Create your models here.
class UserManager(models.Manager):
#takes request.POST, returns loginObject. If "user" in loginObject, can proceed. Else,
#model views will handle logic to redirect to login page and render messages.
def isValidLogin(self, userInfo):
loginObject = {
"errors": []
}
if User.objects.filter(email=userInfo['email']):
hashed = User.objects.get(email=userInfo['email']).password
hashed = hashed.encode('utf-8')
password = userInfo['password']
password = password.encode('utf-8')
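            # bcrypt.hashpw re-hashes the candidate password with the salt embedded
            # in the stored hash; equality means the password is correct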
if bcrypt.hashpw(password, hashed) == hashed:
user = User.objects.get(email=userInfo['email'])
loginObject['user'] = user
else:
loginObject['errors'].append("Unsuccessful login, incorrect password")
else:
loginObject['errors'].append("Unsuccessful login, email does not exist.")
return loginObject
def isValidRegistration(self, userInfo):
all_users = User.objects.all()
if all_users:
admin = False
else:
admin = True
registrationObject = {
"errors": []
}
validRegistration = True
if not userInfo['first_name'].isalpha():
registrationObject['errors'].append('First name contains non-alpha character(s).')
validRegistration = False
if len(userInfo['first_name']) < 2:
registrationObject['errors'].append('First name is less than 2 char.')
validRegistration = False
if not userInfo['last_name'].isalpha():
registrationObject['errors'].append('Last name contains non-alpha character(s).')
validRegistration = False
if len(userInfo['last_name']) < 2:
registrationObject['errors'].append('Last name is less than 2 char.')
validRegistration = False
if User.objects.filter(email=userInfo['email']):
registrationObject['errors'].append("Email is already registered.")
validRegistration = False
if not EMAIL_REGEX.match(userInfo['email']):
registrationObject['errors'].append('Email is not a valid Email!')
validRegistration = False
        if len(userInfo['password']) < 8:
registrationObject['errors'].append('Password is too short. Must be at least 8 char.')
validRegistration = False
if userInfo['password'] != userInfo['confirm_password']:
registrationObject['errors'].append('Passwords do not match!')
validRegistration = False
'''
now = datetime.datetime.now()
birthday = datetime.datetime.strptime(userInfo['birthday'], '%Y-%m-%d')
if birthday > now:
registrationObject['errors'].append("You can't be born in the future!")
validRegistration = False
'''
if validRegistration:
hashed = bcrypt.hashpw(userInfo['password'].encode(), bcrypt.gensalt())
new_user = User.objects.create(first_name = userInfo['first_name'], last_name = userInfo['last_name'], email=userInfo['email'], password=hashed, admin=admin)
registrationObject['user'] = new_user
return registrationObject
class User(models.Model):
first_name = models.CharField(max_length=255)
last_name = models.CharField(max_length=255)
email = models.CharField(max_length=255)
password = models.CharField(max_length=60)
#birthday = models.DateField(null=True, blank=True)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
admin = models.BooleanField(default=False)
objects = UserManager() | [
"[email protected]"
]
| |
de53da853cc6103cea2c395036310e4d7cea6c8b | af1a5e8245a34cb205216bc3e196045bb53f27d1 | /cottonformation/res/eventschemas.py | f9bdafc5e0c0c3a833bf51c8b24dd57bf16846c4 | [
"BSD-2-Clause"
]
| permissive | gitter-badger/cottonformation-project | b77dfca5679566fb23a63d94c0f56aebdd6f2508 | 354f1dce7ea106e209af2d5d818b6033a27c193c | refs/heads/main | 2023-06-02T05:51:51.804770 | 2021-06-27T02:52:39 | 2021-06-27T02:52:39 | 380,639,731 | 0 | 0 | BSD-2-Clause | 2021-06-27T03:08:21 | 2021-06-27T03:08:21 | null | UTF-8 | Python | false | false | 16,852 | py | # -*- coding: utf-8 -*-
"""
This module
"""
import attr
import typing
from ..core.model import (
Property, Resource, Tag, GetAtt, TypeHint, TypeCheck,
)
from ..core.constant import AttrMeta
#--- Property declaration ---
@attr.s
class SchemaTagsEntry(Property):
"""
AWS Object Type = "AWS::EventSchemas::Schema.TagsEntry"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-schema-tagsentry.html
Property Document:
- ``rp_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-schema-tagsentry.html#cfn-eventschemas-schema-tagsentry-key
- ``rp_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-schema-tagsentry.html#cfn-eventschemas-schema-tagsentry-value
"""
AWS_OBJECT_TYPE = "AWS::EventSchemas::Schema.TagsEntry"
rp_Key: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Key"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-schema-tagsentry.html#cfn-eventschemas-schema-tagsentry-key"""
rp_Value: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Value"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-schema-tagsentry.html#cfn-eventschemas-schema-tagsentry-value"""
@attr.s
class DiscovererTagsEntry(Property):
"""
AWS Object Type = "AWS::EventSchemas::Discoverer.TagsEntry"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-discoverer-tagsentry.html
Property Document:
- ``rp_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-discoverer-tagsentry.html#cfn-eventschemas-discoverer-tagsentry-key
- ``rp_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-discoverer-tagsentry.html#cfn-eventschemas-discoverer-tagsentry-value
"""
AWS_OBJECT_TYPE = "AWS::EventSchemas::Discoverer.TagsEntry"
rp_Key: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Key"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-discoverer-tagsentry.html#cfn-eventschemas-discoverer-tagsentry-key"""
rp_Value: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Value"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-discoverer-tagsentry.html#cfn-eventschemas-discoverer-tagsentry-value"""
@attr.s
class RegistryTagsEntry(Property):
"""
AWS Object Type = "AWS::EventSchemas::Registry.TagsEntry"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-registry-tagsentry.html
Property Document:
- ``rp_Key``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-registry-tagsentry.html#cfn-eventschemas-registry-tagsentry-key
- ``rp_Value``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-registry-tagsentry.html#cfn-eventschemas-registry-tagsentry-value
"""
AWS_OBJECT_TYPE = "AWS::EventSchemas::Registry.TagsEntry"
rp_Key: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Key"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-registry-tagsentry.html#cfn-eventschemas-registry-tagsentry-key"""
rp_Value: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Value"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-eventschemas-registry-tagsentry.html#cfn-eventschemas-registry-tagsentry-value"""
#--- Resource declaration ---
@attr.s
class Discoverer(Resource):
"""
AWS Object Type = "AWS::EventSchemas::Discoverer"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html
Property Document:
- ``rp_SourceArn``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html#cfn-eventschemas-discoverer-sourcearn
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html#cfn-eventschemas-discoverer-description
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html#cfn-eventschemas-discoverer-tags
"""
AWS_OBJECT_TYPE = "AWS::EventSchemas::Discoverer"
rp_SourceArn: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "SourceArn"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html#cfn-eventschemas-discoverer-sourcearn"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html#cfn-eventschemas-discoverer-description"""
p_Tags: typing.List[typing.Union['DiscovererTagsEntry', dict]] = attr.ib(
default=None,
converter=DiscovererTagsEntry.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(DiscovererTagsEntry), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html#cfn-eventschemas-discoverer-tags"""
@property
def rv_DiscovererArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html#aws-resource-eventschemas-discoverer-return-values"""
return GetAtt(resource=self, attr_name="DiscovererArn")
@property
def rv_DiscovererId(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-discoverer.html#aws-resource-eventschemas-discoverer-return-values"""
return GetAtt(resource=self, attr_name="DiscovererId")
@attr.s
class RegistryPolicy(Resource):
"""
AWS Object Type = "AWS::EventSchemas::RegistryPolicy"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registrypolicy.html
Property Document:
- ``rp_Policy``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registrypolicy.html#cfn-eventschemas-registrypolicy-policy
- ``rp_RegistryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registrypolicy.html#cfn-eventschemas-registrypolicy-registryname
- ``p_RevisionId``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registrypolicy.html#cfn-eventschemas-registrypolicy-revisionid
"""
AWS_OBJECT_TYPE = "AWS::EventSchemas::RegistryPolicy"
rp_Policy: dict = attr.ib(
default=None,
validator=attr.validators.instance_of(dict),
metadata={AttrMeta.PROPERTY_NAME: "Policy"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registrypolicy.html#cfn-eventschemas-registrypolicy-policy"""
rp_RegistryName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RegistryName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registrypolicy.html#cfn-eventschemas-registrypolicy-registryname"""
p_RevisionId: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "RevisionId"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registrypolicy.html#cfn-eventschemas-registrypolicy-revisionid"""
@property
def rv_Id(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registrypolicy.html#aws-resource-eventschemas-registrypolicy-return-values"""
return GetAtt(resource=self, attr_name="Id")
@attr.s
class Schema(Resource):
"""
AWS Object Type = "AWS::EventSchemas::Schema"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html
Property Document:
- ``rp_Content``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-content
- ``rp_RegistryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-registryname
- ``rp_Type``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-type
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-description
- ``p_SchemaName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-schemaname
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-tags
"""
AWS_OBJECT_TYPE = "AWS::EventSchemas::Schema"
rp_Content: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Content"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-content"""
rp_RegistryName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "RegistryName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-registryname"""
rp_Type: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.instance_of(TypeCheck.intrinsic_str_type),
metadata={AttrMeta.PROPERTY_NAME: "Type"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-type"""
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-description"""
p_SchemaName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "SchemaName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-schemaname"""
p_Tags: typing.List[typing.Union['SchemaTagsEntry', dict]] = attr.ib(
default=None,
converter=SchemaTagsEntry.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(SchemaTagsEntry), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#cfn-eventschemas-schema-tags"""
@property
def rv_SchemaVersion(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#aws-resource-eventschemas-schema-return-values"""
return GetAtt(resource=self, attr_name="SchemaVersion")
@property
def rv_SchemaArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#aws-resource-eventschemas-schema-return-values"""
return GetAtt(resource=self, attr_name="SchemaArn")
@property
def rv_SchemaName(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-schema.html#aws-resource-eventschemas-schema-return-values"""
return GetAtt(resource=self, attr_name="SchemaName")
@attr.s
class Registry(Resource):
"""
AWS Object Type = "AWS::EventSchemas::Registry"
Resource Document: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html
Property Document:
- ``p_Description``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html#cfn-eventschemas-registry-description
- ``p_RegistryName``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html#cfn-eventschemas-registry-registryname
- ``p_Tags``: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html#cfn-eventschemas-registry-tags
"""
AWS_OBJECT_TYPE = "AWS::EventSchemas::Registry"
p_Description: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "Description"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html#cfn-eventschemas-registry-description"""
p_RegistryName: TypeHint.intrinsic_str = attr.ib(
default=None,
validator=attr.validators.optional(attr.validators.instance_of(TypeCheck.intrinsic_str_type)),
metadata={AttrMeta.PROPERTY_NAME: "RegistryName"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html#cfn-eventschemas-registry-registryname"""
p_Tags: typing.List[typing.Union['RegistryTagsEntry', dict]] = attr.ib(
default=None,
converter=RegistryTagsEntry.from_list,
validator=attr.validators.optional(attr.validators.deep_iterable(member_validator=attr.validators.instance_of(RegistryTagsEntry), iterable_validator=attr.validators.instance_of(list))),
metadata={AttrMeta.PROPERTY_NAME: "Tags"},
)
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html#cfn-eventschemas-registry-tags"""
@property
def rv_RegistryName(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html#aws-resource-eventschemas-registry-return-values"""
return GetAtt(resource=self, attr_name="RegistryName")
@property
def rv_RegistryArn(self) -> GetAtt:
"""Doc: http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-resource-eventschemas-registry.html#aws-resource-eventschemas-registry-return-values"""
return GetAtt(resource=self, attr_name="RegistryArn")
| [
"[email protected]"
]
| |
fabe74c1ac07365d3462e69f8781152f04bd89d2 | 777b281b8a13eb33276023cb1fddbc41be55950f | /lib/dataset/mpii.py | f85aa17676e7d37c6b3403b647b8122ee5df71df | [
"MIT"
]
| permissive | CrystalSixone/DSPNet | af0980908f5e0008b4b5499bc3092bddce4a25d5 | e82cc1938af65234471b6a139a8ac51f22de32a6 | refs/heads/main | 2023-01-03T21:59:14.891648 | 2020-10-24T02:20:21 | 2020-10-24T02:20:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,734 | py | # ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao ([email protected])
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import os
import json_tricks as json
from collections import OrderedDict
import numpy as np
from scipy.io import loadmat, savemat
from dataset.JointsDataset import JointsDataset
logger = logging.getLogger(__name__)
class MPIIDataset(JointsDataset):
def __init__(self, cfg, root, image_set, is_train, transform=None):
super().__init__(cfg, root, image_set, is_train, transform)
self.num_joints = 16
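        # left/right joint index pairs to swap when an image is horizontally flipped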
self.flip_pairs = [[0, 5], [1, 4], [2, 3], [10, 15], [11, 14], [12, 13]]
self.parent_ids = [1, 2, 6, 6, 3, 4, 6, 6, 7, 8, 11, 12, 7, 7, 13, 14]
self.upper_body_ids = (7, 8, 9, 10, 11, 12, 13, 14, 15)
self.lower_body_ids = (0, 1, 2, 3, 4, 5, 6)
self.db = self._get_db()
if is_train and cfg.DATASET.SELECT_DATA:
self.db = self.select_data(self.db)
logger.info('=> load {} samples'.format(len(self.db)))
def _get_db(self):
file_name = os.path.join(self.root,'annot',self.image_set+'.json')
with open(file_name) as anno_file:
anno = json.load(anno_file)
gt_db = []
for a in anno:
image_name = a['image']
c = np.array(a['center'], dtype=np.float)
s = np.array([a['scale'], a['scale']], dtype=np.float)
# Adjust center/scale slightly to avoid cropping limbs
if c[0] != -1:
c[1] = c[1] + 15 * s[1]
s = s * 1.25
# MPII uses matlab format, index is based 1,
# we should first convert to 0-based index
c = c - 1
joints_3d = np.zeros((self.num_joints, 3), dtype=np.float)
joints_3d_vis = np.zeros((self.num_joints, 3), dtype=np.float)
if self.image_set != 'test':
joints = np.array(a['joints'])
joints[:, 0:2] = joints[:, 0:2] - 1
joints_vis = np.array(a['joints_vis'])
assert len(joints) == self.num_joints, \
'joint num diff: {} vs {}'.format(len(joints),
self.num_joints)
joints_3d[:, 0:2] = joints[:, 0:2]
joints_3d_vis[:, 0] = joints_vis[:]
joints_3d_vis[:, 1] = joints_vis[:]
image_dir = 'images.zip@' if self.data_format == 'zip' else 'images'
gt_db.append(
{
'image': os.path.join(self.root, image_dir, image_name),
'center': c,
'scale': s,
'joints_3d': joints_3d,
'joints_3d_vis': joints_3d_vis,
'filename': '',
'imgnum': 0,
}
)
return gt_db
def evaluate(self, cfg, preds, output_dir, *args, **kwargs):
# convert 0-based index to 1-based index
preds = preds[:, :, 0:2] + 1.0
if output_dir:
pred_file = os.path.join(output_dir, 'pred.mat')
savemat(pred_file, mdict={'preds': preds})
if 'test' in cfg.DATASET.TEST_SET:
return {'Null': 0.0}, 0.0
SC_BIAS = 0.6
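        # standard MPII PCKh convention: effective head size = 0.6 * head-box diagonal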
threshold = 0.5
gt_file = os.path.join(cfg.DATASET.ROOT,
'annot',
'gt_{}.mat'.format(cfg.DATASET.TEST_SET))
gt_dict = loadmat(gt_file)
dataset_joints = gt_dict['dataset_joints']
jnt_missing = gt_dict['jnt_missing']
pos_gt_src = gt_dict['pos_gt_src']
headboxes_src = gt_dict['headboxes_src']
pos_pred_src = np.transpose(preds, [1, 2, 0])
head = np.where(dataset_joints == 'head')[1][0]
lsho = np.where(dataset_joints == 'lsho')[1][0]
lelb = np.where(dataset_joints == 'lelb')[1][0]
lwri = np.where(dataset_joints == 'lwri')[1][0]
lhip = np.where(dataset_joints == 'lhip')[1][0]
lkne = np.where(dataset_joints == 'lkne')[1][0]
lank = np.where(dataset_joints == 'lank')[1][0]
rsho = np.where(dataset_joints == 'rsho')[1][0]
relb = np.where(dataset_joints == 'relb')[1][0]
rwri = np.where(dataset_joints == 'rwri')[1][0]
rkne = np.where(dataset_joints == 'rkne')[1][0]
rank = np.where(dataset_joints == 'rank')[1][0]
rhip = np.where(dataset_joints == 'rhip')[1][0]
jnt_visible = 1 - jnt_missing
uv_error = pos_pred_src - pos_gt_src
uv_err = np.linalg.norm(uv_error, axis=1)
headsizes = headboxes_src[1, :, :] - headboxes_src[0, :, :]
headsizes = np.linalg.norm(headsizes, axis=0)
headsizes *= SC_BIAS
scale = np.multiply(headsizes, np.ones((len(uv_err), 1)))
scaled_uv_err = np.divide(uv_err, scale)
scaled_uv_err = np.multiply(scaled_uv_err, jnt_visible)
jnt_count = np.sum(jnt_visible, axis=1)
less_than_threshold = np.multiply((scaled_uv_err <= threshold),
jnt_visible)
PCKh = np.divide(100.*np.sum(less_than_threshold, axis=1), jnt_count)
# save
rng = np.arange(0, 0.5+0.01, 0.01)
pckAll = np.zeros((len(rng), 16))
for r in range(len(rng)):
threshold = rng[r]
less_than_threshold = np.multiply(scaled_uv_err <= threshold,
jnt_visible)
pckAll[r, :] = np.divide(100.*np.sum(less_than_threshold, axis=1),
jnt_count)
PCKh = np.ma.array(PCKh, mask=False)
PCKh.mask[6:8] = True
jnt_count = np.ma.array(jnt_count, mask=False)
jnt_count.mask[6:8] = True
jnt_ratio = jnt_count / np.sum(jnt_count).astype(np.float64)
name_value = [
('Head', PCKh[head]),
('Shoulder', 0.5 * (PCKh[lsho] + PCKh[rsho])),
('Elbow', 0.5 * (PCKh[lelb] + PCKh[relb])),
('Wrist', 0.5 * (PCKh[lwri] + PCKh[rwri])),
('Hip', 0.5 * (PCKh[lhip] + PCKh[rhip])),
('Knee', 0.5 * (PCKh[lkne] + PCKh[rkne])),
('Ankle', 0.5 * (PCKh[lank] + PCKh[rank])),
('Mean', np.sum(PCKh * jnt_ratio)),
('[email protected]', np.sum(pckAll[11, :] * jnt_ratio))
]
name_value = OrderedDict(name_value)
return name_value, name_value['Mean']
| [
"[email protected]"
]
|